Compare commits


114 Commits

Author SHA1 Message Date
Piyush Singariya
26207cf0b3 fix: go tests 2025-12-27 10:56:16 +05:30
Piyush Singariya
d93a8d1f63 fix: tests 2025-12-24 16:18:55 +05:30
Piyush Singariya
f5cdce4f70 fix: lint issues 2025-12-24 14:24:09 +05:30
Piyush Singariya
08bce90f1d fix: revert changes and static errors 2025-12-24 13:45:46 +05:30
Piyush Singariya
63c27e9af6 Merge branch 'main' into json 2025-12-24 13:35:17 +05:30
Piyush Singariya
85f0193a1e chore: remove redundent changes 2025-12-23 12:48:00 +05:30
Piyush Singariya
bde9d91867 chore: remove utils 2025-12-23 12:42:59 +05:30
Piyush Singariya
5147db6997 Merge branch 'json-plan' into json 2025-11-28 12:32:35 +05:30
Piyush Singariya
0c5c2a00d9 Merge branch 'main' into json-plan 2025-11-28 12:32:08 +05:30
Piyush Singariya
2669eda2f8 Merge branch 'main' into json 2025-11-28 12:27:12 +05:30
Piyush Singariya
b4df9ba1b9 chore: minor changes in reflection of merge 2025-11-28 12:25:27 +05:30
Piyush Singariya
af7b4c6fa8 Merge branch 'json-plan' into json 2025-11-28 12:19:42 +05:30
Piyush Singariya
5667793d7f chore: changes from overhaul 2025-11-28 12:02:10 +05:30
Piyush Singariya
92a79bbdce Merge branch 'promoted-paths' into json-plan 2025-11-28 11:58:29 +05:30
Piyush Singariya
1887ddd49c Merge branch 'body-json-keys' into promoted-paths 2025-11-28 11:45:59 +05:30
Piyush Singariya
57aac8b800 chore: self review 2025-11-28 11:43:07 +05:30
Piyush Singariya
e4c1b2ce50 chore: remove unnecessary TTL code 2025-11-28 11:40:27 +05:30
Piyush Singariya
3c564b6809 revert: unnecessary binary 2025-11-27 18:03:14 +05:30
Piyush Singariya
d149e53f70 revert: unnecessary changes 2025-11-27 18:02:26 +05:30
Piyush Singariya
220c78e72b test: delete request tested 2025-11-27 17:56:15 +05:30
Piyush Singariya
1ff971dac4 feat: ready to be tested 2025-11-27 17:06:54 +05:30
Piyush Singariya
1dc03eebd4 feat: drop indexes 2025-11-27 16:08:43 +05:30
Piyush Singariya
9f71a6423f chore: changes based on review 2025-11-27 15:55:47 +05:30
Piyush Singariya
0a3e2e6215 chore: in progress changes 2025-11-27 14:15:00 +05:30
Piyush Singariya
12476b719f chore: go mod 2025-11-27 12:18:36 +05:30
Piyush Singariya
193f35ba17 chore: remove db 2025-11-27 12:17:26 +05:30
Piyush Singariya
de28d6ba15 fix: test TestPrepareLogsQuery 2025-11-27 11:49:41 +05:30
Piyush Singariya
7a3f9b963d fix: test TestQueryToKeys 2025-11-27 11:32:32 +05:30
Piyush Singariya
aedf61c8e0 Merge branch 'main' into body-json-keys 2025-11-27 11:11:46 +05:30
Piyush Singariya
f12f16f996 test: fixing test 1 2025-11-27 11:11:17 +05:30
Piyush Singariya
ad61e8f700 chore: changes based on review 2025-11-27 10:47:42 +05:30
Piyush Singariya
286129c7a0 chore: reflection from json branch 2025-11-26 12:53:53 +05:30
Piyush Singariya
78a3cc69ee Merge branch 'body-json-keys' into promoted-paths 2025-11-26 12:51:30 +05:30
Piyush Singariya
c2790258a3 Merge branch 'body-json-keys' into json-plan 2025-11-26 12:43:07 +05:30
Piyush Singariya
4c70b44230 chore: reflect changes from the overhaul 2025-11-26 12:41:06 +05:30
Piyush Singariya
a178c90a08 chore: shift constants 2025-11-26 12:33:28 +05:30
Piyush Singariya
cc9c316bfe task: JSON Code overhaul for Query Builder (#9677)
* chore: minor comment change

* chore: func rename, file rename

* chore: change table names

* feat: list values for simple paths

* chore: in progress

* feat: in progress TestStatementBuilderListQueryBody working

* feat: in progress TestStatementBuilderListQueryBodyPromoted

* feat: in progress TestStatementBuilderListQueryBodyMessage

* feat: in progress group by

* feat: all query tests passed new
2025-11-25 19:26:38 +05:30
Piyush Singariya
6ea517f530 chore: change table names 2025-11-19 12:22:49 +05:30
Piyush Singariya
c1789e7921 chore: func rename, file rename 2025-11-19 11:58:26 +05:30
Piyush Singariya
4b67a1c52f chore: minor comment change 2025-11-18 19:39:01 +05:30
Piyush Singariya
beb4dc060d Merge branch 'body-json-keys' into promoted-paths 2025-11-18 19:36:23 +05:30
Piyush Singariya
f6fd182558 Merge branch 'json-plan' into json 2025-11-18 19:34:01 +05:30
Piyush Singariya
92e5abed6e Merge branch 'body-json-keys' into json-plan 2025-11-18 19:32:55 +05:30
Piyush Singariya
dcb0ca9265 Merge branch 'body-json-keys' into json 2025-11-18 19:31:49 +05:30
Piyush Singariya
8ab44fd846 feat: change ExtractBodyPaths 2025-11-18 19:30:33 +05:30
Piyush Singariya
3d3a566094 fix: type extraction changed 2025-11-18 17:20:40 +05:30
Piyush Singariya
6b86779623 feat: using lru cache 2025-11-18 15:35:26 +05:30
Piyush Singariya
f0c405f068 feat: parameterize granularity and index 2025-11-18 13:50:44 +05:30
Piyush Singariya
9bea32c354 Merge branch 'json-plan' into json 2025-11-17 18:07:06 +05:30
Piyush Singariya
1bb9386ea1 test: json plan 2025-11-17 16:48:27 +05:30
Piyush Singariya
f3dfaf37ce test: json plan test 2025-11-17 16:47:32 +05:30
Piyush Singariya
0498c77634 chore: remove unused field 2025-11-17 15:36:20 +05:30
Piyush Singariya
38146ae364 fix: import issues 2025-11-17 15:20:13 +05:30
Piyush Singariya
f911f98f83 chore: shift function 2025-11-17 15:16:44 +05:30
Piyush Singariya
28b1656d4c fix: go mod 2025-11-17 15:14:57 +05:30
Piyush Singariya
fe28290c76 Merge branch 'promoted-paths' into json-plan 2025-11-17 15:11:51 +05:30
Piyush Singariya
1b5738cdae fix: remove bad import of collector constants 2025-11-17 15:11:37 +05:30
Piyush Singariya
0c61174506 feat: json plan 2025-11-17 15:09:24 +05:30
Piyush Singariya
b850b69e16 feat: changes based on review 2025-11-17 15:06:55 +05:30
Piyush Singariya
b0d52ee87a feat: telemetry types 2025-11-17 14:34:48 +05:30
Piyush Singariya
97ead5c5b7 fix: revert ttl logs api change 2025-11-17 14:30:52 +05:30
Piyush Singariya
aee5c4f8f7 fix: ttl bug 2025-11-17 14:28:51 +05:30
Piyush Singariya
255b39f43c fix: promote paths if already promoted 2025-11-17 14:24:35 +05:30
Piyush Singariya
441d328976 Merge branch 'body-json-keys' into promoted-paths 2025-11-17 13:19:50 +05:30
Piyush Singariya
93ea44ff62 feat: json Body Keys 2025-11-17 13:11:37 +05:30
Piyush Singariya
d31cce3a1f feat: split promote API 2025-11-17 13:02:58 +05:30
Piyush Singariya
b7fc9ee85a feat: api GET or POST promote paths 2025-11-17 12:53:08 +05:30
Piyush Singariya
2cc3ad335c Merge branch 'promoted-paths' into json 2025-11-14 16:30:33 +05:30
Piyush Singariya
748b08827d fix: minor change of ars in ListIndexedPaths 2025-11-14 16:26:48 +05:30
Piyush Singariya
be17b71a93 chore: minor change to split json_body_keys 2025-11-14 16:24:10 +05:30
Piyush Singariya
917345ddf6 feat: create String indexes on promoted and body paths 2025-11-14 16:08:34 +05:30
Nityananda Gohain
9d7260ad40 Merge branch 'main' into json 2025-11-13 14:33:31 +05:30
Piyush Singariya
f240bbdfd6 chore: remove log 2025-11-13 11:12:28 +05:30
Piyush Singariya
b2d5560576 fix: json search text 2025-11-12 15:01:00 +05:30
Piyush Singariya
8880d13b8b chore: minor changes 2025-11-12 13:43:01 +05:30
Piyush Singariya
3ef9f8750f fix: minor changes in json_plan 2025-11-12 11:07:21 +05:30
Piyush Singariya
b2bdfa25c7 fix: review based on cursor 2025-11-11 15:57:33 +05:30
Piyush Singariya
a4c2234e05 fix: stmt builder test file -x2 2025-11-10 17:32:44 +05:30
Piyush Singariya
3abb9c824f fix: stmt builder test file 2025-11-10 17:29:25 +05:30
Piyush Singariya
2876312719 test: fix tests 2025-11-10 14:44:17 +05:30
Piyush Singariya
7d56901562 Merge branch 'main' into json 2025-11-10 12:49:46 +05:30
Piyush Singariya
9619356c33 fix: in progress; fixing tests 2025-11-10 12:43:02 +05:30
Piyush Singariya
7bce659e5a feat: api to promote + index paths 2025-11-03 22:05:37 +05:30
Piyush Singariya
7ac05c901f fix: backticks on promoted and body_v2 2025-10-31 18:36:35 +05:30
Piyush Singariya
8573ed4021 feat: string indexed paths acknowledged 2025-10-31 18:34:37 +05:30
Piyush Singariya
09155f7ede feat: remove message from body 2025-10-31 15:57:28 +05:30
Piyush Singariya
b2c0ff566a feat: acknowledge string indexing in Query building 2025-10-31 12:21:47 +05:30
Piyush Singariya
0e6b310865 feat: fetching ngram indexes 2025-10-31 12:05:35 +05:30
Piyush Singariya
99ec44a793 fix: delete message exsits key 2025-10-30 12:12:22 +05:30
Piyush Singariya
bb4eae3fd9 feat: body_v2.message exists 2025-10-29 17:17:07 +05:30
Piyush Singariya
8af86a1e6a fix: seeder logic 2025-10-21 19:53:52 +05:30
Piyush Singariya
016c92ea7e feat: body.message handled 2025-10-21 15:03:46 +05:30
Piyush Singariya
b7ffbca598 chore: removing unnecessary code 2025-10-21 12:54:01 +05:30
Piyush Singariya
23fee19048 feat: add default pipelines during agent config 2025-10-21 11:44:46 +05:30
Piyush Singariya
c5622a5560 fix: minor revert to disable grpby on body column 2025-10-16 16:56:15 +05:30
Piyush Singariya
8f1dc4c36f fix: variablized bodyv2 querying 2025-10-16 16:52:47 +05:30
Piyush Singariya
21f918a950 feat: sync paths table TTL with logs table using same function 2025-10-15 15:41:41 +05:30
Piyush Singariya
471bcc9de8 fix: convert body to JSON string for display on UI 2025-10-15 15:41:20 +05:30
Piyush Singariya
14e4564188 feat: groupby promoted + body_v2 works 2025-10-14 16:25:32 +05:30
Piyush Singariya
8d007836da test: added promoted column WHERE condition builder 2025-10-13 20:46:50 +05:30
Piyush Singariya
589f32d326 fix: minor fix in checking promoted 2025-10-13 20:46:08 +05:30
Piyush Singariya
74c31c97f9 feat: updated aliases logic 2025-10-13 19:16:05 +05:30
Piyush Singariya
696f457521 fix: tested in UI 2025-10-13 16:04:42 +05:30
Piyush Singariya
1b12ebd76a feat: suggestions working 2025-10-13 14:37:59 +05:30
Piyush Singariya
82a67f7604 fix: removal of some code 2025-10-12 21:54:12 +05:30
Piyush Singariya
4c8fedc881 feat: simple group by works 2025-10-12 20:18:24 +05:30
Piyush Singariya
de2c95e2f2 feat: better json plan struct 2025-10-12 18:09:07 +05:30
Piyush Singariya
07052d9118 feat: started working on GroupBy 2025-10-10 19:51:58 +05:30
Piyush Singariya
db88929016 feat: jSON Arr + Dyn Arr branching 2025-10-10 17:28:51 +05:30
Piyush Singariya
356e84fb96 feat: nested array and dynamic Query building works 2025-10-09 11:27:13 +05:30
Piyush Singariya
034ce8054c feat: three query patterns working 2025-10-07 14:29:43 +05:30
Piyush Singariya
fdf01ad707 feat: basic fetching works 2025-09-29 15:06:00 +05:30
Piyush Singariya
d1a27fc3cd Merge branch 'main' into json 2025-09-26 17:23:23 +05:30
Piyush Singariya
405b51c680 test: fetching logs basic 2025-09-25 17:56:24 +05:30
79 changed files with 1759 additions and 2299 deletions

View File

@@ -1,29 +0,0 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
export const getMetricMetadata = async (
metricName: string,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
try {
const encodedMetricName = encodeURIComponent(metricName);
const response = await axios.get(
`/metrics/metadata?metricName=${encodedMetricName}`,
{
signal,
headers,
},
);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
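The hunk above deletes the v2 metric-metadata fetcher outright; its react-query key is also dropped from REACT_QUERY_KEY in a later hunk. For orientation, here is a minimal sketch of how a fetcher like this is typically wrapped in a react-query hook. The hook name useGetMetricMetadata, the import path for the deleted file, and the option set are assumptions for illustration, not taken from this diff:

// Sketch only: illustrative consumer of the now-deleted getMetricMetadata.
// `useGetMetricMetadata` is a hypothetical name; 'GET_METRIC_METADATA' is the
// query key removed from constants/reactQueryKeys further down.
import { useQuery, UseQueryResult } from 'react-query';
import { ErrorResponseV2, SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
// Assumed location of the deleted module shown in the hunk above.
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';

export function useGetMetricMetadata(
	metricName: string,
): UseQueryResult<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> {
	return useQuery(
		['GET_METRIC_METADATA', metricName],
		() => getMetricMetadata(metricName),
		// Skip the request until a metric name is actually selected.
		{ enabled: Boolean(metricName) },
	);
}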

View File

@@ -560,10 +560,6 @@
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
}
}
@@ -573,10 +569,6 @@
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
.ant-select-arrow {

View File

@@ -169,10 +169,6 @@
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
}
}

View File

@@ -32,7 +32,6 @@ const ADD_ONS_KEYS = {
ORDER_BY: 'order_by',
LIMIT: 'limit',
LEGEND_FORMAT: 'legend_format',
REDUCE_TO: 'reduce_to',
};
const ADD_ONS_KEYS_TO_QUERY_PATH = {
@@ -41,14 +40,13 @@ const ADD_ONS_KEYS_TO_QUERY_PATH = {
[ADD_ONS_KEYS.ORDER_BY]: 'orderBy',
[ADD_ONS_KEYS.LIMIT]: 'limit',
[ADD_ONS_KEYS.LEGEND_FORMAT]: 'legend',
[ADD_ONS_KEYS.REDUCE_TO]: 'reduceTo',
};
const ADD_ONS = [
{
icon: <BarChart2 size={14} />,
label: 'Group By',
key: ADD_ONS_KEYS.GROUP_BY,
key: 'group_by',
description:
'Break down data by attributes like service name, endpoint, status code, or region. Essential for spotting patterns and comparing performance across different segments.',
docLink: 'https://signoz.io/docs/userguide/query-builder-v5/#grouping',
@@ -56,7 +54,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Having',
key: ADD_ONS_KEYS.HAVING,
key: 'having',
description:
'Filter grouped results based on aggregate conditions. Show only groups meeting specific criteria, like error rates > 5% or p99 latency > 500',
docLink:
@@ -65,7 +63,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Order By',
key: ADD_ONS_KEYS.ORDER_BY,
key: 'order_by',
description:
'Sort results to surface what matters most. Quickly identify slowest operations, most frequent errors, or highest resource consumers.',
docLink:
@@ -74,7 +72,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Limit',
key: ADD_ONS_KEYS.LIMIT,
key: 'limit',
description:
'Show only the top/bottom N results. Perfect for focusing on outliers, reducing noise, and improving dashboard performance.',
docLink:
@@ -83,7 +81,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Legend format',
key: ADD_ONS_KEYS.LEGEND_FORMAT,
key: 'legend_format',
description:
'Customize series labels using variables like {{service.name}}-{{endpoint}}. Makes charts readable at a glance during incident investigation.',
docLink:
@@ -94,7 +92,7 @@ const ADD_ONS = [
const REDUCE_TO = {
icon: <ScrollText size={14} />,
label: 'Reduce to',
key: ADD_ONS_KEYS.REDUCE_TO,
key: 'reduce_to',
description:
'Apply mathematical operations like sum, average, min, max, or percentiles to reduce multiple time series into a single value.',
docLink:
@@ -220,9 +218,10 @@ function QueryAddOns({
);
const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));
// Filter and set selected views: add-ons that are both active and available
setSelectedViews(
filteredAddOns.filter(
ADD_ONS.filter(
(addOn) =>
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
),

View File

@@ -1,12 +1,6 @@
/* eslint-disable */
import {
fireEvent,
render,
screen,
userEvent,
waitFor,
within,
} from 'tests/test-utils';
import { fireEvent, render, screen } from '@testing-library/react';
import React from 'react';
import QueryAddOns from '../QueryV2/QueryAddOns/QueryAddOns';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -61,7 +55,16 @@ jest.mock('../QueryV2/QueryAddOns/HavingFilter/HavingFilter', () => ({
),
}));
// ReduceToFilter is not mocked - we test the actual Ant Design Select component
jest.mock(
'container/QueryBuilder/filters/ReduceToFilter/ReduceToFilter',
() => ({
ReduceToFilter: ({ onChange }: any) => (
<button data-testid="reduce-to" onClick={() => onChange('sum')}>
ReduceToFilter
</button>
),
}),
);
function baseQuery(overrides: Partial<any> = {}): any {
return {
@@ -137,7 +140,7 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('order-by-content')).toBeInTheDocument();
});
it('limit input auto-opens when limit is set and changing it calls handler', async () => {
it('limit input auto-opens when limit is set and changing it calls handler', () => {
render(
<QueryAddOns
query={baseQuery({ limit: 5 })}
@@ -180,88 +183,4 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('limit-content')).toBeInTheDocument();
expect(limitInput.value).toBe('7');
});
it('shows reduce-to add-on when showReduceTo is true', () => {
render(
<QueryAddOns
query={baseQuery()}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('query-add-on-reduce_to')).toBeInTheDocument();
});
it('auto-opens reduce-to content when reduceTo is set', () => {
render(
<QueryAddOns
query={baseQuery({ reduceTo: 'sum' })}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
it('calls handleSetQueryData when reduce-to value changes', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const query = baseQuery({
reduceTo: 'avg',
aggregations: [{ id: 'a', operator: 'count', reduceTo: 'avg' }],
});
render(
<QueryAddOns
query={query}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
// Wait for the reduce-to content section to be visible (it auto-opens when reduceTo is set)
await waitFor(() => {
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
// Get the Select component by its role (combobox)
// The Select is within the reduce-to-content section
const reduceToContent = screen.getByTestId('reduce-to-content');
const selectCombobox = within(reduceToContent).getByRole('combobox');
// Open the dropdown by clicking on the combobox
await user.click(selectCombobox);
// Wait for the dropdown listbox to appear
await screen.findByRole('listbox');
// Find and click the "Sum" option
const sumOption = await screen.findByText('Sum of values in timeframe');
await user.click(sumOption);
// Verify the handler was called with the correct value
await waitFor(() => {
expect(mockHandleSetQueryData).toHaveBeenCalledWith(0, {
...query,
aggregations: [
{
...(query.aggregations?.[0] as any),
reduceTo: 'sum',
},
],
});
});
});
});

View File

@@ -421,16 +421,11 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
...currentFilter,
value: currentFilter.value.filter((val) => val !== value),
};
if (newFilter.value.length === 0) {
query.filters.items = query.filters.items.filter(
(item) => !isEqual(item.key?.key, filter.attributeKey.key),
);
if (query.filter?.expression) {
query.filter.expression = removeKeysFromExpression(
query.filter.expression,
[filter.attributeKey.key],
);
}
} else {
query.filters.items = query.filters.items.map((item) => {
if (isEqual(item.key?.key, filter.attributeKey.key)) {
@@ -440,16 +435,6 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
});
}
} else {
const newFilter = {
...currentFilter,
value: currentFilter.value === value ? null : currentFilter.value,
};
if (newFilter.value === null && query.filter?.expression) {
query.filter.expression = removeKeysFromExpression(
query.filter.expression,
[filter.attributeKey.key],
);
}
query.filters.items = query.filters.items.filter(
(item) => !isEqual(item.key?.key, filter.attributeKey.key),
);

View File

@@ -1,18 +1,11 @@
import './styles.scss';
import { WarningFilled } from '@ant-design/icons';
import { Select, Tooltip } from 'antd';
import { Select } from 'antd';
import { DefaultOptionType } from 'antd/es/select';
import classNames from 'classnames';
import { useMemo } from 'react';
import { UniversalYAxisUnitMappings } from './constants';
import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
import {
getUniversalNameFromMetricUnit,
getYAxisCategories,
mapMetricUnitToUniversalUnit,
} from './utils';
import { getYAxisCategories, mapMetricUnitToUniversalUnit } from './utils';
function YAxisUnitSelector({
value,
@@ -21,24 +14,9 @@ function YAxisUnitSelector({
loading = false,
'data-testid': dataTestId,
source,
initialValue,
}: YAxisUnitSelectorProps): JSX.Element {
const universalUnit = mapMetricUnitToUniversalUnit(value);
const incompatibleUnitMessage = useMemo(() => {
if (!initialValue || !value || loading) return '';
const initialUniversalUnit = mapMetricUnitToUniversalUnit(initialValue);
const currentUniversalUnit = mapMetricUnitToUniversalUnit(value);
if (initialUniversalUnit !== currentUniversalUnit) {
const initialUniversalUnitName = getUniversalNameFromMetricUnit(
initialValue,
);
const currentUniversalUnitName = getUniversalNameFromMetricUnit(value);
return `Unit mismatch. Saved unit is ${initialUniversalUnitName}, but ${currentUniversalUnitName} is selected.`;
}
return '';
}, [initialValue, value, loading]);
const handleSearch = (
searchTerm: string,
currentOption: DefaultOptionType | undefined,
@@ -71,16 +49,6 @@ function YAxisUnitSelector({
placeholder={placeholder}
filterOption={(input, option): boolean => handleSearch(input, option)}
loading={loading}
suffixIcon={
incompatibleUnitMessage ? (
<Tooltip title={incompatibleUnitMessage}>
<WarningFilled />
</Tooltip>
) : undefined
}
className={classNames({
'warning-state': incompatibleUnitMessage,
})}
data-testid={dataTestId}
>
{categories.map((category) => (

View File

@@ -91,36 +91,4 @@ describe('YAxisUnitSelector', () => {
expect(screen.getByText('Bytes (B)')).toBeInTheDocument();
expect(screen.getByText('Seconds (s)')).toBeInTheDocument();
});
it('shows warning message when incompatible unit is selected', () => {
render(
<YAxisUnitSelector
source={YAxisSource.ALERTS}
value="By"
onChange={mockOnChange}
initialValue="s"
/>,
);
const warningIcon = screen.getByLabelText('warning');
expect(warningIcon).toBeInTheDocument();
fireEvent.mouseOver(warningIcon);
return screen
.findByText(
'Unit mismatch. Saved unit is Seconds (s), but Bytes (B) is selected.',
)
.then((el) => expect(el).toBeInTheDocument());
});
it('does not show warning message when compatible unit is selected', () => {
render(
<YAxisUnitSelector
source={YAxisSource.ALERTS}
value="s"
onChange={mockOnChange}
initialValue="s"
/>,
);
const warningIcon = screen.queryByLabelText('warning');
expect(warningIcon).not.toBeInTheDocument();
});
});

View File

@@ -3,13 +3,3 @@
width: 220px;
}
}
.warning-state {
.ant-select-selector {
border-color: var(--bg-amber-400) !important;
}
.anticon {
color: var(--bg-amber-400) !important;
}
}

View File

@@ -6,7 +6,6 @@ export interface YAxisUnitSelectorProps {
disabled?: boolean;
'data-testid'?: string;
source: YAxisSource;
initialValue?: string;
}
export enum UniversalYAxisUnit {

View File

@@ -55,7 +55,6 @@ export const REACT_QUERY_KEY = {
GET_METRIC_DETAILS: 'GET_METRIC_DETAILS',
GET_RELATED_METRICS: 'GET_RELATED_METRICS',
GET_INSPECT_METRICS_DETAILS: 'GET_INSPECT_METRICS_DETAILS',
GET_METRIC_METADATA: 'GET_METRIC_METADATA',
// Traces Funnels Query Keys
GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',

View File

@@ -5,11 +5,9 @@ import { useCreateAlertState } from 'container/CreateAlertV2/context';
import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview';
import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import useGetYAxisUnit from 'hooks/useGetYAxisUnit';
import { useEffect, useState } from 'react';
import { useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { AlertDef } from 'types/api/alerts/def';
import { EQueryType } from 'types/common/dashboard';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -20,13 +18,7 @@ export interface ChartPreviewProps {
function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
const { currentQuery, panelType, stagedQuery } = useQueryBuilder();
const {
alertType,
thresholdState,
alertState,
setAlertState,
isEditMode,
} = useCreateAlertState();
const { thresholdState, alertState, setAlertState } = useCreateAlertState();
const { selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -35,25 +27,6 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
const yAxisUnit = alertState.yAxisUnit || '';
const fetchYAxisUnit =
!isEditMode && alertType === AlertTypes.METRICS_BASED_ALERT;
const selectedQueryName = thresholdState.selectedQuery;
const { yAxisUnit: initialYAxisUnit, isLoading } = useGetYAxisUnit(
selectedQueryName,
{
enabled: fetchYAxisUnit,
},
);
// Every time a new metric is selected, set the y-axis unit to its unit value if present
// Only for metrics-based alerts
useEffect(() => {
if (fetchYAxisUnit) {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: initialYAxisUnit });
}
}, [initialYAxisUnit, setAlertState, fetchYAxisUnit]);
const headline = (
<div className="chart-preview-headline">
<PlotTag
@@ -61,13 +34,11 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
panelType={panelType || PANEL_TYPES.TIME_SERIES}
/>
<YAxisUnitSelector
value={yAxisUnit}
initialValue={initialYAxisUnit}
value={alertState.yAxisUnit}
onChange={(value): void => {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
}}
source={YAxisSource.ALERTS}
loading={isLoading}
/>
</div>
);

View File

@@ -120,6 +120,7 @@ function FullView({
originalGraphType: selectedPanelType,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
return {
query: updatedQuery,
graphType: PANEL_TYPES.LIST,

View File

@@ -137,6 +137,7 @@ function GridCardGraph({
originalGraphType: widget.panelTypes,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
const initialDataSource = updatedQuery.builder.queryData[0].dataSource;
return {
query: updatedQuery,

View File

@@ -58,27 +58,6 @@
.explore-content {
padding: 0 8px;
.y-axis-unit-selector-container {
display: flex;
align-items: center;
gap: 10px;
padding-top: 10px;
margin-bottom: 10px;
.save-unit-container {
display: flex;
align-items: center;
gap: 10px;
.ant-btn {
border-radius: 2px;
.ant-typography {
font-size: 12px;
}
}
}
}
.ant-space {
margin-top: 10px;
margin-bottom: 20px;
@@ -96,14 +75,6 @@
.time-series-view {
min-width: 100%;
width: 100%;
position: relative;
.no-unit-warning {
position: absolute;
top: 30px;
right: 40px;
z-index: 1000;
}
}
.time-series-container {

View File

@@ -1,7 +1,7 @@
import './Explorer.styles.scss';
import * as Sentry from '@sentry/react';
import { Switch, Tooltip } from 'antd';
import { Switch } from 'antd';
import logEvent from 'api/common/logEvent';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import WarningPopover from 'components/WarningPopover/WarningPopover';
@@ -25,14 +25,10 @@ import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToD
import { v4 as uuid } from 'uuid';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
import MetricDetails from '../MetricDetails/MetricDetails';
// import QuerySection from './QuerySection';
import TimeSeries from './TimeSeries';
import { ExplorerTabs } from './types';
import {
getMetricUnits,
splitQueryIntoOneChartPerQuery,
useGetMetrics,
} from './utils';
import { splitQueryIntoOneChartPerQuery } from './utils';
const ONE_CHART_PER_QUERY_ENABLED_KEY = 'isOneChartPerQueryEnabled';
@@ -44,34 +40,6 @@ function Explorer(): JSX.Element {
currentQuery,
} = useQueryBuilder();
const { safeNavigate } = useSafeNavigate();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(false);
const metricNames = useMemo(() => {
const currentMetricNames: string[] = [];
stagedQuery?.builder.queryData.forEach((query) => {
if (query.aggregateAttribute?.key) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames;
}, [stagedQuery]);
const {
metrics,
isLoading: isMetricUnitsLoading,
isError: isMetricUnitsError,
} = useGetMetrics(metricNames);
const units = useMemo(() => getMetricUnits(metrics), [metrics]);
const areAllMetricUnitsSame = useMemo(
() =>
!isMetricUnitsLoading &&
!isMetricUnitsError &&
units.length > 0 &&
units.every((unit) => unit && unit === units[0]),
[units, isMetricUnitsLoading, isMetricUnitsError],
);
const [searchParams, setSearchParams] = useSearchParams();
const isOneChartPerQueryEnabled =
@@ -80,66 +48,7 @@ function Explorer(): JSX.Element {
const [showOneChartPerQuery, toggleShowOneChartPerQuery] = useState(
isOneChartPerQueryEnabled,
);
const [disableOneChartPerQuery, toggleDisableOneChartPerQuery] = useState(
false,
);
const [selectedTab] = useState<ExplorerTabs>(ExplorerTabs.TIME_SERIES);
const [yAxisUnit, setYAxisUnit] = useState<string | undefined>();
const unitsLength = useMemo(() => units.length, [units]);
const firstUnit = useMemo(() => units?.[0], [units]);
useEffect(() => {
// Set the y axis unit to the first metric unit if
// 1. There is one metric unit and it is not empty
// 2. All metric units are the same and not empty
// Else, set the y axis unit to empty if
// 1. There are more than one metric units and they are not the same
// 2. There are no metric units
// 3. There is exactly one metric unit but it is empty/undefined
if (unitsLength === 0) {
setYAxisUnit(undefined);
} else if (unitsLength === 1 && firstUnit) {
setYAxisUnit(firstUnit);
} else if (unitsLength === 1 && !firstUnit) {
setYAxisUnit(undefined);
} else if (areAllMetricUnitsSame) {
if (firstUnit) {
setYAxisUnit(firstUnit);
} else {
setYAxisUnit(undefined);
}
} else if (unitsLength > 1 && !areAllMetricUnitsSame) {
setYAxisUnit(undefined);
}
}, [unitsLength, firstUnit, areAllMetricUnitsSame]);
useEffect(() => {
// Don't apply logic during loading to avoid overwriting user preferences
if (isMetricUnitsLoading) {
return;
}
// Disable one chart per query if -
// 1. There are more than one metric
// 2. The metric units are not the same
if (units.length > 1 && !areAllMetricUnitsSame) {
toggleShowOneChartPerQuery(true);
toggleDisableOneChartPerQuery(true);
} else if (units.length <= 1) {
toggleShowOneChartPerQuery(false);
toggleDisableOneChartPerQuery(true);
} else {
// When units are the same and loading is complete, restore URL-based preference
toggleShowOneChartPerQuery(isOneChartPerQueryEnabled);
toggleDisableOneChartPerQuery(false);
}
}, [
units,
areAllMetricUnitsSame,
isMetricUnitsLoading,
isOneChartPerQueryEnabled,
]);
const handleToggleShowOneChartPerQuery = (): void => {
toggleShowOneChartPerQuery(!showOneChartPerQuery);
@@ -159,20 +68,15 @@ function Explorer(): JSX.Element {
[updateAllQueriesOperators],
);
const exportDefaultQuery = useMemo(() => {
const query = updateAllQueriesOperators(
currentQuery || initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
);
if (yAxisUnit && !query.unit) {
return {
...query,
unit: yAxisUnit,
};
}
return query;
}, [currentQuery, updateAllQueriesOperators, yAxisUnit]);
const exportDefaultQuery = useMemo(
() =>
updateAllQueriesOperators(
currentQuery || initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
),
[currentQuery, updateAllQueriesOperators],
);
useShareBuilderUrl({ defaultValue: defaultQuery });
@@ -186,16 +90,8 @@ function Explorer(): JSX.Element {
const widgetId = uuid();
let query = queryToExport || exportDefaultQuery;
if (yAxisUnit && !query.unit) {
query = {
...query,
unit: yAxisUnit,
};
}
const dashboardEditView = generateExportToDashboardLink({
query,
query: queryToExport || exportDefaultQuery,
panelType: PANEL_TYPES.TIME_SERIES,
dashboardId: dashboard.id,
widgetId,
@@ -203,33 +99,17 @@ function Explorer(): JSX.Element {
safeNavigate(dashboardEditView);
},
[exportDefaultQuery, safeNavigate, yAxisUnit],
[exportDefaultQuery, safeNavigate],
);
const splitedQueries = useMemo(
() =>
splitQueryIntoOneChartPerQuery(
stagedQuery || initialQueriesMap[DataSource.METRICS],
metricNames,
units,
),
[stagedQuery, metricNames, units],
[stagedQuery],
);
const [selectedMetricName, setSelectedMetricName] = useState<string | null>(
null,
);
const handleOpenMetricDetails = (metricName: string): void => {
setIsMetricDetailsOpen(true);
setSelectedMetricName(metricName);
};
const handleCloseMetricDetails = (): void => {
setIsMetricDetailsOpen(false);
setSelectedMetricName(null);
};
useEffect(() => {
logEvent(MetricsExplorerEvents.TabChanged, {
[MetricsExplorerEventKeys.Tab]: 'explorer',
@@ -243,44 +123,17 @@ function Explorer(): JSX.Element {
const [warning, setWarning] = useState<Warning | undefined>(undefined);
const oneChartPerQueryDisabledTooltip = useMemo(() => {
if (splitedQueries.length <= 1) {
return 'One chart per query cannot be toggled for a single query.';
}
if (units.length <= 1) {
return 'One chart per query cannot be toggled when there is only one metric.';
}
if (disableOneChartPerQuery) {
return 'One chart per query cannot be disabled for multiple queries with different units.';
}
return undefined;
}, [disableOneChartPerQuery, splitedQueries.length, units.length]);
// Show the y axis unit selector if -
// 1. There is only one metric
// 2. The metric has no saved unit
const showYAxisUnitSelector = useMemo(
() => !isMetricUnitsLoading && units.length === 1 && !units[0],
[units, isMetricUnitsLoading],
);
return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-explore-container">
<div className="explore-header">
<div className="explore-header-left-actions">
<span>1 chart/query</span>
<Tooltip
open={disableOneChartPerQuery ? undefined : false}
title={oneChartPerQueryDisabledTooltip}
>
<Switch
checked={showOneChartPerQuery}
onChange={handleToggleShowOneChartPerQuery}
disabled={disableOneChartPerQuery || splitedQueries.length <= 1}
size="small"
/>
</Tooltip>
<Switch
checked={showOneChartPerQuery}
onChange={handleToggleShowOneChartPerQuery}
size="small"
/>
</div>
<div className="explore-header-right-actions">
{!isEmpty(warning) && <WarningPopover warningData={warning} />}
@@ -321,16 +174,6 @@ function Explorer(): JSX.Element {
<TimeSeries
showOneChartPerQuery={showOneChartPerQuery}
setWarning={setWarning}
areAllMetricUnitsSame={areAllMetricUnitsSame}
isMetricUnitsLoading={isMetricUnitsLoading}
isMetricUnitsError={isMetricUnitsError}
metricUnits={units}
metricNames={metricNames}
metrics={metrics}
handleOpenMetricDetails={handleOpenMetricDetails}
yAxisUnit={yAxisUnit}
setYAxisUnit={setYAxisUnit}
showYAxisUnitSelector={showYAxisUnitSelector}
/>
)}
{/* TODO: Enable once we have resolved all related metrics issues */}
@@ -344,17 +187,9 @@ function Explorer(): JSX.Element {
query={exportDefaultQuery}
sourcepage={DataSource.METRICS}
onExport={handleExport}
isOneChartPerQuery={showOneChartPerQuery}
isOneChartPerQuery={false}
splitedQueries={splitedQueries}
/>
{isMetricDetailsOpen && (
<MetricDetails
metricName={selectedMetricName}
isOpen={isMetricDetailsOpen}
onClose={handleCloseMetricDetails}
isModalTimeSelection={false}
/>
)}
</Sentry.ErrorBoundary>
);
}

View File

@@ -1,18 +1,14 @@
import { Color } from '@signozhq/design-tokens';
import { Tooltip, Typography } from 'antd';
import { isAxiosError } from 'axios';
import classNames from 'classnames';
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters/BuilderUnitsFilter/BuilderUnits';
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
import { convertDataValueToMs } from 'container/TimeSeriesView/utils';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { AlertTriangle } from 'lucide-react';
import { useMemo } from 'react';
import { useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -28,13 +24,6 @@ import { splitQueryIntoOneChartPerQuery } from './utils';
function TimeSeries({
showOneChartPerQuery,
setWarning,
isMetricUnitsLoading,
metricUnits,
metricNames,
handleOpenMetricDetails,
yAxisUnit,
setYAxisUnit,
showYAxisUnitSelector,
}: TimeSeriesProps): JSX.Element {
const { stagedQuery, currentQuery } = useQueryBuilder();
@@ -67,14 +56,13 @@ function TimeSeries({
showOneChartPerQuery
? splitQueryIntoOneChartPerQuery(
stagedQuery || initialQueriesMap[DataSource.METRICS],
metricNames,
metricUnits,
)
: [stagedQuery || initialQueriesMap[DataSource.METRICS]],
// eslint-disable-next-line react-hooks/exhaustive-deps
[showOneChartPerQuery, stagedQuery, JSON.stringify(metricUnits)],
[showOneChartPerQuery, stagedQuery],
);
const [yAxisUnit, setYAxisUnit] = useState<string>('');
const queries = useQueries(
queryPayloads.map((payload, index) => ({
queryKey: [
@@ -138,148 +126,32 @@ function TimeSeries({
setYAxisUnit(value);
};
// TODO: Enable once we have resolved all related metrics v2 api issues
// Show the save unit button if
// 1. There is only one metric
// 2. The metric has no saved unit
// 3. The user has selected a unit
// const showSaveUnitButton = useMemo(
// () =>
// metricUnits.length === 1 &&
// Boolean(metrics?.[0]) &&
// !metricUnits[0] &&
// yAxisUnit,
// [metricUnits, metrics, yAxisUnit],
// );
// const {
// mutate: updateMetricMetadata,
// isLoading: isUpdatingMetricMetadata,
// } = useUpdateMetricMetadata();
// const handleSaveUnit = (): void => {
// updateMetricMetadata(
// {
// metricName: metricNames[0],
// payload: {
// unit: yAxisUnit,
// description: metrics[0]?.description ?? '',
// metricType: metrics[0]?.type as MetricType,
// temporality: metrics[0]?.temporality,
// },
// },
// {
// onSuccess: () => {
// notifications.success({
// message: 'Unit saved successfully',
// });
// queryClient.invalidateQueries([
// REACT_QUERY_KEY.GET_METRIC_DETAILS,
// metricNames[0],
// ]);
// },
// onError: () => {
// notifications.error({
// message: 'Failed to save unit',
// });
// },
// },
// );
// };
return (
<>
<div className="y-axis-unit-selector-container">
{showYAxisUnitSelector && (
<>
<YAxisUnitSelector
onChange={onUnitChangeHandler}
value={yAxisUnit}
source={YAxisSource.EXPLORER}
data-testid="y-axis-unit-selector"
/>
{/* TODO: Enable once we have resolved all related metrics v2 api issues */}
{/* {showSaveUnitButton && (
<div className="save-unit-container">
<Typography.Text>
Save the selected unit for this metric?
</Typography.Text>
<Button
type="primary"
size="small"
disabled={isUpdatingMetricMetadata}
onClick={handleSaveUnit}
>
<Typography.Paragraph>Yes</Typography.Paragraph>
</Button>
</div>
)} */}
</>
)}
</div>
<BuilderUnitsFilter onChange={onUnitChangeHandler} yAxisUnit={yAxisUnit} />
<div
className={classNames({
'time-series-container': changeLayoutForOneChartPerQuery,
})}
>
{responseData.map((datapoint, index) => {
const isQueryDataItem = index < metricNames.length;
const metricName = isQueryDataItem ? metricNames[index] : undefined;
const metricUnit = isQueryDataItem ? metricUnits[index] : undefined;
// Show the no unit warning if -
// 1. The metric query is not loading
// 2. The metric units are not loading
// 3. There are more than one metric
// 4. The current metric unit is empty
// 5. Is a queryData item
const isMetricUnitEmpty =
isQueryDataItem &&
!queries[index].isLoading &&
!isMetricUnitsLoading &&
metricUnits.length > 1 &&
!metricUnit &&
metricName;
const currentYAxisUnit = yAxisUnit || metricUnit;
return (
<div
className="time-series-view"
// eslint-disable-next-line react/no-array-index-key
key={index}
>
{isMetricUnitEmpty && metricName && (
<Tooltip
className="no-unit-warning"
title={
<Typography.Text>
This metric does not have a unit. Please set one for it in the{' '}
<Typography.Link
onClick={(): void => handleOpenMetricDetails(metricName)}
>
metric details
</Typography.Link>{' '}
page.
</Typography.Text>
}
>
<AlertTriangle size={16} color={Color.BG_AMBER_400} />
</Tooltip>
)}
<TimeSeriesView
isFilterApplied={false}
isError={queries[index].isError}
isLoading={queries[index].isLoading || isMetricUnitsLoading}
data={datapoint}
yAxisUnit={currentYAxisUnit}
dataSource={DataSource.METRICS}
error={queries[index].error as APIError}
setWarning={setWarning}
/>
</div>
);
})}
{responseData.map((datapoint, index) => (
<div
className="time-series-view"
// eslint-disable-next-line react/no-array-index-key
key={index}
>
<TimeSeriesView
isFilterApplied={false}
isError={queries[index].isError}
isLoading={queries[index].isLoading}
data={datapoint}
yAxisUnit={yAxisUnit}
dataSource={DataSource.METRICS}
error={queries[index].error as APIError}
setWarning={setWarning}
/>
</div>
))}
</div>
</>
);

View File

@@ -1,6 +1,4 @@
import { render, screen } from '@testing-library/react';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import * as useOptionsMenuHooks from 'container/OptionsMenu';
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
@@ -14,18 +12,13 @@ import { MemoryRouter } from 'react-router-dom';
import { useSearchParams } from 'react-router-dom-v5-compat';
import store from 'store';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import { DataSource } from 'types/common/queryBuilder';
import Explorer from '../Explorer';
import * as useGetMetricsHooks from '../utils';
const mockSetSearchParams = jest.fn();
const queryClient = new QueryClient();
const mockUpdateAllQueriesOperators = jest
.fn()
.mockReturnValue(initialQueriesMap[DataSource.METRICS]);
const mockUpdateAllQueriesOperators = jest.fn();
const mockUseQueryBuilderData = {
handleRunQuery: jest.fn(),
stagedQuery: initialQueriesMap[DataSource.METRICS],
@@ -133,30 +126,6 @@ jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
} as any);
const Y_AXIS_UNIT_SELECTOR_TEST_ID = 'y-axis-unit-selector';
const mockMetric: MetricMetadata = {
type: MetricType.SUM,
description: 'metric1 description',
unit: 'metric1 unit',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
};
function renderExplorer(): void {
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
}
describe('Explorer', () => {
beforeEach(() => {
jest.clearAllMocks();
@@ -173,7 +142,17 @@ describe('Explorer', () => {
mockSetSearchParams,
]);
renderExplorer();
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(mockUpdateAllQueriesOperators).toHaveBeenCalledWith(
initialQueriesMap[DataSource.METRICS],
@@ -187,13 +166,18 @@ describe('Explorer', () => {
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).toBeChecked();
@@ -204,132 +188,20 @@ describe('Explorer', () => {
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).not.toBeChecked();
});
it('should not render y axis unit selector for single metric which has a unit', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
});
it('should not render y axis unit selector for mutliple metrics with same unit', () => {
(useSearchParams as jest.Mock).mockReturnValueOnce([
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
});
it('should hide y axis unit selector for multiple metrics with different units', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
// One chart per query toggle should be disabled
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).toBeDisabled();
});
it('should render empty y axis unit selector for a single metric with no unit', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [
{
type: MetricType.SUM,
description: 'metric1 description',
unit: '',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
},
],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).toBeInTheDocument();
expect(yAxisUnitSelector).toHaveTextContent('Please select a unit');
});
it('one chart per query should be off and disabled when there is only one query', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric],
});
renderExplorer();
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).not.toBeChecked();
expect(oneChartPerQueryToggle).toBeDisabled();
});
it('one chart per query should enabled by default when there are multiple metrics with the same unit', () => {
const mockQueryData = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric1',
},
};
const mockStagedQueryWithMultipleQueries = {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [mockQueryData, mockQueryData],
},
};
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
...mockUseQueryBuilderData,
stagedQuery: mockStagedQueryWithMultipleQueries,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).toBeEnabled();
});
});

View File

@@ -1,180 +0,0 @@
import { render, RenderResult, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { UpdateMetricMetadataResponse } from 'api/metricsExplorer/updateMetricMetadata';
import * as useUpdateMetricMetadataHooks from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { UseUpdateMetricMetadataProps } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { UseMutationResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import TimeSeries from '../TimeSeries';
import { TimeSeriesProps } from '../types';
type MockUpdateMetricMetadata = UseMutationResult<
SuccessResponse<UpdateMetricMetadataResponse> | ErrorResponse,
Error,
UseUpdateMetricMetadataProps
>;
const mockUpdateMetricMetadata = jest.fn();
jest
.spyOn(useUpdateMetricMetadataHooks, 'useUpdateMetricMetadata')
.mockReturnValue(({
mutate: mockUpdateMetricMetadata,
isLoading: false,
} as Partial<MockUpdateMetricMetadata>) as MockUpdateMetricMetadata);
jest.mock('container/TimeSeriesView/TimeSeriesView', () => ({
__esModule: true,
default: jest.fn().mockReturnValue(
<div role="img" aria-label="warning">
TimeSeriesView
</div>,
),
}));
jest.mock('react-query', () => ({
...jest.requireActual('react-query'),
useQueryClient: jest.fn().mockReturnValue({
invalidateQueries: jest.fn(),
}),
useQueries: jest.fn().mockImplementation((queries: any[]) =>
queries.map(() => ({
data: undefined,
isLoading: false,
isError: false,
error: undefined,
})),
),
}));
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: jest.fn().mockReturnValue({
globalTime: {
selectedTime: '5min',
maxTime: 1713738000000,
minTime: 1713734400000,
},
}),
}));
const mockMetric: MetricMetadata = {
type: MetricType.SUM,
description: 'metric1 description',
unit: 'metric1 unit',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
};
const mockSetWarning = jest.fn();
const mockSetIsMetricDetailsOpen = jest.fn();
const mockSetYAxisUnit = jest.fn();
function renderTimeSeries(
overrides: Partial<TimeSeriesProps> = {},
): RenderResult {
return render(
<TimeSeries
showOneChartPerQuery={false}
setWarning={mockSetWarning}
areAllMetricUnitsSame={false}
isMetricUnitsLoading={false}
metricUnits={[]}
metricNames={[]}
metrics={[]}
isMetricUnitsError={false}
handleOpenMetricDetails={mockSetIsMetricDetailsOpen}
yAxisUnit="count"
setYAxisUnit={mockSetYAxisUnit}
showYAxisUnitSelector={false}
// eslint-disable-next-line react/jsx-props-no-spreading
{...overrides}
/>,
);
}
describe('TimeSeries', () => {
it('should render a warning icon when a metric has no unit among multiple metrics', () => {
const user = userEvent.setup();
const { container } = renderTimeSeries({
metricUnits: ['', 'count'],
metricNames: ['metric1', 'metric2'],
metrics: [undefined, undefined],
});
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
user.hover(alertIcon);
waitFor(() =>
expect(
screen.findByText('This metric does not have a unit'),
).toBeInTheDocument(),
);
});
it('clicking on warning icon tooltip should open metric details modal', async () => {
const user = userEvent.setup();
const { container } = renderTimeSeries({
metricUnits: ['', 'count'],
metricNames: ['metric1', 'metric2'],
metrics: [mockMetric, mockMetric],
yAxisUnit: 'seconds',
});
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
user.hover(alertIcon);
const metricDetailsLink = await screen.findByText('metric details');
user.click(metricDetailsLink);
waitFor(() =>
expect(mockSetIsMetricDetailsOpen).toHaveBeenCalledWith('metric1'),
);
});
// TODO: Unskip this test once the save unit button is implemented
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
it.skip('shows Save unit button when metric had no unit but one is selected', () => {
const { findByText, getByRole } = renderTimeSeries({
metricUnits: [undefined],
metricNames: ['metric1'],
metrics: [mockMetric],
yAxisUnit: 'seconds',
});
expect(
findByText('Save the selected unit for this metric?'),
).toBeInTheDocument();
const yesButton = getByRole('button', { name: 'Yes' });
expect(yesButton).toBeInTheDocument();
expect(yesButton).toBeEnabled();
});
// TODO: Unskip this test once the save unit button is implemented
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
it.skip('clicking on save unit button shoould upated metric metadata', () => {
const user = userEvent.setup();
const { getByRole } = renderTimeSeries({
metricUnits: [''],
metricNames: ['metric1'],
metrics: [mockMetric],
yAxisUnit: 'seconds',
});
const yesButton = getByRole('button', { name: /Yes/i });
user.click(yesButton);
expect(mockUpdateMetricMetadata).toHaveBeenCalledWith(
{
metricName: 'metric1',
payload: expect.objectContaining({ unit: 'seconds' }),
},
expect.objectContaining({
onSuccess: expect.any(Function),
onError: expect.any(Function),
}),
);
});
});

View File

@@ -1,161 +0,0 @@
import { renderHook } from '@testing-library/react';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { initialQueriesMap } from 'constants/queryBuilder';
import * as useGetMultipleMetricsHook from 'hooks/metricsExplorer/useGetMultipleMetrics';
import { UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
MetricMetadata,
MetricMetadataResponse,
} from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
IBuilderFormula,
IBuilderQuery,
Query,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import {
getMetricUnits,
splitQueryIntoOneChartPerQuery,
useGetMetrics,
} from '../utils';
const MOCK_QUERY_DATA_1: IBuilderQuery = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric1',
},
};
const MOCK_QUERY_DATA_2: IBuilderQuery = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric2',
},
};
const MOCK_FORMULA_DATA: IBuilderFormula = {
expression: '1 + 1',
disabled: false,
queryName: 'Mock Formula',
legend: 'Mock Legend',
};
const MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA: Query = {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [MOCK_QUERY_DATA_1, MOCK_QUERY_DATA_2],
queryFormulas: [MOCK_FORMULA_DATA, MOCK_FORMULA_DATA],
},
};
describe('splitQueryIntoOneChartPerQuery', () => {
it('should split a query with multiple queryData to multiple distinct queries, each with a single queryData', () => {
const result = splitQueryIntoOneChartPerQuery(
MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA,
['metric1', 'metric2'],
[undefined, 'unit2'],
);
expect(result).toHaveLength(4);
// Verify query 1 has the correct data
expect(result[0].builder.queryData).toHaveLength(1);
expect(result[0].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_1);
expect(result[0].builder.queryFormulas).toHaveLength(0);
expect(result[0].unit).toBeUndefined();
// Verify query 2 has the correct data
expect(result[1].builder.queryData).toHaveLength(1);
expect(result[1].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_2);
expect(result[1].builder.queryFormulas).toHaveLength(0);
expect(result[1].unit).toBe('unit2');
// Verify query 3 has the correct data
expect(result[2].builder.queryFormulas).toHaveLength(1);
expect(result[2].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
expect(result[2].builder.queryData).toHaveLength(2); // 2 disabled queries
expect(result[2].builder.queryData[0].disabled).toBe(true);
expect(result[2].builder.queryData[1].disabled).toBe(true);
expect(result[2].unit).toBeUndefined();
// Verify query 4 has the correct data
expect(result[3].builder.queryFormulas).toHaveLength(1);
expect(result[3].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
expect(result[3].builder.queryData).toHaveLength(2); // 2 disabled queries
expect(result[3].builder.queryData[0].disabled).toBe(true);
expect(result[3].builder.queryData[1].disabled).toBe(true);
expect(result[3].unit).toBeUndefined();
});
});
const MOCK_METRIC_METADATA: MetricMetadata = {
description: 'Metric 1 description',
unit: 'unit1',
type: MetricType.GAUGE,
temporality: Temporality.DELTA,
isMonotonic: true,
};
describe('useGetMetrics', () => {
beforeEach(() => {
jest
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
.mockReturnValue([
({
isLoading: false,
isError: false,
data: {
httpStatusCode: 200,
data: {
status: 'success',
data: MOCK_METRIC_METADATA,
},
},
} as Partial<
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
]);
});
it('should return the correct metrics data', () => {
const { result } = renderHook(() => useGetMetrics(['metric1']));
expect(result.current.metrics).toHaveLength(1);
expect(result.current.metrics[0]).toBeDefined();
expect(result.current.metrics[0]).toEqual(MOCK_METRIC_METADATA);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return an array of undefined values of the correct length when metrics data is not yet loaded', () => {
jest
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
.mockReturnValue([
({
isLoading: true,
isError: false,
} as Partial<
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
]);
const { result } = renderHook(() => useGetMetrics(['metric1']));
expect(result.current.metrics).toHaveLength(1);
expect(result.current.metrics[0]).toBeUndefined();
});
});
describe('getMetricUnits', () => {
it('should return the same unit for units that are not known to the universal unit mapper', () => {
const result = getMetricUnits([MOCK_METRIC_METADATA]);
expect(result).toHaveLength(1);
expect(result[0]).toEqual(MOCK_METRIC_METADATA.unit);
});
it('should return universal unit for units that are known to the universal unit mapper', () => {
const result = getMetricUnits([{ ...MOCK_METRIC_METADATA, unit: 'seconds' }]);
expect(result).toHaveLength(1);
expect(result[0]).toBe('s');
});
});

View File

@@ -3,7 +3,6 @@ import { Dispatch, SetStateAction } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
export enum ExplorerTabs {
TIME_SERIES = 'time-series',
@@ -13,16 +12,6 @@ export enum ExplorerTabs {
export interface TimeSeriesProps {
showOneChartPerQuery: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
areAllMetricUnitsSame: boolean;
isMetricUnitsLoading: boolean;
isMetricUnitsError: boolean;
metricUnits: (string | undefined)[];
metricNames: string[];
metrics: (MetricMetadata | undefined)[];
handleOpenMetricDetails: (metricName: string) => void;
yAxisUnit: string | undefined;
setYAxisUnit: (unit: string) => void;
showYAxisUnitSelector: boolean;
}
export interface RelatedMetricsProps {

View File

@@ -1,40 +1,20 @@
import { mapMetricUnitToUniversalUnit } from 'components/YAxisUnitSelector/utils';
import { useGetMultipleMetrics } from 'hooks/metricsExplorer/useGetMultipleMetrics';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { v4 as uuid } from 'uuid';
/**
* Split a query with multiple queryData into multiple distinct queries, each with a single queryData.
* @param query - The query to split
* @param units - The units of the metrics, can be undefined if the metric has no unit
* @returns The split queries
*/
export const splitQueryIntoOneChartPerQuery = (
query: Query,
metricNames: string[],
units: (string | undefined)[],
): Query[] => {
export const splitQueryIntoOneChartPerQuery = (query: Query): Query[] => {
const queries: Query[] = [];
query.builder.queryData.forEach((currentQuery) => {
if (currentQuery.aggregateAttribute?.key) {
const metricIndex = metricNames.indexOf(
currentQuery.aggregateAttribute?.key,
);
const unit = metricIndex >= 0 ? units[metricIndex] : undefined;
const newQuery = {
...query,
id: uuid(),
builder: {
...query.builder,
queryData: [currentQuery],
queryFormulas: [],
},
unit,
};
queries.push(newQuery);
}
const newQuery = {
...query,
id: uuid(),
builder: {
...query.builder,
queryData: [currentQuery],
queryFormulas: [],
},
};
queries.push(newQuery);
});
query.builder.queryFormulas.forEach((currentFormula) => {
@@ -55,43 +35,3 @@ export const splitQueryIntoOneChartPerQuery = (
return queries;
};
/**
* Hook to get data for multiple metrics with a synchronous loading and error state
* @param metricNames - The names of the metrics to get
* @param isEnabled - Whether the hook is enabled
* @returns The loading state, the metrics data, and the error state
*/
export function useGetMetrics(
metricNames: string[],
isEnabled = true,
): {
isLoading: boolean;
isError: boolean;
metrics: (MetricMetadata | undefined)[];
} {
const metricsData = useGetMultipleMetrics(metricNames, {
enabled: metricNames.length > 0 && isEnabled,
});
return {
isLoading: metricsData.some((metric) => metric.isLoading),
metrics: metricsData
.map((metric) => metric.data?.data)
.map((data) => data?.data),
isError: metricsData.some((metric) => metric.isError),
};
}
/**
* To get the units of the metrics in the universal unit standard.
* If the unit is not known to the universal unit mapper, it will return the unit as is.
* @param metrics - The metrics to get the units for
* @returns The units of the metrics, can be undefined if the metric has no unit
*/
export function getMetricUnits(
metrics: (MetricMetadata | undefined)[],
): (string | undefined)[] {
return metrics
.map((metric) => metric?.unit)
.map((unit) => mapMetricUnitToUniversalUnit(unit) || undefined);
}

View File

@@ -131,8 +131,8 @@ function MetricDetails({
>
Open in Explorer
</Button>
{/* Show the inspect button if the metric type is GAUGE */}
{showInspectFeature && openInspectModal && (
{/* Show the inspect button based on the feature flag. Will remove before releasing the feature */}
{showInspectFeature && (
<Button
className="inspect-metrics-button"
aria-label="Inspect Metric"

View File

@@ -11,7 +11,7 @@ export interface MetricDetailsProps {
isOpen: boolean;
metricName: string | null;
isModalTimeSelection: boolean;
openInspectModal?: (metricName: string) => void;
openInspectModal: (metricName: string) => void;
}
export interface DashboardsAndAlertsPopoverProps {

View File

@@ -370,6 +370,10 @@ function NewWidget({
// this has been moved here from the left container
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
const updatedQuery = cloneDeep(stagedQuery || initialQueriesMap.metrics);
if (updatedQuery?.builder?.queryData?.[0]) {
updatedQuery.builder.queryData[0].pageSize = 10;
}
if (selectedWidget) {
if (selectedGraph === PANEL_TYPES.LIST) {
return {
@@ -415,12 +419,16 @@ function NewWidget({
useEffect(() => {
if (stagedQuery) {
setIsLoadingPanelData(false);
const updatedStagedQuery = cloneDeep(stagedQuery);
if (updatedStagedQuery?.builder?.queryData?.[0]) {
updatedStagedQuery.builder.queryData[0].pageSize = 10;
}
setRequestData((prev) => ({
...prev,
selectedTime: selectedTime.enum || prev.selectedTime,
globalSelectedInterval: customGlobalSelectedInterval,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery,
query: updatedStagedQuery,
fillGaps: selectedWidget.fillSpans || false,
isLogScale: selectedWidget.isLogScale || false,
formatForWeb:

View File

@@ -206,10 +206,6 @@
.ant-select-selector {
border-color: var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
.ant-input-number {

View File

@@ -1,5 +1,7 @@
import { Select } from 'antd';
import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { useEffect, useState } from 'react';
import { MetricAggregateOperator } from 'types/common/queryBuilder';
interface SpaceAggregationOptionsProps {
panelType: PANEL_TYPES | null;
@@ -20,13 +22,39 @@ export default function SpaceAggregationOptions({
operators,
qbVersion,
}: SpaceAggregationOptionsProps): JSX.Element {
const placeHolderText =
panelType === PANEL_TYPES.VALUE || qbVersion === 'v3' ? 'Sum' : 'Sum By';
const [defaultValue, setDefaultValue] = useState(
selectedValue || placeHolderText,
);
useEffect(() => {
if (!selectedValue) {
if (
aggregatorAttributeType === ATTRIBUTE_TYPES.HISTOGRAM ||
aggregatorAttributeType === ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
setDefaultValue(MetricAggregateOperator.P90);
onSelect(MetricAggregateOperator.P90);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.SUM) {
setDefaultValue(MetricAggregateOperator.SUM);
onSelect(MetricAggregateOperator.SUM);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.GAUGE) {
setDefaultValue(MetricAggregateOperator.AVG);
onSelect(MetricAggregateOperator.AVG);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [aggregatorAttributeType]);
return (
<div
className="spaceAggregationOptionsContainer"
key={aggregatorAttributeType}
>
<Select
defaultValue={selectedValue}
defaultValue={defaultValue}
style={{ minWidth: '5.625rem' }}
disabled={disabled}
onChange={onSelect}

View File

@@ -1,16 +0,0 @@
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.option-renderer-tooltip {
pointer-events: none;
}

View File

@@ -1,4 +1,4 @@
import './OptionRenderer.styles.scss';
import './QueryBuilderSearch.styles.scss';
import { Tooltip } from 'antd';
@@ -13,11 +13,7 @@ function OptionRenderer({
return (
<span className="option">
{type ? (
<Tooltip
title={`${value}`}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<Tooltip title={`${value}`} placement="topLeft">
<div className="selectOptionContainer">
<div className="option-value">{value}</div>
<div className="option-meta-data-container">
@@ -33,11 +29,7 @@ function OptionRenderer({
</div>
</Tooltip>
) : (
<Tooltip
title={label}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<Tooltip title={label} placement="topLeft">
<span>{label}</span>
</Tooltip>
)}

View File

@@ -5,6 +5,19 @@
gap: 12px;
}
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.logs-popup {
&.hide-scroll {
.rc-virtual-list-holder {

View File

@@ -1,88 +0,0 @@
import { render, screen } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { ReduceToFilter } from './ReduceToFilter';
const mockOnChange = jest.fn();
function baseQuery(overrides: Partial<IBuilderQuery> = {}): IBuilderQuery {
return {
dataSource: 'traces',
aggregations: [],
groupBy: [],
orderBy: [],
legend: '',
limit: null,
having: { expression: '' },
...overrides,
} as IBuilderQuery;
}
describe('ReduceToFilter', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('initializes with default avg when no reduceTo is set', () => {
render(<ReduceToFilter query={baseQuery()} onChange={mockOnChange} />);
expect(screen.getByTestId('reduce-to')).toBeInTheDocument();
expect(
screen.getByText('Average of values in timeframe'),
).toBeInTheDocument();
});
it('initializes from query.aggregations[0].reduceTo', () => {
render(
<ReduceToFilter
query={baseQuery({
aggregations: [{ reduceTo: 'sum' } as any],
aggregateAttribute: { key: 'test', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Sum of values in timeframe')).toBeInTheDocument();
});
it('initializes from query.reduceTo when aggregations[0].reduceTo is not set', () => {
render(
<ReduceToFilter
query={baseQuery({
reduceTo: 'max',
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Max of values in timeframe')).toBeInTheDocument();
});
it('updates to sum when aggregateAttribute.type is SUM', async () => {
const { rerender } = render(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
rerender(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test2', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
const reduceToFilterText = (await screen.findByText(
'Sum of values in timeframe',
)) as HTMLElement;
expect(reduceToFilterText).toBeInTheDocument();
});
});

View File

@@ -1,7 +1,6 @@
import { Select } from 'antd';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { REDUCE_TO_VALUES } from 'constants/queryBuilder';
import { memo, useEffect, useRef, useState } from 'react';
import { memo } from 'react';
import { MetricAggregation } from 'types/api/v5/queryRange';
// ** Types
import { ReduceOperators } from 'types/common/queryBuilder';
@@ -13,46 +12,19 @@ export const ReduceToFilter = memo(function ReduceToFilter({
query,
onChange,
}: ReduceToFilterProps): JSX.Element {
const isMounted = useRef<boolean>(false);
const [currentValue, setCurrentValue] = useState<
SelectOption<ReduceOperators, string>
>(REDUCE_TO_VALUES[2]); // default to avg
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
const currentValue =
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[0];
const handleChange = (
newValue: SelectOption<ReduceOperators, string>,
): void => {
setCurrentValue(newValue);
onChange(newValue.value);
};
useEffect(
() => {
if (!isMounted.current) {
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
setCurrentValue(
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[2],
);
isMounted.current = true;
return;
}
const aggregationAttributeType = query.aggregateAttribute?.type as
| MetricType
| undefined;
if (aggregationAttributeType === MetricType.SUM) {
handleChange(REDUCE_TO_VALUES[1]);
} else {
handleChange(REDUCE_TO_VALUES[2]);
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[query.aggregateAttribute?.key],
);
return (
<Select
placeholder="Reduce to"

View File

@@ -1,32 +0,0 @@
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQueries, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
type QueryResult = UseQueryResult<
SuccessResponseV2<MetricMetadataResponse>,
Error
>;
type UseGetMultipleMetrics = (
metricNames: string[],
options?: UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>,
headers?: Record<string, string>,
) => QueryResult[];
export const useGetMultipleMetrics: UseGetMultipleMetrics = (
metricNames,
options,
headers,
) =>
useQueries(
metricNames.map(
(metricName) =>
({
queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
queryFn: ({ signal }) => getMetricMetadata(metricName, signal, headers),
...options,
} as UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>),
),
);

View File

@@ -5,7 +5,7 @@ import updateMetricMetadata, {
import { useMutation, UseMutationResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
export interface UseUpdateMetricMetadataProps {
interface UseUpdateMetricMetadataProps {
metricName: string;
payload: UpdateMetricMetadataProps;
}

View File

@@ -188,7 +188,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
spaceAggregation: '',
},
],
}),
@@ -239,7 +239,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
spaceAggregation: '',
},
],
}),
@@ -315,7 +315,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.AVG,
metricName: 'new_gauge',
temporality: '',
spaceAggregation: MetricAggregateOperator.AVG,
spaceAggregation: '',
},
],
}),

View File

@@ -317,7 +317,7 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
spaceAggregation: '',
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
@@ -326,20 +326,7 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
];
} else if (
newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.HISTOGRAM ||
newQuery.aggregateAttribute?.type ===
ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
newQuery.aggregations = [
{
timeAggregation: '',
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.P90,
spaceAggregation: '',
},
];
} else {

View File

@@ -1,238 +0,0 @@
import { renderHook } from '@testing-library/react';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { useGetMetrics } from 'container/MetricsExplorer/Explorer/utils';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import { useQueryBuilder } from './queryBuilder/useQueryBuilder';
import useGetYAxisUnit from './useGetYAxisUnit';
jest.mock('./queryBuilder/useQueryBuilder');
jest.mock('container/MetricsExplorer/Explorer/utils', () => ({
...jest.requireActual('container/MetricsExplorer/Explorer/utils'),
useGetMetrics: jest.fn(),
}));
const mockUseQueryBuilder = useQueryBuilder as jest.MockedFunction<
typeof useQueryBuilder
>;
const mockUseGetMetrics = useGetMetrics as jest.MockedFunction<
typeof useGetMetrics
>;
const MOCK_METRIC_1 = {
unit: UniversalYAxisUnit.BYTES,
} as MetricMetadata;
const MOCK_METRIC_2 = {
unit: UniversalYAxisUnit.SECONDS,
} as MetricMetadata;
const MOCK_METRIC_3 = {
unit: '',
} as MetricMetadata;
function createMockCurrentQuery(
queryType: EQueryType,
queryData: Query['builder']['queryData'] = [],
): Query {
return {
queryType,
promql: [],
builder: {
queryData,
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: [],
id: 'test-id',
};
}
describe('useGetYAxisUnit', () => {
beforeEach(() => {
jest.clearAllMocks();
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [],
});
mockUseQueryBuilder.mockReturnValue(({
currentQuery: undefined,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
});
it('should return undefined yAxisUnit and not call useGetMetrics when currentQuery is null', async () => {
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when queryType is PROM', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.PROM);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when queryType is CLICKHOUSE', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.CLICKHOUSE);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when dataSource is TRACES', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.TRACES,
aggregateAttribute: { key: 'trace_metric' },
} as Query['builder']['queryData'][0],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when dataSource is LOGS', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.LOGS,
aggregateAttribute: { key: 'log_metric' },
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should extract all metric names from queryData when no selected query name is provided', () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric1' },
queryName: 'query1',
} as Query['builder']['queryData'][number],
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric2' },
queryName: 'query2',
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderHook(() => useGetYAxisUnit());
expect(mockUseGetMetrics).toHaveBeenCalledWith(['metric1', 'metric2'], true);
});
it('should extract metric name for the selected query only when one is provided', () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric1' },
queryName: 'query1',
} as Query['builder']['queryData'][number],
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric2' },
queryName: 'query2',
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderHook(() => useGetYAxisUnit('query2'));
expect(mockUseGetMetrics).toHaveBeenCalledWith(['metric2'], true);
});
it('should return the unit when there is a single metric with a non-empty unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBe(UniversalYAxisUnit.BYTES);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return undefined when there is a single metric with no unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_3],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return the unit when all metrics have the same non-empty unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1, MOCK_METRIC_1],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBe(UniversalYAxisUnit.BYTES);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return undefined when metrics have different units', async () => {
mockUseGetMetrics.mockReturnValueOnce({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1, MOCK_METRIC_2],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
});

View File

@@ -1,108 +0,0 @@
import {
getMetricUnits,
useGetMetrics,
} from 'container/MetricsExplorer/Explorer/utils';
import { useEffect, useMemo, useState } from 'react';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { useQueryBuilder } from './queryBuilder/useQueryBuilder';
interface UseGetYAxisUnitResult {
yAxisUnit: string | undefined;
isLoading: boolean;
isError: boolean;
}
/**
* Hook to get the y-axis unit for a given metrics-based query.
* @param selectedQueryName - The name of the query to get the y-axis unit for.
* @param params.enabled - Active state of the hook.
* @returns `{ yAxisUnit, isLoading, isError }` The y-axis unit, loading state, and error state
*/
function useGetYAxisUnit(
selectedQueryName?: string,
params: {
enabled?: boolean;
} = {
enabled: true,
},
): UseGetYAxisUnitResult {
const { stagedQuery } = useQueryBuilder();
const [yAxisUnit, setYAxisUnit] = useState<string | undefined>();
const metricNames: string[] | null = useMemo(() => {
// If the query type is not QUERY_BUILDER, return null
if (stagedQuery?.queryType !== EQueryType.QUERY_BUILDER) {
return null;
}
// If the data source is not METRICS, return null
const dataSource = stagedQuery?.builder?.queryData?.[0]?.dataSource;
if (dataSource !== DataSource.METRICS) {
return null;
}
const currentMetricNames: string[] = [];
// If a selected query name is provided, return the metric name for that query only
if (selectedQueryName) {
stagedQuery?.builder?.queryData?.forEach((query) => {
if (
query.queryName === selectedQueryName &&
query.aggregateAttribute?.key
) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames.length ? currentMetricNames : null;
}
// Else, return all metric names
stagedQuery?.builder?.queryData?.forEach((query) => {
if (query.aggregateAttribute?.key) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames.length ? currentMetricNames : null;
}, [
selectedQueryName,
stagedQuery?.builder?.queryData,
stagedQuery?.queryType,
]);
const { metrics, isLoading, isError } = useGetMetrics(
metricNames ?? [],
!!metricNames && params?.enabled,
);
const units = useMemo(() => getMetricUnits(metrics), [metrics]);
const areAllMetricUnitsSame = useMemo(
() => units.every((unit) => unit === units[0]),
[units],
);
useEffect(() => {
// If there are no metrics, set the y-axis unit to undefined
if (units.length === 0) {
setYAxisUnit(undefined);
// If there is one metric and it has a non-empty unit, set the y-axis unit to it
} else if (units.length === 1 && units[0] !== '') {
setYAxisUnit(units[0]);
// If all metrics have the same non-empty unit, set the y-axis unit to it
} else if (areAllMetricUnitsSame) {
if (units[0] !== '') {
setYAxisUnit(units[0]);
} else {
setYAxisUnit(undefined);
}
// If there is more than one metric and they have different units, set the y-axis unit to undefined
} else if (units.length > 1 && !areAllMetricUnitsSame) {
setYAxisUnit(undefined);
// If there is one metric and it has an empty unit, set the y-axis unit to undefined
} else if (units.length === 1 && units[0] === '') {
setYAxisUnit(undefined);
}
}, [units, areAllMetricUnitsSame]);
return { yAxisUnit, isLoading, isError };
}
export default useGetYAxisUnit;

View File

@@ -1,15 +0,0 @@
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
export interface MetricMetadata {
description: string;
type: MetricType;
unit: string;
temporality: Temporality;
isMonotonic: boolean;
}
export interface MetricMetadataResponse {
status: string;
data: MetricMetadata;
}

go.mod
View File

@@ -11,6 +11,7 @@ require (
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/bytedance/sonic v1.14.1
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.14.1
github.com/dgraph-io/ristretto/v2 v2.3.0
@@ -89,7 +90,6 @@ require (
require (
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.1 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect

View File

@@ -80,17 +80,6 @@ func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, er
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
@@ -113,16 +102,6 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
@@ -142,21 +121,3 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}

View File

@@ -61,7 +61,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
response := []promotetypes.PromotePath{}
for _, path := range promotedPaths {
fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
path = telemetrylogs.BodyJSONStringSearchPrefix + path
path = telemetrytypes.BodyJSONStringSearchPrefix + path
item := promotetypes.PromotePath{
Path: path,
Promote: true,
@@ -77,7 +77,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
// add the paths that are not promoted but have indexes
for path, indexes := range aggr {
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path = telemetrylogs.BodyJSONStringSearchPrefix + path
path = telemetrytypes.BodyJSONStringSearchPrefix + path
response = append(response, promotetypes.PromotePath{
Path: path,
Indexes: indexes,

View File

@@ -10,9 +10,11 @@ import (
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
type builderQuery[T any] struct {
@@ -248,6 +250,40 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
return nil, err
}
// merge body_json and promoted into body
if q.spec.Signal == telemetrytypes.SignalLogs {
switch typedPayload := payload.(type) {
case *qbtypes.RawData:
for _, rr := range typedPayload.Rows {
seeder := func() error {
body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
if !ok {
return nil
}
promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
if !ok {
return nil
}
seed(promoted, body)
str, err := sonic.MarshalString(body)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
}
rr.Data["body"] = str
return nil
}
err := seeder()
if err != nil {
return nil, err
}
delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
}
payload = typedPayload
}
}
return &qbtypes.Result{
Type: q.kind,
Value: payload,
@@ -375,3 +411,18 @@ func decodeCursor(cur string) (int64, error) {
}
return strconv.ParseInt(string(b), 10, 64)
}
func seed(promoted map[string]any, body map[string]any) {
for key, fromValue := range promoted {
if toValue, ok := body[key]; !ok {
body[key] = fromValue
} else {
if fromValue, ok := fromValue.(map[string]any); ok {
if toValue, ok := toValue.(map[string]any); ok {
seed(fromValue, toValue)
body[key] = toValue
}
}
}
}
}
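
To make the merge semantics above concrete, here is a minimal standalone sketch (hypothetical main package; the sample data is invented for illustration). It copies the seed helper and shows that keys already present in body win on conflict, while nested maps merge recursively before the result is re-serialized with sonic, mirroring the executeWithContext flow:

package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

// copy of the seed helper above: fold promoted paths into body without
// overwriting keys that already exist in body; nested maps merge recursively
func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

func main() {
	// sample row, shaped like body_json and body_promoted after a scan
	body := map[string]any{"level": "error", "user": map[string]any{"id": 42}}
	promoted := map[string]any{"level": "info", "user": map[string]any{"email": "a@b.co"}}

	seed(promoted, body)

	// body wins on conflict: level stays "error"; user gains the promoted email
	str, err := sonic.MarshalString(body)
	if err != nil {
		panic(err)
	}
	fmt.Println(str) // {"level":"error","user":{"id":42,"email":"a@b.co"}} (key order may vary)
}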

View File

@@ -14,6 +14,7 @@ import (
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
var (
@@ -51,7 +52,6 @@ func consume(rows driver.Rows, kind qbtypes.RequestType, queryWindow *qbtypes.Ti
}
func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbtypes.Step, queryName string) (*qbtypes.TimeSeriesData, error) {
colTypes := rows.ColumnTypes()
colNames := rows.Columns()
@@ -354,10 +354,22 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
colTypes := rows.ColumnTypes()
colCnt := len(colNames)
// Helper that decides scan target per column based on DB type
makeScanTarget := func(i int) any {
dbt := strings.ToUpper(colTypes[i].DatabaseTypeName())
if strings.HasPrefix(dbt, "JSON") {
// Since the driver fails to decode JSON/Dynamic into native Go values, we read it as raw bytes
// TODO: check in the future whether this is fixed in the driver
var v []byte
return &v
}
return reflect.New(colTypes[i].ScanType()).Interface()
}
// Build a template slice of correctly-typed pointers once
scanTpl := make([]any, colCnt)
for i, ct := range colTypes {
scanTpl[i] = reflect.New(ct.ScanType()).Interface()
for i := range colTypes {
scanTpl[i] = makeScanTarget(i)
}
var outRows []*qbtypes.RawRow
@@ -366,7 +378,7 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// fresh copy of the scan slice (otherwise the driver reuses pointers)
scan := make([]any, colCnt)
for i := range scanTpl {
scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
scan[i] = makeScanTarget(i)
}
if err := rows.Scan(scan...); err != nil {
@@ -383,6 +395,28 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()
// Post-process JSON columns: normalize into structured values
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
if len(x) > 0 {
var v any
if err := sonic.Unmarshal(x, &v); err == nil {
val = v
}
}
case string:
if x != "" {
var v any
if err := sonic.Unmarshal([]byte(x), &v); err == nil {
val = v
}
}
default:
// already a structured type (map[string]any, []any, etc.)
}
}
// special-case: timestamp column
if name == "timestamp" || name == "timestamp_datetime" {
switch t := val.(type) {

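A self-contained sketch of the normalization step above, assuming the cell was scanned into []byte as makeScanTarget arranges (normalizeJSONCell is a hypothetical name; no live ClickHouse connection is involved):

package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

// normalizeJSONCell mimics the post-processing above: a cell read as raw
// bytes from a JSON column is decoded into native Go values (map[string]any,
// []any, ...); empty or non-JSON payloads are passed through.
func normalizeJSONCell(raw []byte) any {
	if len(raw) == 0 {
		return nil
	}
	var v any
	if err := sonic.Unmarshal(raw, &v); err != nil {
		return string(raw) // not valid JSON after all; keep the raw text
	}
	return v
}

func main() {
	cell := []byte(`{"user":{"id":42},"tags":["a","b"]}`)
	fmt.Printf("%#v\n", normalizeJSONCell(cell))
}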
View File

@@ -78,7 +78,7 @@ func newProvider(
telemetryMetadataStore,
)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
settings,
telemetryMetadataStore,
@@ -102,14 +102,13 @@ func newProvider(
// Create log statement builder
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, telemetryMetadataStore)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
@@ -117,7 +116,6 @@ func newProvider(
telemetrylogs.DefaultFullTextColumn,
logFieldMapper,
logConditionBuilder,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder(
@@ -128,7 +126,6 @@ func newProvider(
logResourceFilterStmtBuilder,
logAggExprRewriter,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)

View File

@@ -520,7 +520,7 @@ func (h *HostsRepo) GetHostList(ctx context.Context, orgID valuer.UUID, req mode
if _, ok := hostAttrs[record.HostName]; ok {
record.Meta = hostAttrs[record.HostName]
}
if osType, ok := record.Meta[GetDotMetrics("os_type")]; ok {
if osType, ok := record.Meta["os_type"]; ok {
record.OS = osType
}
record.Active = activeHosts[record.HostName]

View File

@@ -11,13 +11,16 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"go.uber.org/zap"
)
@@ -128,6 +131,40 @@ func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
return err
}
func (ic *LogParsingPipelineController) getDefaultPipelines() ([]pipelinetypes.GettablePipeline, error) {
defaultPipelines := []pipelinetypes.GettablePipeline{}
if querybuilder.BodyJSONQueryEnabled {
preprocessingPipeline := pipelinetypes.GettablePipeline{
StoreablePipeline: pipelinetypes.StoreablePipeline{
Name: "Default Pipeline - PreProcessing Body",
Alias: "NormalizeBodyDefault",
Enabled: true,
},
Filter: &v3.FilterSet{
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "body",
},
Operator: v3.FilterOperatorExists,
},
},
},
Config: []pipelinetypes.PipelineOperator{
{
ID: uuid.NewString(),
Type: "normalize",
Enabled: true,
If: "body != nil",
},
},
}
defaultPipelines = append(defaultPipelines, preprocessingPipeline)
}
return defaultPipelines, nil
}
// Returns effective list of pipelines including user created
// pipelines and pipelines for installed integrations
func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
@@ -258,6 +295,13 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
return nil, "", err
}
// recommend default pipelines along with user created pipelines
defaultPipelines, err := pc.getDefaultPipelines()
if err != nil {
return nil, "", model.InternalError(fmt.Errorf("failed to get default pipelines: %w", err))
}
pipelinesResp.Pipelines = append(pipelinesResp.Pipelines, defaultPipelines...)
updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
if err != nil {
return nil, "", err

View File

@@ -132,7 +132,7 @@ func SignozLogsToPLogs(logs []model.SignozLog) []plog.Logs {
slRecord.SetSeverityText(log.SeverityText)
slRecord.SetSeverityNumber(plog.SeverityNumber(log.SeverityNumber))
slRecord.Body().SetStr(log.Body)
slRecord.Body().FromRaw(log.Body)
slAttribs := slRecord.Attributes()
for k, v := range log.Attributes_int64 {

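A small sketch of the change above using the pdata API: Body().FromRaw accepts any raw Go value, so a JSON-shaped body is kept as a structured pdata value instead of being flattened to a string with SetStr:

package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/plog"
)

func main() {
	lr := plog.NewLogRecord()

	// FromRaw maps native Go values onto pcommon.Value kinds (a Map here)
	_ = lr.Body().FromRaw(map[string]any{
		"level": "info",
		"user":  map[string]any{"id": int64(42)},
	})

	fmt.Println(lr.Body().Type())     // Map
	fmt.Println(lr.Body().AsString()) // {"level":"info","user":{"id":42}}
}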
View File

@@ -20,7 +20,6 @@ type aggExprRewriter struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -31,7 +30,6 @@ func NewAggExprRewriter(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *aggExprRewriter {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
@@ -41,7 +39,6 @@ func NewAggExprRewriter(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -81,7 +78,6 @@ func (r *aggExprRewriter) Rewrite(
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
r.jsonBodyPrefix,
r.jsonKeyToKey,
)
// Rewrite the first select item (our expression)
@@ -129,7 +125,6 @@ type exprVisitor struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
Modified bool
chArgs []any
@@ -142,7 +137,6 @@ func newExprVisitor(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
@@ -151,7 +145,6 @@ func newExprVisitor(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -190,7 +183,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -199,7 +192,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
ConditionBuilder: v.conditionBuilder,
FullTextColumn: v.fullTextColumn,
JsonKeyToKey: v.jsonKeyToKey,
}, 0, 0,
}, 0, 0,
)
if err != nil {
return err
@@ -219,7 +212,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
@@ -237,7 +230,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i, arg := range args {
orig := arg.String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
if err != nil {
return err
}

View File

@@ -24,7 +24,6 @@ func CollisionHandledFinalExpr(
cb qbtypes.ConditionBuilder,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
requiredDataType telemetrytypes.FieldDataType,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) (string, []any, error) {
@@ -45,7 +44,7 @@ func CollisionHandledFinalExpr(
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
if err != nil {
return err
}
@@ -58,8 +57,8 @@ func CollisionHandledFinalExpr(
return nil
}
colName, err := fm.FieldFor(ctx, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
colName, fieldForErr := fm.FieldFor(ctx, field)
if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
keysForField := keys[field.Name]
@@ -82,10 +81,10 @@ func CollisionHandledFinalExpr(
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
if found {
// we found a close match; include the suggestion in the error message
return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
return "", nil, errors.WithAdditionalf(fieldForErr, "%s", correction)
} else {
// not even a close match, return an error
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
return "", nil, errors.WithAdditionalf(fieldForErr, "field `%s` not found", field.Name)
}
} else {
for _, key := range keysForField {
@@ -104,10 +103,11 @@ func CollisionHandledFinalExpr(
return "", nil, err
}
if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
// TODO(nitya): enable group by on body column?
// the first condition covers the older tests and the second covers the array conditions
if !BodyJSONQueryEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil {
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
} else if strings.Contains(field.Name, telemetrytypes.ArraySep) || strings.Contains(field.Name, telemetrytypes.ArrayAnyIndex) {
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", field.Name)
} else {
colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
}
@@ -204,7 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// While we expect users not to send mixed data types, it inevitably happens,
// so we handle the data type collisions here
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString:
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
switch v := value.(type) {
case float64:
// try to convert the string value to a number
@@ -219,8 +219,36 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case telemetrytypes.FieldDataTypeFloat64,
telemetrytypes.FieldDataTypeArrayFloat64:
switch v := value.(type) {
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// check if it's a number inside a string
isNumber := false
if _, err := strconv.ParseFloat(v, 64); err == nil {
isNumber = true
}
case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
if !operator.IsComparisonOperator() || !isNumber {
// try to convert the number attribute to string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
} else {
tblFieldName = castFloatHack(tblFieldName)
}
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
case telemetrytypes.FieldDataTypeInt64,
telemetrytypes.FieldDataTypeArrayInt64,
telemetrytypes.FieldDataTypeNumber,
telemetrytypes.FieldDataTypeArrayNumber:
switch v := value.(type) {
// why? CH returns an error for a simple check
// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
@@ -258,7 +286,8 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
}
}
case telemetrytypes.FieldDataTypeBool:
case telemetrytypes.FieldDataTypeBool,
telemetrytypes.FieldDataTypeArrayBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)

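A hedged sketch of the collision handling here: when the stored field type and the literal's type disagree, the column expression is wrapped in a cast so ClickHouse can still compare them. castFloat and castString below are invented stand-ins for the castFloatHack/castString helpers referenced above, whose exact SQL lives outside this hunk:

package main

import (
	"fmt"
	"strconv"
)

// invented stand-ins for the real cast helpers
func castFloat(col string) string  { return "toFloat64OrNull(" + col + ")" }
func castString(col string) string { return "toString(" + col + ")" }

// stringColumnExpr sketches the FieldDataTypeString branch: a numeric literal
// against a string-typed column casts the column to a number, and a bool
// literal is compared as its text form since ClickHouse has no toBoolOrNull.
func stringColumnExpr(col string, value any) (string, any) {
	switch v := value.(type) {
	case float64:
		return castFloat(col), v
	case bool:
		return col, strconv.FormatBool(v)
	default:
		return col, v
	}
}

func main() {
	expr, val := stringColumnExpr("attributes_string['http.status_code']", 200.0)
	fmt.Println(expr, val) // toFloat64OrNull(attributes_string['http.status_code']) 200
}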
View File

@@ -43,7 +43,6 @@ type resourceFilterStatementBuilder[T any] struct {
signal telemetrytypes.Signal
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -76,7 +75,6 @@ func NewLogResourceFilterStatementBuilder(
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
@@ -87,7 +85,6 @@ func NewLogResourceFilterStatementBuilder(
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -100,12 +97,18 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
keySelectors = append(keySelectors, whereClauseSelectors...)
}
// exclude the body-related key selectors
filteredKeySelectors := []*telemetrytypes.FieldKeySelector{}
for idx := range keySelectors {
if keySelectors[idx].FieldContext == telemetrytypes.FieldContextBody {
continue
}
keySelectors[idx].Signal = b.signal
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
}
return keySelectors
return filteredKeySelectors
}
// Build builds a SQL query based on the given parameters
@@ -168,7 +171,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// there is no need for "key" not found error for resource filtering
IgnoreNotFoundKeys: true,
Variables: variables,
}, start, end)
}, start, end)
if err != nil {
return err

View File

@@ -16,11 +16,12 @@ import (
)
type conditionBuilder struct {
fm qbtypes.FieldMapper
fm qbtypes.FieldMapper
metadataStore telemetrytypes.MetadataStore
}
func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
return &conditionBuilder{fm: fm}
func NewConditionBuilder(fm qbtypes.FieldMapper, metadataStore telemetrytypes.MetadataStore) *conditionBuilder {
return &conditionBuilder{fm: fm, metadataStore: metadataStore}
}
func (c *conditionBuilder) conditionFor(
@@ -30,22 +31,34 @@ func (c *conditionBuilder) conditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
return "", err
}
// For JSON columns, preserve the original value type (numeric, bool, etc.)
// Only format to string for non-JSON columns that need string formatting
isJSONColumn := column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled && key.FieldContext == telemetrytypes.FieldContextBody
if !isJSONColumn {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
}
if isJSONColumn {
cond, err := c.buildJSONCondition(ctx, key, operator, value, sb)
if err != nil {
return "", err
}
return cond, nil
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
return "", err
@@ -163,9 +176,7 @@ func (c *conditionBuilder) conditionFor(
// in the UI based query builder, `exists` and `not exists` are used for
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
// Check if this is a body JSON search - by FieldContext
if key.FieldContext == telemetrytypes.FieldContextBody {
if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled {
if operator == qbtypes.FilterOperatorExists {
return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
} else {
@@ -247,7 +258,7 @@ func (c *conditionBuilder) ConditionFor(
return "", err
}
if operator.AddDefaultExistsFilter() {
if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
// with an exception for body json search
field, _ := c.fm.FieldFor(ctx, key)

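A minimal sketch of the gating above (the types and the formatter are trimmed-down stand-ins, not the real querybuilder API): string formatting for CONTAINS/LIKE is applied only to non-JSON columns, so body JSON conditions keep the literal's native type:

package main

import "fmt"

// trimmed-down stand-in for the real column type
type column struct{ json bool }

func (c column) IsJSONColumn() bool { return c.json }

// stand-in for querybuilder.BodyJSONQueryEnabled
var bodyJSONQueryEnabled = true

// stand-in, in spirit only, for FormatValueForContains: force the value into
// the string pattern a LIKE comparison needs
func formatForContains(v any) any { return fmt.Sprintf("%%%v%%", v) }

// conditionValue mirrors the branch above: JSON body columns keep the native
// value (number, bool, ...), everything else gets the string formatting.
func conditionValue(col column, isBodyContext bool, value any) any {
	if col.IsJSONColumn() && bodyJSONQueryEnabled && isBodyContext {
		return value
	}
	return formatForContains(value)
}

func main() {
	fmt.Println(conditionValue(column{json: true}, true, 42))   // 42
	fmt.Println(conditionValue(column{json: false}, false, 42)) // %42%
}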
View File

@@ -373,7 +373,8 @@ func TestConditionFor(t *testing.T) {
}
fm := NewFieldMapper()
conditionBuilder := NewConditionBuilder(fm)
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
@@ -426,7 +427,8 @@ func TestConditionForMultipleKeys(t *testing.T) {
}
fm := NewFieldMapper()
conditionBuilder := NewConditionBuilder(fm)
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
@@ -685,7 +687,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
}
fm := NewFieldMapper()
conditionBuilder := NewConditionBuilder(fm)
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()

View File

@@ -2,7 +2,6 @@ package telemetrylogs
import (
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -37,8 +36,6 @@ const (
BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
ArraySep = jsontypeexporter.ArraySeparator
ArrayAnyIndex = "[*]."
)
var (
@@ -48,8 +45,7 @@ var (
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
}
BodyJSONStringSearchPrefix = `body.`
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
"body": {
Name: "body",
Signal: telemetrytypes.SignalLogs,

View File

@@ -6,7 +6,9 @@ import (
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz-otel-collector/utils"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -28,6 +30,11 @@ var (
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
"body": {Name: "body", Type: schema.ColumnTypeString},
LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
MaxDynamicTypes: utils.ToPointer(uint(32)),
MaxDynamicPaths: utils.ToPointer(uint(0)),
}},
LogsV2BodyPromotedColumn: {Name: LogsV2BodyPromotedColumn, Type: schema.JSONColumnType{}},
"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeString,
@@ -83,13 +90,23 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
return logsV2Columns["attributes_bool"], nil
}
case telemetrytypes.FieldContextBody:
// body context fields are stored in the body column
// Body context is for JSON body fields
// Use body_json if feature flag is enabled
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
}
// Fall back to legacy body column
return logsV2Columns["body"], nil
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
col, ok := logsV2Columns[key.Name]
if !ok {
// check if the key has body JSON search (backward compatibility)
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
// check if the key has body JSON search
if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
// Use body_json if feature flag is enabled and we have a body condition builder
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
}
// Fall back to legacy body column
return logsV2Columns["body"], nil
}
return nil, qbtypes.ErrColumnNotFound
@@ -109,20 +126,33 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
switch column.Type.GetType() {
case schema.ColumnTypeEnumJSON:
// json is only supported for resource context as of now
if key.FieldContext != telemetrytypes.FieldContextResource {
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
}
oldColumn := logsV2Columns["resources_string"]
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
switch key.FieldContext {
case telemetrytypes.FieldContextResource:
oldColumn := logsV2Columns["resources_string"]
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
// have to add ::String as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
// once the ClickHouse dependency is updated, check whether this cast can be removed
if key.Materialized {
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
} else {
// have to add ::String as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
// once the ClickHouse dependency is updated, check whether this cast can be removed
if key.Materialized {
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
}
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
case telemetrytypes.FieldContextBody:
if strings.Contains(key.Name, telemetrytypes.ArraySep) || strings.Contains(key.Name, telemetrytypes.ArrayAnyIndex) {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", key.Name)
}
fieldExpr := BodyJSONColumnPrefix + fmt.Sprintf("`%s`", key.Name)
expr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldExpr, key.JSONDataType.StringValue())
if key.Materialized {
promotedFieldExpr := BodyPromotedColumnPrefix + fmt.Sprintf("`%s`", key.Name)
expr = fmt.Sprintf("coalesce(%s, %s)", expr, fmt.Sprintf("dynamicElement(%s, '%s')", promotedFieldExpr, key.JSONDataType.StringValue()))
}
// returning qbtypes.ErrColumnNotFound here is a deliberate hack: it triggers the fallback expression logic, which includes all the types for the key
return expr, qbtypes.ErrColumnNotFound
default:
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
}
case schema.ColumnTypeEnumLowCardinality:
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {

View File
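For readability, the expression shapes FieldFor now emits, reconstructed from the fmt.Sprintf calls above. The column renderings (resource, resources_string, body_json., body_promoted.) and key names are illustrative assumptions, not confirmed constants:

// resource context, non-materialized key "service.name":
//   multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String,
//           mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)
//
// body context, promoted key "status" with JSON type String (body_json first, promoted as fallback):
//   coalesce(dynamicElement(body_json.`status`, 'String'), dynamicElement(body_promoted.`status`, 'String'))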

@@ -11,7 +11,7 @@ import (
// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
cb := NewConditionBuilder(fm, nil)
keys := buildCompleteFieldKeyMap()
@@ -33,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)
@@ -47,7 +47,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
// TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
cb := NewConditionBuilder(fm, nil)
keys := buildCompleteFieldKeyMap()
@@ -69,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
@@ -14,8 +15,7 @@ import (
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
cb := NewConditionBuilder(fm, telemetrytypestest.NewMockMetadataStore())
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()

View File

@@ -16,7 +16,7 @@ import (
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
cb := NewConditionBuilder(fm, nil)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
@@ -2423,7 +2423,7 @@ func TestFilterExprLogs(t *testing.T) {
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogsConflictNegation(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
cb := NewConditionBuilder(fm, nil)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()

View File

@@ -30,7 +30,7 @@ func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, paren
}
part := pb.parts[index]
pathSoFar := strings.Join(pb.parts[:index+1], ArraySep)
pathSoFar := strings.Join(pb.parts[:index+1], telemetrytypes.ArraySep)
isTerminal := index == len(pb.parts)-1
// Calculate progression parameters based on parent's values
@@ -110,8 +110,8 @@ func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbt
// TODO: PlanJSON requires the query's Start and End to select the correct column between promoted and
// body_json, using the creation time in distributed_promoted_paths
path := strings.ReplaceAll(key.Name, ArrayAnyIndex, ArraySep)
parts := strings.Split(path, ArraySep)
path := strings.ReplaceAll(key.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
parts := strings.Split(path, telemetrytypes.ArraySep)
pb := &JSONAccessPlanBuilder{
key: key,

View File
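A hedged sketch of the normalization above, assuming the collector's ArraySeparator renders as "[]." (its value is not shown in this diff):

path := strings.ReplaceAll("items[*].id", telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
// path == "items[].id"
parts := strings.Split(path, telemetrytypes.ArraySep)
// parts == ["items", "id"]: one hop through the items array, terminal field "id"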

@@ -814,9 +814,6 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
// testTypeSet returns a map of path->types and a getTypes function for testing
// This represents the type information available in the test JSON structure
//
// TODO(Piyush): Remove this unparam nolint
// nolint:unparam
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
types := map[string][]telemetrytypes.JSONDataType{
"user.name": {telemetrytypes.String},

View File

@@ -0,0 +1,455 @@
package telemetrylogs
import (
"context"
"fmt"
"slices"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
)
var (
CodeCurrentNodeNil = errors.MustNewCode("current_node_nil")
CodeNextNodeNil = errors.MustNewCode("next_node_nil")
CodeNestedExpressionsEmpty = errors.MustNewCode("nested_expressions_empty")
CodeGroupByPlanEmpty = errors.MustNewCode("group_by_plan_empty")
CodeArrayMapExpressionsEmpty = errors.MustNewCode("array_map_expressions_empty")
CodePromotedPlanMissing = errors.MustNewCode("promoted_plan_missing")
CodeArrayNavigationFailed = errors.MustNewCode("array_navigation_failed")
)
func (c *conditionBuilder) getTypes(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
keys, _, err := c.metadataStore.GetKeys(ctx, &telemetrytypes.FieldKeySelector{
Name: path,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
Signal: telemetrytypes.SignalLogs,
Limit: 1,
})
if err != nil {
return nil, err
}
types := []telemetrytypes.JSONDataType{}
for _, key := range keys[path] {
if key.JSONDataType != nil {
types = append(types, *key.JSONDataType)
}
}
return types, nil
}
// buildJSONCondition builds the full WHERE condition for body_json JSON paths
func (c *conditionBuilder) buildJSONCondition(ctx context.Context, key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
plan, err := PlanJSON(ctx, key, operator, value, c.getTypes)
if err != nil {
return "", err
}
conditions := []string{}
for _, node := range plan {
condition, err := c.emitPlannedCondition(node, operator, value, sb)
if err != nil {
return "", err
}
conditions = append(conditions, condition)
}
return sb.Or(conditions...), nil
}
// emitPlannedCondition handles paths with array traversal
func (c *conditionBuilder) emitPlannedCondition(plan *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Build traversal + terminal recursively per-hop
compiled, err := c.recurseArrayHops(plan, operator, value, sb)
if err != nil {
return "", err
}
return compiled, nil
}
// buildTerminalCondition creates the innermost condition
func (c *conditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Use the parent's alias + current field name for the full path
fieldPath := node.FieldPath()
if node.TerminalConfig.ElemType.IsArray {
// switch operator for array membership checks
switch operator {
case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
operator = qbtypes.FilterOperatorEqual
case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
operator = qbtypes.FilterOperatorNotEqual
}
arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
if err != nil {
return "", err
}
return arrayCond, nil
}
conditions := []string{}
elemType := node.TerminalConfig.ElemType
fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
fieldExpr, value = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, fieldExpr, operator)
indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
return index.Type == elemType && index.ColumnExpression == fieldPath
})
if elemType.IndexSupported && indexed {
indexedExpr := assumeNotNull(fieldPath, elemType)
emptyValue := func() any {
switch elemType {
case telemetrytypes.String:
return ""
case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
return 0
default:
return nil
}
}()
// switch the operator and value for exists and not exists
switch operator {
case qbtypes.FilterOperatorExists:
operator = qbtypes.FilterOperatorNotEqual
value = emptyValue
case qbtypes.FilterOperatorNotExists:
operator = qbtypes.FilterOperatorEqual
value = emptyValue
default:
// do nothing
}
cond, err := c.applyOperator(sb, indexedExpr, operator, value)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
// Switch operator to EXISTS
operator = qbtypes.FilterOperatorExists
}
cond, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
if len(conditions) > 1 {
return sb.And(conditions...), nil
}
return cond, nil
}
// buildArrayMembershipCondition handles array membership checks
func (c *conditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
arrayPath := node.FieldPath()
// create typed array out of a dynamic array
filteredDynamicExpr := func() string {
baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
node.TerminalConfig.ValueType.StringValue(),
node.TerminalConfig.ValueType.StringValue(),
baseArrayDynamicExpr)
}
typedArrayExpr := func() string {
return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
}
var arrayExpr string
if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
arrayExpr = filteredDynamicExpr()
} else {
arrayExpr = typedArrayExpr()
}
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, "x", operator)
op, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}
// recurseArrayHops recursively builds array traversal conditions
func (c *conditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
if current == nil {
return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
}
if current.IsTerminal {
terminalCond, err := c.buildTerminalCondition(current, operator, value, sb)
if err != nil {
return "", err
}
return terminalCond, nil
}
currAlias := current.Alias()
fieldPath := current.FieldPath()
// Determine availability of Array(JSON) and Array(Dynamic) at this hop
hasArrayJSON := current.Branches[telemetrytypes.BranchJSON] != nil
hasArrayDynamic := current.Branches[telemetrytypes.BranchDynamic] != nil
// Then, at this hop, compute child per branch and wrap
branches := make([]string, 0, 2)
if hasArrayJSON {
jsonArrayExpr := fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')", fieldPath, current.MaxDynamicTypes, current.MaxDynamicPaths)
childGroupJSON, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchJSON], operator, value, sb)
if err != nil {
return "", err
}
branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupJSON, jsonArrayExpr))
}
if hasArrayDynamic {
dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", fieldPath)
dynFilteredExpr := fmt.Sprintf("arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
// Create the Query for Dynamic array
childGroupDyn, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchDynamic], operator, value, sb)
if err != nil {
return "", err
}
branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupDyn, dynFilteredExpr))
}
if len(branches) == 1 {
return branches[0], nil
}
return fmt.Sprintf("(%s)", strings.Join(branches, " OR ")), nil
}
func (c *conditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, fieldExpr string, operator qbtypes.FilterOperator, value any) (string, error) {
switch operator {
case qbtypes.FilterOperatorEqual:
return sb.E(fieldExpr, value), nil
case qbtypes.FilterOperatorNotEqual:
return sb.NE(fieldExpr, value), nil
case qbtypes.FilterOperatorGreaterThan:
return sb.G(fieldExpr, value), nil
case qbtypes.FilterOperatorGreaterThanOrEq:
return sb.GE(fieldExpr, value), nil
case qbtypes.FilterOperatorLessThan:
return sb.LT(fieldExpr, value), nil
case qbtypes.FilterOperatorLessThanOrEq:
return sb.LE(fieldExpr, value), nil
case qbtypes.FilterOperatorLike:
return sb.Like(fieldExpr, value), nil
case qbtypes.FilterOperatorNotLike:
return sb.NotLike(fieldExpr, value), nil
case qbtypes.FilterOperatorILike:
return sb.ILike(fieldExpr, value), nil
case qbtypes.FilterOperatorNotILike:
return sb.NotILike(fieldExpr, value), nil
case qbtypes.FilterOperatorRegexp:
return fmt.Sprintf("match(%s, %s)", fieldExpr, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
return fmt.Sprintf("NOT match(%s, %s)", fieldExpr, sb.Var(value)), nil
case qbtypes.FilterOperatorContains:
return sb.ILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorNotContains:
return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
values, ok := value.([]any)
if !ok {
values = []any{value}
}
conds := []string{}
for _, v := range values {
if operator == qbtypes.FilterOperatorIn {
conds = append(conds, sb.E(fieldExpr, v))
} else {
conds = append(conds, sb.NE(fieldExpr, v))
}
}
if operator == qbtypes.FilterOperatorIn {
return sb.Or(conds...), nil
}
return sb.And(conds...), nil
case qbtypes.FilterOperatorExists:
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
case qbtypes.FilterOperatorNotExists:
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
default:
return "", qbtypes.ErrUnsupportedOperator
}
}
// GroupByArrayJoinInfo contains information about array joins needed for GroupBy
type GroupByArrayJoinInfo struct {
ArrayJoinClauses []string // ARRAY JOIN clauses to add to FROM clause
TerminalExpr string // Terminal field expression for SELECT/GROUP BY
}
// BuildGroupBy builds GroupBy information for body JSON fields using arrayConcat pattern
func (c *conditionBuilder) BuildGroupBy(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*GroupByArrayJoinInfo, error) {
path := strings.TrimPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix)
plan, err := PlanJSON(ctx, key, qbtypes.FilterOperatorExists, nil, c.getTypes)
if err != nil {
return nil, err
}
if len(plan) == 0 {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
"Could not find any valid paths for: %s", path)
}
if plan[0].IsTerminal {
node := plan[0]
expr := fmt.Sprintf("dynamicElement(%s, '%s')", node.FieldPath(), node.TerminalConfig.ElemType.StringValue())
if key.Materialized {
if len(plan) < 2 {
return nil, errors.Newf(errors.TypeUnexpected, CodePromotedPlanMissing,
"plan length is less than 2 for promoted path: %s", path)
}
// promoted column first then body_json column
// TODO(Piyush): Change this in future for better performance
expr = fmt.Sprintf("coalesce(%s, %s)",
fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
expr,
)
}
return &GroupByArrayJoinInfo{
ArrayJoinClauses: []string{},
TerminalExpr: expr,
}, nil
}
// Build arrayConcat pattern directly from the tree structure
arrayConcatExpr, err := c.buildArrayConcat(plan)
if err != nil {
return nil, err
}
// Create single ARRAY JOIN clause with arrayFlatten
arrayJoinClause := fmt.Sprintf("ARRAY JOIN %s AS `%s`", arrayConcatExpr, key.Name)
return &GroupByArrayJoinInfo{
ArrayJoinClauses: []string{arrayJoinClause},
TerminalExpr: fmt.Sprintf("`%s`", key.Name),
}, nil
}
// buildArrayConcat builds the arrayConcat pattern directly from the tree structure
func (c *conditionBuilder) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (string, error) {
if len(plan) == 0 {
return "", errors.Newf(errors.TypeInternal, CodeGroupByPlanEmpty, "group by plan is empty while building arrayConcat")
}
// Build arrayMap expressions for ALL available branches at the root level
var arrayMapExpressions []string
for _, node := range plan {
hasJSON := node.Branches[telemetrytypes.BranchJSON] != nil
hasDynamic := node.Branches[telemetrytypes.BranchDynamic] != nil
if hasJSON {
jsonExpr, err := c.buildArrayMap(node, telemetrytypes.BranchJSON)
if err != nil {
return "", err
}
arrayMapExpressions = append(arrayMapExpressions, jsonExpr)
}
if hasDynamic {
dynamicExpr, err := c.buildArrayMap(node, telemetrytypes.BranchDynamic)
if err != nil {
return "", err
}
arrayMapExpressions = append(arrayMapExpressions, dynamicExpr)
}
}
if len(arrayMapExpressions) == 0 {
return "", errors.Newf(errors.TypeInternal, CodeArrayMapExpressionsEmpty, "array map expressions are empty while building arrayConcat")
}
// Build the arrayConcat expression
arrayConcatExpr := fmt.Sprintf("arrayConcat(%s)", strings.Join(arrayMapExpressions, ", "))
// Wrap with arrayFlatten
arrayFlattenExpr := fmt.Sprintf("arrayFlatten(%s)", arrayConcatExpr)
return arrayFlattenExpr, nil
}
// buildArrayMap builds the arrayMap expression for a specific branch, handling all sub-branches
func (c *conditionBuilder) buildArrayMap(currentNode *telemetrytypes.JSONAccessNode, branchType telemetrytypes.JSONAccessBranchType) (string, error) {
if currentNode == nil {
return "", errors.Newf(errors.TypeInternal, CodeCurrentNodeNil, "current node is nil while building arrayMap")
}
nextNode := currentNode.Branches[branchType]
if nextNode == nil {
return "", errors.Newf(errors.TypeInternal, CodeNextNodeNil, "next node is nil while building arrayMap")
}
// Build the array expression for this level
var arrayExpr string
if branchType == telemetrytypes.BranchJSON {
// Array(JSON) branch
arrayExpr = fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')",
currentNode.FieldPath(), currentNode.MaxDynamicTypes, currentNode.MaxDynamicPaths)
} else {
// Array(Dynamic) branch - filter for JSON objects
dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", currentNode.FieldPath())
arrayExpr = fmt.Sprintf("arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
}
// If this is the terminal level, return the simple arrayMap
if nextNode.IsTerminal {
dynamicElementExpr := fmt.Sprintf("dynamicElement(%s, '%s')", nextNode.FieldPath(),
nextNode.TerminalConfig.ElemType.StringValue(),
)
return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), dynamicElementExpr, arrayExpr), nil
}
// For non-terminal nodes, we need to handle ALL possible branches at the next level
var nestedExpressions []string
hasJSON := nextNode.Branches[telemetrytypes.BranchJSON] != nil
hasDynamic := nextNode.Branches[telemetrytypes.BranchDynamic] != nil
if hasJSON {
jsonNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchJSON)
if err != nil {
return "", err
}
nestedExpressions = append(nestedExpressions, jsonNested)
}
if hasDynamic {
dynamicNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchDynamic)
if err != nil {
return "", err
}
nestedExpressions = append(nestedExpressions, dynamicNested)
}
// If we have multiple nested expressions, we need to concat them
var nestedExpr string
if len(nestedExpressions) == 1 {
nestedExpr = nestedExpressions[0]
} else if len(nestedExpressions) > 1 {
// This shouldn't happen in our current tree structure, but handle it just in case
nestedExpr = fmt.Sprintf("arrayConcat(%s)", strings.Join(nestedExpressions, ", "))
} else {
return "", errors.Newf(errors.TypeInternal, CodeNestedExpressionsEmpty, "nested expressions are empty while building arrayMap")
}
return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), nestedExpr, arrayExpr), nil
}
func assumeNotNull(column string, elemType telemetrytypes.JSONDataType) string {
return fmt.Sprintf("assumeNotNull(dynamicElement(%s, '%s'))", column, elemType.StringValue())
}
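To make the traversal concrete: for a filter like body.items[*].price > 100 with a single Array(JSON) hop ending in a Float64 leaf, recurseArrayHops plus buildTerminalCondition yield a condition of this shape (alias names and the max_dynamic_* values are hypothetical):

// arrayExists(x1-> dynamicElement(x1.`price`, 'Float64') > ?,
//             dynamicElement(body_json.`items`, 'Array(JSON(max_dynamic_types=32, max_dynamic_paths=0))'))
//
// when both Array(JSON) and Array(Dynamic) branches exist at a hop, the two
// arrayExists conditions are OR-ed together, per recurseArrayHops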

File diff suppressed because one or more lines are too long

View File

@@ -84,7 +84,6 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
}
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
dataType, value := inferDataType(value, operator, key)
// for array types, we need to extract the value from the JSON_QUERY

View File

@@ -23,7 +23,6 @@ type logQueryStatementBuilder struct {
aggExprRewriter qbtypes.AggExprRewriter
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -37,7 +36,6 @@ func NewLogQueryStatementBuilder(
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
aggExprRewriter qbtypes.AggExprRewriter,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *logQueryStatementBuilder {
logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
@@ -50,7 +48,6 @@ func NewLogQueryStatementBuilder(
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
aggExprRewriter: aggExprRewriter,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -171,6 +168,25 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
overallMatch = overallMatch || findMatch(IntrinsicFields)
}
if strings.Contains(k.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
k.Name = strings.TrimPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix)
fieldKeys, found := keys[k.Name]
if found && len(fieldKeys) > 0 {
k.FieldContext = fieldKeys[0].FieldContext
k.FieldDataType = fieldKeys[0].FieldDataType
k.Materialized = fieldKeys[0].Materialized
k.JSONDataType = fieldKeys[0].JSONDataType
k.Indexes = fieldKeys[0].Indexes
overallMatch = true // because we found a match
} else {
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
k.FieldContext = telemetrytypes.FieldContextBody
k.FieldDataType = telemetrytypes.FieldDataTypeString
k.JSONDataType = &telemetrytypes.String
}
}
if !overallMatch {
// check if all the key for the given field have been materialized, if so
// set the key to materialized
@@ -221,6 +237,9 @@ func (b *logQueryStatementBuilder) buildListQuery(
cteArgs = append(cteArgs, args)
}
// Collect array join info for body JSON fields
var arrayJoinClauses []string
// Select timestamp and id by default
sb.Select(LogsV2TimestampColumn)
sb.SelectMore(LogsV2IDColumn)
@@ -234,6 +253,10 @@ func (b *logQueryStatementBuilder) buildListQuery(
sb.SelectMore(LogsV2ScopeNameColumn)
sb.SelectMore(LogsV2ScopeVersionColumn)
sb.SelectMore(LogsV2BodyColumn)
if querybuilder.BodyJSONQueryEnabled {
sb.SelectMore(LogsV2BodyJSONColumn)
sb.SelectMore(LogsV2BodyPromotedColumn)
}
sb.SelectMore(LogsV2AttributesStringColumn)
sb.SelectMore(LogsV2AttributesNumberColumn)
sb.SelectMore(LogsV2AttributesBoolColumn)
@@ -246,6 +269,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn {
continue
}
// get column expression for the field - use array index directly to avoid pointer to loop variable
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
if err != nil {
@@ -255,8 +279,12 @@ func (b *logQueryStatementBuilder) buildListQuery(
}
}
// From table
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// From table (inject ARRAY JOINs if collected)
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -330,13 +358,17 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
var allGroupByArgs []any
// Collect array join info for body JSON fields
var arrayJoinClauses []string
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
if err != nil {
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
@@ -358,7 +390,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
}
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add FROM clause
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
@@ -404,7 +442,6 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
// Stitch it all together: WITH … SELECT …
@@ -431,7 +468,6 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
// Stitch it all together: WITH … SELECT …
@@ -478,11 +514,15 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
// Collect array join info for body JSON fields
var arrayJoinClauses []string
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
if err != nil {
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
@@ -508,8 +548,12 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
}
}
// From table
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// From table (inject ARRAY JOINs if collected)
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -654,7 +698,6 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
start, end uint64,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
return b.resourceFilterStmtBuilder.Build(
ctx,
start,

View File
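A minimal sketch of the FROM-clause injection pattern repeated in the list, time-series, and scalar hunks above; the ARRAY JOIN clause text is a hypothetical example of what BuildGroupBy emits for an array path:

fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
arrayJoinClauses := []string{
// hypothetical BuildGroupBy output for body.items[*].id
"ARRAY JOIN arrayFlatten(arrayConcat(arrayMap(x1->dynamicElement(x1.`id`, 'String'), dynamicElement(body_json.`items`, 'Array(JSON)')))) AS `body.items[*].id`",
}
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)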

@@ -32,7 +32,6 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
cb,
mockMetadataStore,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
}
@@ -197,11 +196,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -213,7 +212,6 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -318,11 +316,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -334,7 +332,6 @@ func TestStatementBuilderListQuery(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -427,11 +424,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -443,10 +440,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
@@ -491,7 +489,8 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "body.status",
Name: "status",
FieldContext: telemetrytypes.FieldContextBody,
},
},
},
@@ -501,11 +500,11 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -517,7 +516,6 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -597,11 +595,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -613,7 +611,6 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)

View File

@@ -47,9 +47,6 @@ var (
//
// searchOperator: LIKE for pattern matching, EQUAL for exact match
// Returns: (paths, error)
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
@@ -135,7 +132,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
orClauses := []string{}
for _, fieldKeySelector := range fieldKeySelectors {
// replace [*] with []
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrylogs.ArrayAnyIndex, telemetrylogs.ArraySep)
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
// Extract search text for body JSON keys
keyName := CleanPathPrefixes(fieldKeySelector.Name)
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
@@ -162,13 +159,11 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
return query, args, limit, nil
}
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
filteredPaths := []string{}
for _, path := range paths {
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
continue
}
filteredPaths = append(filteredPaths, path)
@@ -296,7 +291,7 @@ func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...str
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
path = CleanPathPrefixes(path)
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
}
@@ -456,7 +451,7 @@ func derefValue(v any) any {
// IsPathPromoted checks if a specific path is promoted
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
split := strings.Split(path, telemetrylogs.ArraySep)
split := strings.Split(path, telemetrytypes.ArraySep)
query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
if err != nil {

View File
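One behavior worth noting from the IsPathPromoted hunk: promotion is tracked on the root path segment, so only the portion before the first array separator is looked up. A sketch, again assuming "[]." as the separator rendering:

split := strings.Split("user.addresses[].city", telemetrytypes.ArraySep)
// split[0] == "user.addresses" -- the value checked against the promoted paths table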

@@ -572,6 +572,14 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
}
if querybuilder.BodyJSONQueryEnabled {
bodyJSONPaths, finished, err := t.getBodyJSONPaths(ctx, fieldKeySelectors) // LIKE for pattern matching
if err != nil {
t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
}
keys = append(keys, bodyJSONPaths...)
complete = complete && finished
}
return keys, complete, nil
}
@@ -978,7 +986,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {
@@ -1002,20 +1010,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
@@ -1164,6 +1172,10 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
limit = 50
}
if strings.HasPrefix(fieldValueSelector.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
return t.ListJSONValues(ctx, fieldValueSelector.Name, limit)
}
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)
if fieldValueSelector.Name != "" {

View File

@@ -32,6 +32,8 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
options.MaxIdleConns = config.Connection.MaxIdleConns
options.MaxOpenConns = config.Connection.MaxOpenConns
options.DialTimeout = config.Connection.DialTimeout
// This avoids driver decoding issues with JSON columns
options.Settings["output_format_native_write_json_as_string"] = 1
chConn, err := clickhouse.Open(options)
if err != nil {

View File
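For context, a hedged sketch of where that setting sits in clickhouse-go v2 style options; only the Settings entry is taken from the hunk above, the rest is illustrative:

options := &clickhouse.Options{
Settings: clickhouse.Settings{
// have the server serialize JSON columns as strings so the driver
// does not decode the native JSON type
"output_format_native_write_json_as_string": 1,
},
}
chConn, err := clickhouse.Open(options)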

@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
if err != nil {
return nil, err
}
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
if err != nil {
return nil, err
}
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
}, start, end)
if err != nil {
return nil, err

View File

@@ -357,7 +357,7 @@ func TestStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -525,7 +525,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -681,7 +681,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()

View File

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
ConditionBuilder: b.stmtBuilder.cb,
FieldKeys: keys,
SkipResourceFilter: true,
}, b.start, b.end,
)
if err != nil {
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -552,7 +552,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {
@@ -662,7 +661,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {
@@ -802,7 +800,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {

View File

@@ -390,7 +390,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(
@@ -506,7 +506,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(

View File

@@ -44,7 +44,7 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
mockMetadataStore,
)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),

View File

@@ -6,7 +6,6 @@ import (
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -33,7 +32,7 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
}
if strings.Contains(i.Path, telemetrylogs.ArraySep) || strings.Contains(i.Path, telemetrylogs.ArrayAnyIndex) {
if strings.Contains(i.Path, telemetrytypes.ArraySep) || strings.Contains(i.Path, telemetrytypes.ArrayAnyIndex) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths cannot be promoted or indexed")
}
@@ -41,12 +40,12 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "do not add the `%s` or `%s` prefix to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
}
if !strings.HasPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix) {
if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
}
// remove the "body." prefix from the path
i.Path = strings.TrimPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix)
i.Path = strings.TrimPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix)
isCardinal := keycheck.IsCardinal(i.Path)
if isCardinal {

View File
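A short sketch of the validation flow these hunks leave in place, with a hypothetical input path:

p := &PromotePath{Path: "body.user.id"}
// 1. reject paths containing telemetrytypes.ArraySep or telemetrytypes.ArrayAnyIndex
// 2. reject paths that already carry the body_json/body_promoted column prefixes
// 3. require the "body." prefix, then strip it before the cardinality check
p.Path = strings.TrimPrefix(p.Path, telemetrytypes.BodyJSONStringSearchPrefix) // "user.id"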

@@ -153,28 +153,10 @@ func NewFormulaEvaluator(expressionStr string, canDefaultZero map[string]bool) (
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse expression")
}
// Normalize canDefaultZero keys to match variable casing from expression
normalizedCanDefaultZero := make(map[string]bool)
vars := expression.Vars()
for _, variable := range vars {
// If exact match exists, use it
if val, ok := canDefaultZero[variable]; ok {
normalizedCanDefaultZero[variable] = val
continue
}
// Otherwise try case-insensitive lookup
for k, v := range canDefaultZero {
if strings.EqualFold(k, variable) {
normalizedCanDefaultZero[variable] = v
break
}
}
}
evaluator := &FormulaEvaluator{
expression: expression,
variables: vars,
canDefaultZero: normalizedCanDefaultZero,
variables: expression.Vars(),
canDefaultZero: canDefaultZero,
aggRefs: make(map[string]aggregationRef),
}
@@ -299,16 +281,6 @@ func (fe *FormulaEvaluator) buildSeriesLookup(timeSeriesData map[string]*TimeSer
// We are only interested in the time series data for the queries that are
// involved in the formula expression.
data, exists := timeSeriesData[aggRef.QueryName]
if !exists {
// try case-insensitive lookup
for k, v := range timeSeriesData {
if strings.EqualFold(k, aggRef.QueryName) {
data = v
exists = true
break
}
}
}
if !exists {
continue
}

View File
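With the normalization removed, lookups are exact-case; a hedged illustration of the resulting contract (seriesA and seriesB are hypothetical series values):

evaluator, err := NewFormulaEvaluator("A / B", map[string]bool{"A": true, "B": true})
// timeSeriesData keys must now match the expression variables exactly:
// "A" and "B" resolve; "a" and "b" no longer do
result, err := evaluator.EvaluateFormula(map[string]*TimeSeriesData{
"A": seriesA,
"B": seriesB,
})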

@@ -864,158 +864,6 @@ func TestComplexExpression(t *testing.T) {
}
}
func TestCaseInsensitiveQueryNames(t *testing.T) {
tests := []struct {
name string
expression string
tsData map[string]*TimeSeriesData
expectedValues []float64
}{
{
name: "lowercase query names",
expression: "a / b",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{5.0},
},
{
name: "mixed case query names",
expression: "A / b",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{5.0},
},
{
name: "uppercase query names with lowercase data keys",
expression: "A / B",
tsData: map[string]*TimeSeriesData{
"a": createFormulaTestTimeSeriesData("a", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{5.0},
},
{
name: "all lowercase",
expression: "a/b",
tsData: map[string]*TimeSeriesData{
"a": createFormulaTestTimeSeriesData("a", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 100}),
},
}),
"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
},
expectedValues: []float64{10.0},
},
{
name: "complex expression with mixed case",
expression: "a + B * c",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 5}),
},
}),
"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 3}),
},
}),
"C": createFormulaTestTimeSeriesData("C", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{11.0}, // 5 + 3 * 2 = 11
},
{
name: "lowercase variables with default zero missing point",
expression: "a + b",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{
1: 10,
2: 20,
}),
},
}),
"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{
1: 5,
}),
},
}),
},
expectedValues: []float64{15.0, 20.0}, // t1: 10+5, t2: 20+0
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
evaluator, err := NewFormulaEvaluator(tt.expression, map[string]bool{"a": true, "A": true, "b": true, "B": true, "c": true, "C": true})
require.NoError(t, err)
result, err := evaluator.EvaluateFormula(tt.tsData)
require.NoError(t, err)
require.NotNil(t, result)
assert.Equal(t, 1, len(result), "should have exactly one result series")
assert.Equal(t, len(tt.expectedValues), len(result[0].Values), "should match expected number of values")
for i, v := range tt.expectedValues {
assert.InDelta(t, v, result[0].Values[i].Value, 0.0001, "value at index %d should match", i)
}
})
}
}
func TestAbsValueExpression(t *testing.T) {
tsData := map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"strings"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -17,9 +18,13 @@ var (
FieldSelectorMatchTypeFuzzy = FieldSelectorMatchType{valuer.NewString("fuzzy")}
)
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
// e.g., "body.status" where "body." is the prefix
const BodyJSONStringSearchPrefix = `body.`
const (
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
// e.g., "body.status" where "body." is the prefix
BodyJSONStringSearchPrefix = "body."
ArraySep = jsontypeexporter.ArraySeparator
ArrayAnyIndex = "[*]."
)
type TelemetryFieldKey struct {
Name string `json:"name"`

View File

@@ -67,7 +67,6 @@ def test_logs_list(
"code.file": "/opt/integration.go",
"code.function": "com.example.Integration.process",
"code.line": 120,
"metric.domain_id": "d-001",
"telemetry.sdk.language": "go",
},
body="This is a log message, coming from a go application",
@@ -142,7 +141,6 @@ def test_logs_list(
"code.function": "com.example.Integration.process",
"log.iostream": "stdout",
"logtag": "F",
"metric.domain_id": "d-001",
"telemetry.sdk.language": "go",
}
assert rows[0]["data"]["attributes_number"] == {"code.line": 120}
@@ -310,86 +308,6 @@ def test_logs_list(
assert len(values) == 1
assert 120 in values
# Query keys from the fields API with context specified in the key
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/keys"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "logs",
"searchText": "resource.servic",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
keys = response.json()["data"]["keys"]
assert "service.name" in keys
assert any(k["fieldContext"] == "resource" for k in keys["service.name"])
# Do not treat `metric.` as a context prefix for logs
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/keys"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "logs",
"searchText": "metric.do",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
keys = response.json()["data"]["keys"]
assert "metric.domain_id" in keys
# Query values of service.name resource attribute using context-prefixed key
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "logs",
"name": "resource.service.name",
"searchText": "",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["values"]["stringValues"]
assert "go" in values
assert "java" in values
# Query values of metric.domain_id (string attribute) and ensure context collision doesn't break it
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "logs",
"name": "metric.domain_id",
"searchText": "",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["values"]["stringValues"]
assert "d-001" in values
def test_logs_time_series_count(
signoz: types.SigNoz,

View File

@@ -373,43 +373,3 @@ def test_traces_list(
assert len(values) == 2
assert set(values) == set(["POST", "PATCH"])
# Query keys from the fields API with context specified in the key
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/keys"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "traces",
"searchText": "resource.servic",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
keys = response.json()["data"]["keys"]
assert "service.name" in keys
assert any(k["fieldContext"] == "resource" for k in keys["service.name"])
# Query values of service.name resource attribute using context-prefixed key
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "traces",
"name": "resource.service.name",
"searchText": "",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["values"]["stringValues"]
assert set(values) == set(["topic-service", "http-service"])