Compare commits


4 Commits

Author SHA1 Message Date
Yunus M  eb19a00c7a  feat: update test cases  2025-12-26 15:08:00 +05:30
Yunus M  c22366cbf4  feat: default max lines to 1  2025-12-26 15:03:31 +05:30
Abhi kumar  09dc95cfe9  fix: added fix for metric selection tooltip scroll issue (#9869)  2025-12-26 13:40:19 +05:30
Abhi kumar  d218cd5733  fix: added fix for reduceTo selection based on metric type + code cleanup (#9732)  2025-12-25 22:54:25 +05:30

* fix: added fixes for reduce-to, auto open + metric based default value

* fix: fixed race condition

* chore: removed unnecessary useEffect from SpaceAggregation

* test: added fix for failing test in useQueryBuilderOperations

* fix: pr review comments

* fix: pr review changes
61 changed files with 460 additions and 1671 deletions

View File

@@ -59,7 +59,7 @@ jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small',
version: 1,

View File

@@ -10,7 +10,7 @@ jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small',
version: 1,
@@ -51,7 +51,7 @@ describe('LogsFormatOptionsMenu (unit)', () => {
selectedOptionFormat="table"
config={{
format: { value: 'table', onChange: formatOnChange },
maxLines: { value: 2, onChange: maxLinesOnChange },
maxLines: { value: 1, onChange: maxLinesOnChange },
fontSize: { value: FontSize.SMALL, onChange: fontSizeOnChange },
addColumn: {
isFetching: false,

View File

@@ -32,6 +32,7 @@ const ADD_ONS_KEYS = {
ORDER_BY: 'order_by',
LIMIT: 'limit',
LEGEND_FORMAT: 'legend_format',
REDUCE_TO: 'reduce_to',
};
const ADD_ONS_KEYS_TO_QUERY_PATH = {
@@ -40,13 +41,14 @@ const ADD_ONS_KEYS_TO_QUERY_PATH = {
[ADD_ONS_KEYS.ORDER_BY]: 'orderBy',
[ADD_ONS_KEYS.LIMIT]: 'limit',
[ADD_ONS_KEYS.LEGEND_FORMAT]: 'legend',
[ADD_ONS_KEYS.REDUCE_TO]: 'reduceTo',
};
const ADD_ONS = [
{
icon: <BarChart2 size={14} />,
label: 'Group By',
key: 'group_by',
key: ADD_ONS_KEYS.GROUP_BY,
description:
'Break down data by attributes like service name, endpoint, status code, or region. Essential for spotting patterns and comparing performance across different segments.',
docLink: 'https://signoz.io/docs/userguide/query-builder-v5/#grouping',
@@ -54,7 +56,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Having',
key: 'having',
key: ADD_ONS_KEYS.HAVING,
description:
'Filter grouped results based on aggregate conditions. Show only groups meeting specific criteria, like error rates > 5% or p99 latency > 500',
docLink:
@@ -63,7 +65,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Order By',
key: 'order_by',
key: ADD_ONS_KEYS.ORDER_BY,
description:
'Sort results to surface what matters most. Quickly identify slowest operations, most frequent errors, or highest resource consumers.',
docLink:
@@ -72,7 +74,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Limit',
key: 'limit',
key: ADD_ONS_KEYS.LIMIT,
description:
'Show only the top/bottom N results. Perfect for focusing on outliers, reducing noise, and improving dashboard performance.',
docLink:
@@ -81,7 +83,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Legend format',
key: 'legend_format',
key: ADD_ONS_KEYS.LEGEND_FORMAT,
description:
'Customize series labels using variables like {{service.name}}-{{endpoint}}. Makes charts readable at a glance during incident investigation.',
docLink:
@@ -92,7 +94,7 @@ const ADD_ONS = [
const REDUCE_TO = {
icon: <ScrollText size={14} />,
label: 'Reduce to',
key: 'reduce_to',
key: ADD_ONS_KEYS.REDUCE_TO,
description:
'Apply mathematical operations like sum, average, min, max, or percentiles to reduce multiple time series into a single value.',
docLink:
@@ -218,10 +220,9 @@ function QueryAddOns({
);
const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));
// Filter and set selected views: add-ons that are both active and available
setSelectedViews(
ADD_ONS.filter(
filteredAddOns.filter(
(addOn) =>
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
),
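
Note on the hunk above: selected views are now computed from filteredAddOns rather than the full ADD_ONS list, so an add-on that is active in the query but hidden for the current panel type can no longer show up as selected. A minimal sketch of the resulting intersection (the standalone helper is hypothetical; names follow the diff):

```typescript
// Hypothetical standalone version of the selected-views computation.
interface AddOn {
	key: string;
}

function computeSelectedViews(
	filteredAddOns: AddOn[],
	activeAddOnKeys: Set<string>,
): AddOn[] {
	const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));
	// Starting from filteredAddOns (not the full ADD_ONS list) guarantees every
	// selected view is both active in the query and available for the panel.
	return filteredAddOns.filter(
		(addOn) =>
			activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
	);
}
```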

View File

@@ -1,6 +1,12 @@
/* eslint-disable */
import { fireEvent, render, screen } from '@testing-library/react';
import React from 'react';
import {
fireEvent,
render,
screen,
userEvent,
waitFor,
within,
} from 'tests/test-utils';
import QueryAddOns from '../QueryV2/QueryAddOns/QueryAddOns';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -55,16 +61,7 @@ jest.mock('../QueryV2/QueryAddOns/HavingFilter/HavingFilter', () => ({
),
}));
jest.mock(
'container/QueryBuilder/filters/ReduceToFilter/ReduceToFilter',
() => ({
ReduceToFilter: ({ onChange }: any) => (
<button data-testid="reduce-to" onClick={() => onChange('sum')}>
ReduceToFilter
</button>
),
}),
);
// ReduceToFilter is not mocked - we test the actual Ant Design Select component
function baseQuery(overrides: Partial<any> = {}): any {
return {
@@ -140,7 +137,7 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('order-by-content')).toBeInTheDocument();
});
it('limit input auto-opens when limit is set and changing it calls handler', () => {
it('limit input auto-opens when limit is set and changing it calls handler', async () => {
render(
<QueryAddOns
query={baseQuery({ limit: 5 })}
@@ -183,4 +180,88 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('limit-content')).toBeInTheDocument();
expect(limitInput.value).toBe('7');
});
it('shows reduce-to add-on when showReduceTo is true', () => {
render(
<QueryAddOns
query={baseQuery()}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('query-add-on-reduce_to')).toBeInTheDocument();
});
it('auto-opens reduce-to content when reduceTo is set', () => {
render(
<QueryAddOns
query={baseQuery({ reduceTo: 'sum' })}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
it('calls handleSetQueryData when reduce-to value changes', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const query = baseQuery({
reduceTo: 'avg',
aggregations: [{ id: 'a', operator: 'count', reduceTo: 'avg' }],
});
render(
<QueryAddOns
query={query}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
// Wait for the reduce-to content section to be visible (it auto-opens when reduceTo is set)
await waitFor(() => {
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
// Get the Select component by its role (combobox)
// The Select is within the reduce-to-content section
const reduceToContent = screen.getByTestId('reduce-to-content');
const selectCombobox = within(reduceToContent).getByRole('combobox');
// Open the dropdown by clicking on the combobox
await user.click(selectCombobox);
// Wait for the dropdown listbox to appear
await screen.findByRole('listbox');
// Find and click the "Sum" option
const sumOption = await screen.findByText('Sum of values in timeframe');
await user.click(sumOption);
// Verify the handler was called with the correct value
await waitFor(() => {
expect(mockHandleSetQueryData).toHaveBeenCalledWith(0, {
...query,
aggregations: [
{
...(query.aggregations?.[0] as any),
reduceTo: 'sum',
},
],
});
});
});
});
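
The new reduce-to test above drives the real (un-mocked) Ant Design Select. A sketch of the interaction pattern it relies on (the pickOption helper is hypothetical):

```typescript
import { screen, within } from '@testing-library/react';
import userEvent from '@testing-library/user-event';

// Opens an antd Select rendered inside `container` and clicks the option
// whose visible text matches `label`.
async function pickOption(container: HTMLElement, label: string): Promise<void> {
	// pointerEventsCheck: 0 keeps user-event from rejecting clicks while the
	// antd dropdown wrapper is still animating with pointer-events: none.
	const user = userEvent.setup({ pointerEventsCheck: 0 });
	await user.click(within(container).getByRole('combobox')); // open dropdown
	await screen.findByRole('listbox'); // wait for the portal to mount
	await user.click(await screen.findByText(label)); // pick the option
}
```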

View File

@@ -35,7 +35,7 @@ jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small',
version: 1,

View File

@@ -119,7 +119,7 @@ jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small',
version: 1,

View File

@@ -37,7 +37,7 @@ describe('useOptionsMenu', () => {
columns: [],
formatting: {
format: 'raw',
maxLines: 2,
maxLines: 1,
fontSize: 'small',
},
},
@@ -49,7 +49,7 @@ describe('useOptionsMenu', () => {
columns: [],
formatting: {
format: 'raw',
maxLines: 2,
maxLines: 1,
fontSize: 'small',
},
},

View File

@@ -7,7 +7,7 @@ export const URL_OPTIONS = 'options';
export const defaultOptionsQuery: OptionsQuery = {
selectColumns: [],
maxLines: 2,
maxLines: 1,
format: 'raw',
fontSize: FontSize.SMALL,
};
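
This constant is the single source of the change; every maxLines: 2 → maxLines: 1 edit in the surrounding test files mirrors it. A self-contained sketch of the updated default (types inlined here; the real module imports OptionsQuery and a FontSize enum):

```typescript
interface OptionsQuery {
	selectColumns: string[];
	maxLines: number;
	format: 'raw' | 'table' | 'list';
	fontSize: 'small' | 'medium' | 'large';
}

// Logs now render one line per entry by default (previously two).
export const defaultOptionsQuery: OptionsQuery = {
	selectColumns: [],
	maxLines: 1,
	format: 'raw',
	fontSize: 'small',
};
```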

View File

@@ -62,7 +62,7 @@ jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small',
version: 1,

View File

@@ -1,7 +1,5 @@
import { Select } from 'antd';
import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { useEffect, useState } from 'react';
import { MetricAggregateOperator } from 'types/common/queryBuilder';
interface SpaceAggregationOptionsProps {
panelType: PANEL_TYPES | null;
@@ -22,39 +20,13 @@ export default function SpaceAggregationOptions({
operators,
qbVersion,
}: SpaceAggregationOptionsProps): JSX.Element {
const placeHolderText =
panelType === PANEL_TYPES.VALUE || qbVersion === 'v3' ? 'Sum' : 'Sum By';
const [defaultValue, setDefaultValue] = useState(
selectedValue || placeHolderText,
);
useEffect(() => {
if (!selectedValue) {
if (
aggregatorAttributeType === ATTRIBUTE_TYPES.HISTOGRAM ||
aggregatorAttributeType === ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
setDefaultValue(MetricAggregateOperator.P90);
onSelect(MetricAggregateOperator.P90);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.SUM) {
setDefaultValue(MetricAggregateOperator.SUM);
onSelect(MetricAggregateOperator.SUM);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.GAUGE) {
setDefaultValue(MetricAggregateOperator.AVG);
onSelect(MetricAggregateOperator.AVG);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [aggregatorAttributeType]);
return (
<div
className="spaceAggregationOptionsContainer"
key={aggregatorAttributeType}
>
<Select
defaultValue={defaultValue}
defaultValue={selectedValue}
style={{ minWidth: '5.625rem' }}
disabled={disabled}
onChange={onSelect}
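
With the useEffect removed, the Select is uncontrolled and reads defaultValue only on mount; the surviving key={aggregatorAttributeType} wrapper is what forces a remount, and a fresh default, whenever the metric type changes. A minimal sketch of that remount-by-key pattern (component and prop names hypothetical):

```typescript
import { Select } from 'antd';
import React from 'react';

function AggregationSelect({
	attributeType,
	selectedValue,
	onSelect,
}: {
	attributeType: string;
	selectedValue?: string;
	onSelect: (value: string) => void;
}): JSX.Element {
	return (
		// Changing `key` unmounts and remounts the subtree, so the uncontrolled
		// Select re-applies `defaultValue` for the new attribute type.
		<div key={attributeType}>
			<Select defaultValue={selectedValue} onChange={onSelect} />
		</div>
	);
}
```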

View File

@@ -0,0 +1,16 @@
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.option-renderer-tooltip {
pointer-events: none;
}

View File

@@ -1,4 +1,4 @@
import './QueryBuilderSearch.styles.scss';
import './OptionRenderer.styles.scss';
import { Tooltip } from 'antd';
@@ -13,7 +13,11 @@ function OptionRenderer({
return (
<span className="option">
{type ? (
<Tooltip title={`${value}`} placement="topLeft">
<Tooltip
title={`${value}`}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<div className="selectOptionContainer">
<div className="option-value">{value}</div>
<div className="option-meta-data-container">
@@ -29,7 +33,11 @@ function OptionRenderer({
</div>
</Tooltip>
) : (
<Tooltip title={label} placement="topLeft">
<Tooltip
title={label}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<span>{label}</span>
</Tooltip>
)}
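
Taken together with the new OptionRenderer.styles.scss above, the fix is: give the tooltip overlay a root class whose CSS sets pointer-events: none, so the floating overlay stops swallowing wheel events aimed at the scrollable option list beneath it. A reduced sketch (component name hypothetical):

```typescript
import { Tooltip } from 'antd';
import React from 'react';

function HoverLabel({ value }: { value: string }): JSX.Element {
	return (
		<Tooltip
			title={value}
			placement="topLeft"
			// Styled as `.option-renderer-tooltip { pointer-events: none; }` so
			// the overlay is display-only and never intercepts scroll events.
			rootClassName="option-renderer-tooltip"
		>
			<span>{value}</span>
		</Tooltip>
	);
}
```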

View File

@@ -5,19 +5,6 @@
gap: 12px;
}
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.logs-popup {
&.hide-scroll {
.rc-virtual-list-holder {

View File

@@ -0,0 +1,88 @@
import { render, screen } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { ReduceToFilter } from './ReduceToFilter';
const mockOnChange = jest.fn();
function baseQuery(overrides: Partial<IBuilderQuery> = {}): IBuilderQuery {
return {
dataSource: 'traces',
aggregations: [],
groupBy: [],
orderBy: [],
legend: '',
limit: null,
having: { expression: '' },
...overrides,
} as IBuilderQuery;
}
describe('ReduceToFilter', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('initializes with default avg when no reduceTo is set', () => {
render(<ReduceToFilter query={baseQuery()} onChange={mockOnChange} />);
expect(screen.getByTestId('reduce-to')).toBeInTheDocument();
expect(
screen.getByText('Average of values in timeframe'),
).toBeInTheDocument();
});
it('initializes from query.aggregations[0].reduceTo', () => {
render(
<ReduceToFilter
query={baseQuery({
aggregations: [{ reduceTo: 'sum' } as any],
aggregateAttribute: { key: 'test', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Sum of values in timeframe')).toBeInTheDocument();
});
it('initializes from query.reduceTo when aggregations[0].reduceTo is not set', () => {
render(
<ReduceToFilter
query={baseQuery({
reduceTo: 'max',
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Max of values in timeframe')).toBeInTheDocument();
});
it('updates to sum when aggregateAttribute.type is SUM', async () => {
const { rerender } = render(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
rerender(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test2', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
const reduceToFilterText = (await screen.findByText(
'Sum of values in timeframe',
)) as HTMLElement;
expect(reduceToFilterText).toBeInTheDocument();
});
});

View File

@@ -1,6 +1,7 @@
import { Select } from 'antd';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { REDUCE_TO_VALUES } from 'constants/queryBuilder';
import { memo } from 'react';
import { memo, useEffect, useRef, useState } from 'react';
import { MetricAggregation } from 'types/api/v5/queryRange';
// ** Types
import { ReduceOperators } from 'types/common/queryBuilder';
@@ -12,19 +13,46 @@ export const ReduceToFilter = memo(function ReduceToFilter({
query,
onChange,
}: ReduceToFilterProps): JSX.Element {
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
const currentValue =
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[0];
const isMounted = useRef<boolean>(false);
const [currentValue, setCurrentValue] = useState<
SelectOption<ReduceOperators, string>
>(REDUCE_TO_VALUES[2]); // default to avg
const handleChange = (
newValue: SelectOption<ReduceOperators, string>,
): void => {
setCurrentValue(newValue);
onChange(newValue.value);
};
useEffect(
() => {
if (!isMounted.current) {
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
setCurrentValue(
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[2],
);
isMounted.current = true;
return;
}
const aggregationAttributeType = query.aggregateAttribute?.type as
| MetricType
| undefined;
if (aggregationAttributeType === MetricType.SUM) {
handleChange(REDUCE_TO_VALUES[1]);
} else {
handleChange(REDUCE_TO_VALUES[2]);
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[query.aggregateAttribute?.key],
);
return (
<Select
placeholder="Reduce to"
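
The rewritten effect separates first mount (seed the value from the incoming query) from later changes to query.aggregateAttribute?.key (reset to the metric-type default), using an isMounted ref as the guard. A generic sketch of that pattern (the hook itself is hypothetical):

```typescript
import { useEffect, useRef } from 'react';

// Runs `initialize` once on mount and `reset` on every later change of `dep`.
function useInitThenReset<T>(
	dep: T,
	initialize: () => void,
	reset: () => void,
): void {
	const isMounted = useRef(false);
	useEffect(() => {
		if (!isMounted.current) {
			initialize(); // first render: adopt the value already on the query
			isMounted.current = true;
			return;
		}
		reset(); // dep changed after mount: apply the metric-type based default
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [dep]);
}
```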

View File

@@ -188,7 +188,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
],
}),
@@ -239,7 +239,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
],
}),
@@ -315,7 +315,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.AVG,
metricName: 'new_gauge',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
],
}),

View File

@@ -317,7 +317,7 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
@@ -326,7 +326,20 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
];
} else if (
newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.HISTOGRAM ||
newQuery.aggregateAttribute?.type ===
ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
newQuery.aggregations = [
{
timeAggregation: '',
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.P90,
},
];
} else {
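
The defaults added here mirror the metric type, matching what SpaceAggregationOptions used to set via its removed useEffect: SUM metrics default to sum, GAUGE to avg, and histogram types to p90 (with no time aggregation). A hypothetical standalone mapping of the same rules:

```typescript
type MetricAttributeType =
	| 'Sum'
	| 'Gauge'
	| 'Histogram'
	| 'ExponentialHistogram';

function defaultSpaceAggregation(type: MetricAttributeType): string {
	switch (type) {
		case 'Sum':
			return 'sum';
		case 'Gauge':
			return 'avg';
		case 'Histogram':
		case 'ExponentialHistogram':
			// Histogram queries leave timeAggregation empty and go straight to p90.
			return 'p90';
		default:
			return ''; // unknown attribute types keep the old empty fallback
	}
}
```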

View File

@@ -68,7 +68,7 @@ jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small',
version: 1,

View File

@@ -59,7 +59,7 @@ export const optionMenuReturn = {
id: 'dbName--string--tag--true',
},
],
maxLines: 2,
maxLines: 1,
format: 'list',
},
handleOptionsChange: jest.fn(),

View File

@@ -160,7 +160,7 @@ describe('logsLoaderConfig', () => {
expect(result).toEqual({
columns: defaultLogsSelectedColumns,
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table' as LogViewMode,
fontSize: 'small' as FontSize,
version: 1,

View File

@@ -41,7 +41,7 @@ describe('logsUpdaterConfig', () => {
const mockPreferences: Preferences = {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table' as LogViewMode,
fontSize: 'small' as FontSize,
version: 1,
@@ -80,7 +80,7 @@ describe('logsUpdaterConfig', () => {
dataType: DataTypes.String,
},
],
maxLines: 2,
maxLines: 1,
});
logsUpdater.updateColumns(newColumns, PreferenceMode.DIRECT);
@@ -97,7 +97,7 @@ describe('logsUpdaterConfig', () => {
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
);
expect(storedData.selectColumns).toEqual(newColumns);
expect(storedData.maxLines).toBe(2); // Should preserve other fields
expect(storedData.maxLines).toBe(1); // Should preserve other fields
// Should not update saved view preferences
expect(setSavedViewPreferences).not.toHaveBeenCalled();
@@ -153,7 +153,7 @@ describe('logsUpdaterConfig', () => {
dataType: DataTypes.String,
},
],
maxLines: 2,
maxLines: 1,
format: 'table',
});
@@ -206,7 +206,7 @@ describe('logsUpdaterConfig', () => {
dataType: DataTypes.String,
},
],
maxLines: 2,
maxLines: 1,
format: 'table',
});
@@ -216,7 +216,7 @@ describe('logsUpdaterConfig', () => {
const storedData = JSON.parse(
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
);
expect(storedData.maxLines).toBe(2); // Should remain the same
expect(storedData.maxLines).toBe(1); // Should remain the same
expect(storedData.format).toBe('table'); // Should remain the same
// Should update saved view preferences

View File

@@ -113,7 +113,7 @@ describe('tracesUpdaterConfig', () => {
expect(mockSetSavedViewPreferences).toHaveBeenCalledWith({
columns: mockColumns,
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small',
version: 1,

View File

@@ -46,7 +46,7 @@ describe('usePreferenceUpdater', () => {
const mockPreferences: Preferences = {
columns: [],
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table' as LogViewMode,
fontSize: 'small' as FontSize,
version: 1,

View File

@@ -39,7 +39,7 @@ const logsLoaders = {
return {
columns: validLogColumns.length > 0 ? validLogColumns : [],
formatting: {
maxLines: parsed.maxLines ?? 2,
maxLines: parsed.maxLines ?? 1,
format: parsed.format ?? 'table',
fontSize: parsed.fontSize ?? 'small',
version: parsed.version ?? 1,
@@ -65,7 +65,7 @@ const logsLoaders = {
return {
columns: validLogColumns.length > 0 ? validLogColumns : [],
formatting: {
maxLines: options.maxLines ?? 2,
maxLines: options.maxLines ?? 1,
format: options.format ?? 'table',
fontSize: options.fontSize ?? 'small',
version: options.version ?? 1,
@@ -80,7 +80,7 @@ const logsLoaders = {
} => ({
columns: defaultLogsSelectedColumns,
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small' as FontSize,
version: 1,

View File

@@ -23,7 +23,7 @@ const getLogsUpdaterConfig = (
return {
columns: newColumns,
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small' as FontSize,
version: 1,

View File

@@ -21,7 +21,7 @@ const getTracesUpdaterConfig = (
setSavedViewPreferences({
columns: newColumns,
formatting: {
maxLines: 2,
maxLines: 1,
format: 'table',
fontSize: 'small' as FontSize,
version: 1,

View File

@@ -58,7 +58,7 @@ export function usePreferenceSync({
updateExtraDataSelectColumns(parsedExtraData?.selectColumns) ||
defaultLogsSelectedColumns;
formatting = {
maxLines: parsedExtraData?.maxLines ?? 2,
maxLines: parsedExtraData?.maxLines ?? 1,
format: parsedExtraData?.format ?? 'table',
fontSize: parsedExtraData?.fontSize ?? 'small',
version: parsedExtraData?.version ?? 1,

go.mod
View File

@@ -11,7 +11,6 @@ require (
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/bytedance/sonic v1.14.1
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.14.1
github.com/dgraph-io/ristretto/v2 v2.3.0
@@ -90,6 +89,7 @@ require (
require (
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.1 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect

View File

@@ -61,7 +61,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
response := []promotetypes.PromotePath{}
for _, path := range promotedPaths {
fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
path = telemetrytypes.BodyJSONStringSearchPrefix + path
path = telemetrylogs.BodyJSONStringSearchPrefix + path
item := promotetypes.PromotePath{
Path: path,
Promote: true,
@@ -77,7 +77,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
// add the paths that are not promoted but have indexes
for path, indexes := range aggr {
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path = telemetrytypes.BodyJSONStringSearchPrefix + path
path = telemetrylogs.BodyJSONStringSearchPrefix + path
response = append(response, promotetypes.PromotePath{
Path: path,
Indexes: indexes,

View File

@@ -10,11 +10,9 @@ import (
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
type builderQuery[T any] struct {
@@ -250,40 +248,6 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
return nil, err
}
// merge body_json and promoted into body
if q.spec.Signal == telemetrytypes.SignalLogs {
switch typedPayload := payload.(type) {
case *qbtypes.RawData:
for _, rr := range typedPayload.Rows {
seeder := func() error {
body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
if !ok {
return nil
}
promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
if !ok {
return nil
}
seed(promoted, body)
str, err := sonic.MarshalString(body)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
}
rr.Data["body"] = str
return nil
}
err := seeder()
if err != nil {
return nil, err
}
delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
}
payload = typedPayload
}
}
return &qbtypes.Result{
Type: q.kind,
Value: payload,
@@ -411,18 +375,3 @@ func decodeCursor(cur string) (int64, error) {
}
return strconv.ParseInt(string(b), 10, 64)
}
func seed(promoted map[string]any, body map[string]any) {
for key, fromValue := range promoted {
if toValue, ok := body[key]; !ok {
body[key] = fromValue
} else {
if fromValue, ok := fromValue.(map[string]any); ok {
if toValue, ok := toValue.(map[string]any); ok {
seed(fromValue, toValue)
body[key] = toValue
}
}
}
}
}
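
For reference, the deleted seed helper merged promoted JSON paths back into the body map: a promoted value only fills a key missing from body, and nested objects are merged recursively. The same logic rendered in TypeScript (for consistency with the other sketches in this comparison):

```typescript
type JSONObject = { [key: string]: unknown };

function isObject(v: unknown): v is JSONObject {
	return typeof v === 'object' && v !== null && !Array.isArray(v);
}

// Mirrors the removed Go `seed`: body wins on conflicts, promoted fills gaps,
// and nested maps are merged in place.
function seed(promoted: JSONObject, body: JSONObject): void {
	for (const [key, fromValue] of Object.entries(promoted)) {
		if (!(key in body)) {
			body[key] = fromValue;
		} else if (isObject(fromValue) && isObject(body[key])) {
			seed(fromValue, body[key] as JSONObject);
		}
	}
}
```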

View File

@@ -14,7 +14,6 @@ import (
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
var (
@@ -52,6 +51,7 @@ func consume(rows driver.Rows, kind qbtypes.RequestType, queryWindow *qbtypes.Ti
}
func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbtypes.Step, queryName string) (*qbtypes.TimeSeriesData, error) {
colTypes := rows.ColumnTypes()
colNames := rows.Columns()
@@ -354,22 +354,10 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
colTypes := rows.ColumnTypes()
colCnt := len(colNames)
// Helper that decides scan target per column based on DB type
makeScanTarget := func(i int) any {
dbt := strings.ToUpper(colTypes[i].DatabaseTypeName())
if strings.HasPrefix(dbt, "JSON") {
// Since the driver fails to decode JSON/Dynamic into native Go values, we read it as raw bytes
// TODO: check in future if fixed in the driver
var v []byte
return &v
}
return reflect.New(colTypes[i].ScanType()).Interface()
}
// Build a template slice of correctly-typed pointers once
scanTpl := make([]any, colCnt)
for i := range colTypes {
scanTpl[i] = makeScanTarget(i)
for i, ct := range colTypes {
scanTpl[i] = reflect.New(ct.ScanType()).Interface()
}
var outRows []*qbtypes.RawRow
@@ -378,7 +366,7 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// fresh copy of the scan slice (otherwise the driver reuses pointers)
scan := make([]any, colCnt)
for i := range scanTpl {
scan[i] = makeScanTarget(i)
scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
}
if err := rows.Scan(scan...); err != nil {
@@ -395,28 +383,6 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()
// Post-process JSON columns: normalize into structured values
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
if len(x) > 0 {
var v any
if err := sonic.Unmarshal(x, &v); err == nil {
val = v
}
}
case string:
if x != "" {
var v any
if err := sonic.Unmarshal([]byte(x), &v); err == nil {
val = v
}
}
default:
// already a structured type (map[string]any, []any, etc.)
}
}
// special-case: timestamp column
if name == "timestamp" || name == "timestamp_datetime" {
switch t := val.(type) {

View File

@@ -78,7 +78,7 @@ func newProvider(
telemetryMetadataStore,
)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
settings,
telemetryMetadataStore,
@@ -102,13 +102,14 @@ func newProvider(
// Create log statement builder
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, telemetryMetadataStore)
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
@@ -116,6 +117,7 @@ func newProvider(
telemetrylogs.DefaultFullTextColumn,
logFieldMapper,
logConditionBuilder,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder(
@@ -126,6 +128,7 @@ func newProvider(
logResourceFilterStmtBuilder,
logAggExprRewriter,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)

View File

@@ -11,16 +11,13 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"go.uber.org/zap"
)
@@ -131,40 +128,6 @@ func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
return err
}
func (ic *LogParsingPipelineController) getDefaultPipelines() ([]pipelinetypes.GettablePipeline, error) {
defaultPipelines := []pipelinetypes.GettablePipeline{}
if querybuilder.BodyJSONQueryEnabled {
preprocessingPipeline := pipelinetypes.GettablePipeline{
StoreablePipeline: pipelinetypes.StoreablePipeline{
Name: "Default Pipeline - PreProcessing Body",
Alias: "NormalizeBodyDefault",
Enabled: true,
},
Filter: &v3.FilterSet{
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "body",
},
Operator: v3.FilterOperatorExists,
},
},
},
Config: []pipelinetypes.PipelineOperator{
{
ID: uuid.NewString(),
Type: "normalize",
Enabled: true,
If: "body != nil",
},
},
}
defaultPipelines = append(defaultPipelines, preprocessingPipeline)
}
return defaultPipelines, nil
}
// Returns effective list of pipelines including user created
// pipelines and pipelines for installed integrations
func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
@@ -295,13 +258,6 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
return nil, "", err
}
// recommend default pipelines along with user created pipelines
defaultPipelines, err := pc.getDefaultPipelines()
if err != nil {
return nil, "", model.InternalError(fmt.Errorf("failed to get default pipelines: %w", err))
}
pipelinesResp.Pipelines = append(pipelinesResp.Pipelines, defaultPipelines...)
updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
if err != nil {
return nil, "", err

View File

@@ -132,7 +132,7 @@ func SignozLogsToPLogs(logs []model.SignozLog) []plog.Logs {
slRecord.SetSeverityText(log.SeverityText)
slRecord.SetSeverityNumber(plog.SeverityNumber(log.SeverityNumber))
slRecord.Body().FromRaw(log.Body)
slRecord.Body().SetStr(log.Body)
slAttribs := slRecord.Attributes()
for k, v := range log.Attributes_int64 {

View File

@@ -20,6 +20,7 @@ type aggExprRewriter struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -30,6 +31,7 @@ func NewAggExprRewriter(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *aggExprRewriter {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
@@ -39,6 +41,7 @@ func NewAggExprRewriter(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -78,6 +81,7 @@ func (r *aggExprRewriter) Rewrite(
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
r.jsonBodyPrefix,
r.jsonKeyToKey,
)
// Rewrite the first select item (our expression)
@@ -125,6 +129,7 @@ type exprVisitor struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
Modified bool
chArgs []any
@@ -137,6 +142,7 @@ func newExprVisitor(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
@@ -145,6 +151,7 @@ func newExprVisitor(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -183,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -192,7 +199,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
ConditionBuilder: v.conditionBuilder,
FullTextColumn: v.fullTextColumn,
JsonKeyToKey: v.jsonKeyToKey,
}, 0, 0,
)
if err != nil {
return err
@@ -212,7 +219,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
@@ -230,7 +237,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i, arg := range args {
orig := arg.String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return err
}

View File

@@ -24,6 +24,7 @@ func CollisionHandledFinalExpr(
cb qbtypes.ConditionBuilder,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
requiredDataType telemetrytypes.FieldDataType,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) (string, []any, error) {
@@ -44,7 +45,7 @@ func CollisionHandledFinalExpr(
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
if err != nil {
return err
}
@@ -57,8 +58,8 @@ func CollisionHandledFinalExpr(
return nil
}
colName, fieldForErr := fm.FieldFor(ctx, field)
if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
colName, err := fm.FieldFor(ctx, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
keysForField := keys[field.Name]
@@ -81,10 +82,10 @@ func CollisionHandledFinalExpr(
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
if found {
// we found a close match, in the error message send the suggestion
return "", nil, errors.WithAdditionalf(fieldForErr, "%s", correction)
return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", nil, errors.WithAdditionalf(fieldForErr, "field `%s` not found", field.Name)
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
}
} else {
for _, key := range keysForField {
@@ -103,11 +104,10 @@ func CollisionHandledFinalExpr(
return "", nil, err
}
// first if condition covers the older tests and second if condition covers the array conditions
if !BodyJSONQueryEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil {
if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
// TODO(nitya): enable group by on body column?
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
} else if strings.Contains(field.Name, telemetrytypes.ArraySep) || strings.Contains(field.Name, telemetrytypes.ArrayAnyIndex) {
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", field.Name)
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
} else {
colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
}
@@ -204,7 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// While we expect user not to send the mixed data types, it inevitably happens
// So we handle the data type collisions here
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
case telemetrytypes.FieldDataTypeString:
switch v := value.(type) {
case float64:
// try to convert the string value to a number
@@ -219,36 +219,8 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case telemetrytypes.FieldDataTypeFloat64,
telemetrytypes.FieldDataTypeArrayFloat64:
switch v := value.(type) {
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// check if it's a number inside a string
isNumber := false
if _, err := strconv.ParseFloat(v, 64); err == nil {
isNumber = true
}
if !operator.IsComparisonOperator() || !isNumber {
// try to convert the number attribute to string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
} else {
tblFieldName = castFloatHack(tblFieldName)
}
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
case telemetrytypes.FieldDataTypeInt64,
telemetrytypes.FieldDataTypeArrayInt64,
telemetrytypes.FieldDataTypeNumber,
telemetrytypes.FieldDataTypeArrayNumber:
case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
switch v := value.(type) {
// why? ; CH returns an error for a simple check
// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
@@ -286,8 +258,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
}
}
case telemetrytypes.FieldDataTypeBool,
telemetrytypes.FieldDataTypeArrayBool:
case telemetrytypes.FieldDataTypeBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)

View File

@@ -43,6 +43,7 @@ type resourceFilterStatementBuilder[T any] struct {
signal telemetrytypes.Signal
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -75,6 +76,7 @@ func NewLogResourceFilterStatementBuilder(
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
@@ -85,6 +87,7 @@ func NewLogResourceFilterStatementBuilder(
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -97,18 +100,12 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
keySelectors = append(keySelectors, whereClauseSelectors...)
}
// exclude out the body related key selectors
filteredKeySelectors := []*telemetrytypes.FieldKeySelector{}
for idx := range keySelectors {
if keySelectors[idx].FieldContext == telemetrytypes.FieldContextBody {
continue
}
keySelectors[idx].Signal = b.signal
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
}
return filteredKeySelectors
return keySelectors
}
// Build builds a SQL query based on the given parameters
@@ -171,7 +168,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// there is no need for "key" not found error for resource filtering
IgnoreNotFoundKeys: true,
Variables: variables,
}, start, end)
if err != nil {
return err

View File

@@ -16,12 +16,11 @@ import (
)
type conditionBuilder struct {
fm qbtypes.FieldMapper
metadataStore telemetrytypes.MetadataStore
fm qbtypes.FieldMapper
}
func NewConditionBuilder(fm qbtypes.FieldMapper, metadataStore telemetrytypes.MetadataStore) *conditionBuilder {
return &conditionBuilder{fm: fm, metadataStore: metadataStore}
func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
return &conditionBuilder{fm: fm}
}
func (c *conditionBuilder) conditionFor(
@@ -31,34 +30,22 @@ func (c *conditionBuilder) conditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
return "", err
}
// For JSON columns, preserve the original value type (numeric, bool, etc.)
// Only format to string for non-JSON columns that need string formatting
isJSONColumn := column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled && key.FieldContext == telemetrytypes.FieldContextBody
if !isJSONColumn {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
}
if isJSONColumn {
cond, err := c.buildJSONCondition(ctx, key, operator, value, sb)
if err != nil {
return "", err
}
return cond, nil
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
return "", err
@@ -176,7 +163,9 @@ func (c *conditionBuilder) conditionFor(
// in the UI based query builder, `exists` and `not exists` are used for
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled {
// Check if this is a body JSON search - by FieldContext
if key.FieldContext == telemetrytypes.FieldContextBody {
if operator == qbtypes.FilterOperatorExists {
return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
} else {
@@ -258,7 +247,7 @@ func (c *conditionBuilder) ConditionFor(
return "", err
}
if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
// with an exception for body json search
field, _ := c.fm.FieldFor(ctx, key)

View File

@@ -373,8 +373,7 @@ func TestConditionFor(t *testing.T) {
}
fm := NewFieldMapper()
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
@@ -427,8 +426,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
}
fm := NewFieldMapper()
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
@@ -687,8 +685,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
}
fm := NewFieldMapper()
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()

View File

@@ -2,6 +2,7 @@ package telemetrylogs
import (
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -36,6 +37,8 @@ const (
BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
ArraySep = jsontypeexporter.ArraySeparator
ArrayAnyIndex = "[*]."
)
var (
@@ -45,7 +48,8 @@ var (
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
}
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
BodyJSONStringSearchPrefix = `body.`
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
"body": {
Name: "body",
Signal: telemetrytypes.SignalLogs,

View File

@@ -6,9 +6,7 @@ import (
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz-otel-collector/utils"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -30,11 +28,6 @@ var (
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
"body": {Name: "body", Type: schema.ColumnTypeString},
LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
MaxDynamicTypes: utils.ToPointer(uint(32)),
MaxDynamicPaths: utils.ToPointer(uint(0)),
}},
LogsV2BodyPromotedColumn: {Name: LogsV2BodyPromotedColumn, Type: schema.JSONColumnType{}},
"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeString,
@@ -90,23 +83,13 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
return logsV2Columns["attributes_bool"], nil
}
case telemetrytypes.FieldContextBody:
// Body context is for JSON body fields
// Use body_json if feature flag is enabled
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
}
// Fall back to legacy body column
// body context fields are stored in the body column
return logsV2Columns["body"], nil
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
col, ok := logsV2Columns[key.Name]
if !ok {
// check if the key has body JSON search
if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
// Use body_json if feature flag is enabled and we have a body condition builder
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
}
// Fall back to legacy body column
// check if the key has body JSON search (backward compatibility)
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
return logsV2Columns["body"], nil
}
return nil, qbtypes.ErrColumnNotFound
@@ -126,34 +109,21 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
switch column.Type.GetType() {
case schema.ColumnTypeEnumJSON:
// json is only supported for resource context as of now
switch key.FieldContext {
case telemetrytypes.FieldContextResource:
oldColumn := logsV2Columns["resources_string"]
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
// once clickHouse dependency is updated, we need to check if we can remove it.
if key.Materialized {
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
}
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
case telemetrytypes.FieldContextBody:
if strings.Contains(key.Name, telemetrytypes.ArraySep) || strings.Contains(key.Name, telemetrytypes.ArrayAnyIndex) {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", key.Name)
}
fieldExpr := BodyJSONColumnPrefix + fmt.Sprintf("`%s`", key.Name)
expr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldExpr, key.JSONDataType.StringValue())
if key.Materialized {
promotedFieldExpr := BodyPromotedColumnPrefix + fmt.Sprintf("`%s`", key.Name)
expr = fmt.Sprintf("coalesce(%s, %s)", expr, fmt.Sprintf("dynamicElement(%s, '%s')", promotedFieldExpr, key.JSONDataType.StringValue()))
}
// returning qbtypes.ErrColumnNotFound is a hack that will trigger the fallback expr logic to include all the types for the key
return expr, qbtypes.ErrColumnNotFound
default:
if key.FieldContext != telemetrytypes.FieldContextResource {
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
}
oldColumn := logsV2Columns["resources_string"]
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
// once clickHouse dependency is updated, we need to check if we can remove it.
if key.Materialized {
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
} else {
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
}
case schema.ColumnTypeEnumLowCardinality:
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
case schema.ColumnTypeEnumString:

View File

@@ -11,7 +11,7 @@ import (
// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
@@ -33,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)
@@ -47,7 +47,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
// TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
@@ -69,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
@@ -15,7 +14,8 @@ import (
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, telemetrytypestest.NewMockMetadataStore())
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()

View File

@@ -16,7 +16,7 @@ import (
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
@@ -2423,7 +2423,7 @@ func TestFilterExprLogs(t *testing.T) {
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogsConflictNegation(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()

View File

@@ -84,6 +84,7 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
}
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
dataType, value := inferDataType(value, operator, key)
// for array types, we need to extract the value from the JSON_QUERY

View File

@@ -30,7 +30,7 @@ func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, paren
}
part := pb.parts[index]
pathSoFar := strings.Join(pb.parts[:index+1], telemetrytypes.ArraySep)
pathSoFar := strings.Join(pb.parts[:index+1], ArraySep)
isTerminal := index == len(pb.parts)-1
// Calculate progression parameters based on parent's values
@@ -110,8 +110,8 @@ func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbt
// TODO: PlanJSON requires the Start and End of the Query to select correct column between promoted and body_json using
// creation time in distributed_promoted_paths
path := strings.ReplaceAll(key.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
parts := strings.Split(path, telemetrytypes.ArraySep)
path := strings.ReplaceAll(key.Name, ArrayAnyIndex, ArraySep)
parts := strings.Split(path, ArraySep)
pb := &JSONAccessPlanBuilder{
key: key,

View File

@@ -814,6 +814,9 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
// testTypeSet returns a map of path->types and a getTypes function for testing
// This represents the type information available in the test JSON structure
//
// TODO(Piyush): Remove this unparam nolint
// nolint:unparam
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
types := map[string][]telemetrytypes.JSONDataType{
"user.name": {telemetrytypes.String},

View File

@@ -1,455 +0,0 @@
package telemetrylogs
import (
"context"
"fmt"
"slices"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
)
var (
CodeCurrentNodeNil = errors.MustNewCode("current_node_nil")
CodeNextNodeNil = errors.MustNewCode("next_node_nil")
CodeNestedExpressionsEmpty = errors.MustNewCode("nested_expressions_empty")
CodeGroupByPlanEmpty = errors.MustNewCode("group_by_plan_empty")
CodeArrayMapExpressionsEmpty = errors.MustNewCode("array_map_expressions_empty")
CodePromotedPlanMissing = errors.MustNewCode("promoted_plan_missing")
CodeArrayNavigationFailed = errors.MustNewCode("array_navigation_failed")
)
func (c *conditionBuilder) getTypes(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
keys, _, err := c.metadataStore.GetKeys(ctx, &telemetrytypes.FieldKeySelector{
Name: path,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
Signal: telemetrytypes.SignalLogs,
Limit: 1,
})
if err != nil {
return nil, err
}
types := []telemetrytypes.JSONDataType{}
for _, key := range keys[path] {
if key.JSONDataType != nil {
types = append(types, *key.JSONDataType)
}
}
return types, nil
}
// buildJSONCondition builds the full WHERE condition for body_json JSON paths
func (c *conditionBuilder) buildJSONCondition(ctx context.Context, key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
plan, err := PlanJSON(ctx, key, operator, value, c.getTypes)
if err != nil {
return "", err
}
conditions := []string{}
for _, plan := range plan {
condition, err := c.emitPlannedCondition(plan, operator, value, sb)
if err != nil {
return "", err
}
conditions = append(conditions, condition)
}
return sb.Or(conditions...), nil
}
// emitPlannedCondition handles paths with array traversal
func (c *conditionBuilder) emitPlannedCondition(plan *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Build traversal + terminal recursively per-hop
compiled, err := c.recurseArrayHops(plan, operator, value, sb)
if err != nil {
return "", err
}
// sb.AddWhereClause(sqlbuilder.NewWhereClause().AddWhereExpr(sb.Args, compiled))
return compiled, nil
}
// buildTerminalCondition creates the innermost condition
func (c *conditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Use the parent's alias + current field name for the full path
fieldPath := node.FieldPath()
if node.TerminalConfig.ElemType.IsArray {
// switch operator for array membership checks
switch operator {
case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
operator = qbtypes.FilterOperatorEqual
case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
operator = qbtypes.FilterOperatorNotEqual
}
arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
if err != nil {
return "", err
}
return arrayCond, nil
}
conditions := []string{}
elemType := node.TerminalConfig.ElemType
fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
fieldExpr, value = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, fieldExpr, operator)
indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
return index.Type == elemType && index.ColumnExpression == fieldPath
})
if elemType.IndexSupported && indexed {
indexedExpr := assumeNotNull(fieldPath, elemType)
emptyValue := func() any {
switch elemType {
case telemetrytypes.String:
return ""
case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
return 0
default:
return nil
}
}()
// switch the operator and value for exists and not exists
switch operator {
case qbtypes.FilterOperatorExists:
operator = qbtypes.FilterOperatorNotEqual
value = emptyValue
case qbtypes.FilterOperatorNotExists:
operator = qbtypes.FilterOperatorEqual
value = emptyValue
default:
// do nothing
}
cond, err := c.applyOperator(sb, indexedExpr, operator, value)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
// Switch operator to EXISTS
operator = qbtypes.FilterOperatorExists
}
cond, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
if len(conditions) > 1 {
return sb.And(conditions...), nil
}
return cond, nil
}
// buildArrayMembershipCondition handles array membership checks
func (c *conditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
arrayPath := node.FieldPath()
// create typed array out of a dynamic array
filteredDynamicExpr := func() string {
baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
node.TerminalConfig.ValueType.StringValue(),
node.TerminalConfig.ValueType.StringValue(),
baseArrayDynamicExpr)
}
typedArrayExpr := func() string {
return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
}
var arrayExpr string
if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
arrayExpr = filteredDynamicExpr()
} else {
arrayExpr = typedArrayExpr()
}
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, "x", operator)
op, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}
// recurseArrayHops recursively builds array traversal conditions
func (c *conditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
if current == nil {
return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
}
if current.IsTerminal {
terminalCond, err := c.buildTerminalCondition(current, operator, value, sb)
if err != nil {
return "", err
}
return terminalCond, nil
}
currAlias := current.Alias()
fieldPath := current.FieldPath()
// Determine availability of Array(JSON) and Array(Dynamic) at this hop
hasArrayJSON := current.Branches[telemetrytypes.BranchJSON] != nil
hasArrayDynamic := current.Branches[telemetrytypes.BranchDynamic] != nil
// Then, at this hop, compute child per branch and wrap
branches := make([]string, 0, 2)
if hasArrayJSON {
jsonArrayExpr := fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')", fieldPath, current.MaxDynamicTypes, current.MaxDynamicPaths)
childGroupJSON, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchJSON], operator, value, sb)
if err != nil {
return "", err
}
branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupJSON, jsonArrayExpr))
}
if hasArrayDynamic {
dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", fieldPath)
dynFilteredExpr := fmt.Sprintf("arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
// Create the Query for Dynamic array
childGroupDyn, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchDynamic], operator, value, sb)
if err != nil {
return "", err
}
branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupDyn, dynFilteredExpr))
}
if len(branches) == 1 {
return branches[0], nil
}
return fmt.Sprintf("(%s)", strings.Join(branches, " OR ")), nil
}
func (c *conditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, fieldExpr string, operator qbtypes.FilterOperator, value any) (string, error) {
switch operator {
case qbtypes.FilterOperatorEqual:
return sb.E(fieldExpr, value), nil
case qbtypes.FilterOperatorNotEqual:
return sb.NE(fieldExpr, value), nil
case qbtypes.FilterOperatorGreaterThan:
return sb.G(fieldExpr, value), nil
case qbtypes.FilterOperatorGreaterThanOrEq:
return sb.GE(fieldExpr, value), nil
case qbtypes.FilterOperatorLessThan:
return sb.LT(fieldExpr, value), nil
case qbtypes.FilterOperatorLessThanOrEq:
return sb.LE(fieldExpr, value), nil
case qbtypes.FilterOperatorLike:
return sb.Like(fieldExpr, value), nil
case qbtypes.FilterOperatorNotLike:
return sb.NotLike(fieldExpr, value), nil
case qbtypes.FilterOperatorILike:
return sb.ILike(fieldExpr, value), nil
case qbtypes.FilterOperatorNotILike:
return sb.NotILike(fieldExpr, value), nil
case qbtypes.FilterOperatorRegexp:
return fmt.Sprintf("match(%s, %s)", fieldExpr, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
return fmt.Sprintf("NOT match(%s, %s)", fieldExpr, sb.Var(value)), nil
case qbtypes.FilterOperatorContains:
return sb.ILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorNotContains:
return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
values, ok := value.([]any)
if !ok {
values = []any{value}
}
conds := []string{}
for _, v := range values {
if operator == qbtypes.FilterOperatorIn {
conds = append(conds, sb.E(fieldExpr, v))
} else {
conds = append(conds, sb.NE(fieldExpr, v))
}
}
if operator == qbtypes.FilterOperatorIn {
return sb.Or(conds...), nil
}
return sb.And(conds...), nil
case qbtypes.FilterOperatorExists:
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
case qbtypes.FilterOperatorNotExists:
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
default:
return "", qbtypes.ErrUnsupportedOperator
}
}
// GroupByArrayJoinInfo contains information about array joins needed for GroupBy
type GroupByArrayJoinInfo struct {
ArrayJoinClauses []string // ARRAY JOIN clauses to add to FROM clause
TerminalExpr string // Terminal field expression for SELECT/GROUP BY
}
// BuildGroupBy builds GroupBy information for body JSON fields using arrayConcat pattern
func (c *conditionBuilder) BuildGroupBy(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*GroupByArrayJoinInfo, error) {
path := strings.TrimPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix)
plan, err := PlanJSON(ctx, key, qbtypes.FilterOperatorExists, nil, c.getTypes)
if err != nil {
return nil, err
}
if len(plan) == 0 {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
"Could not find any valid paths for: %s", path)
}
if plan[0].IsTerminal {
node := plan[0]
expr := fmt.Sprintf("dynamicElement(%s, '%s')", node.FieldPath(), node.TerminalConfig.ElemType.StringValue())
if key.Materialized {
if len(plan) < 2 {
return nil, errors.Newf(errors.TypeUnexpected, CodePromotedPlanMissing,
"plan length is less than 2 for promoted path: %s", path)
}
// promoted column first then body_json column
// TODO(Piyush): Change this in future for better performance
expr = fmt.Sprintf("coalesce(%s, %s)",
fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
expr,
)
}
return &GroupByArrayJoinInfo{
ArrayJoinClauses: []string{},
TerminalExpr: expr,
}, nil
}
// Build arrayConcat pattern directly from the tree structure
arrayConcatExpr, err := c.buildArrayConcat(plan)
if err != nil {
return nil, err
}
// Create single ARRAY JOIN clause with arrayFlatten
arrayJoinClause := fmt.Sprintf("ARRAY JOIN %s AS `%s`", arrayConcatExpr, key.Name)
return &GroupByArrayJoinInfo{
ArrayJoinClauses: []string{arrayJoinClause},
TerminalExpr: fmt.Sprintf("`%s`", key.Name),
}, nil
}
// buildArrayConcat builds the arrayConcat pattern directly from the tree structure
func (c *conditionBuilder) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (string, error) {
if len(plan) == 0 {
return "", errors.Newf(errors.TypeInternal, CodeGroupByPlanEmpty, "group by plan is empty while building arrayConcat")
}
// Build arrayMap expressions for ALL available branches at the root level
var arrayMapExpressions []string
for _, node := range plan {
hasJSON := node.Branches[telemetrytypes.BranchJSON] != nil
hasDynamic := node.Branches[telemetrytypes.BranchDynamic] != nil
if hasJSON {
jsonExpr, err := c.buildArrayMap(node, telemetrytypes.BranchJSON)
if err != nil {
return "", err
}
arrayMapExpressions = append(arrayMapExpressions, jsonExpr)
}
if hasDynamic {
dynamicExpr, err := c.buildArrayMap(node, telemetrytypes.BranchDynamic)
if err != nil {
return "", err
}
arrayMapExpressions = append(arrayMapExpressions, dynamicExpr)
}
}
if len(arrayMapExpressions) == 0 {
return "", errors.Newf(errors.TypeInternal, CodeArrayMapExpressionsEmpty, "array map expressions are empty while building arrayConcat")
}
// Build the arrayConcat expression
arrayConcatExpr := fmt.Sprintf("arrayConcat(%s)", strings.Join(arrayMapExpressions, ", "))
// Wrap with arrayFlatten
arrayFlattenExpr := fmt.Sprintf("arrayFlatten(%s)", arrayConcatExpr)
return arrayFlattenExpr, nil
}
// buildArrayMap builds the arrayMap expression for a specific branch, handling all sub-branches
func (c *conditionBuilder) buildArrayMap(currentNode *telemetrytypes.JSONAccessNode, branchType telemetrytypes.JSONAccessBranchType) (string, error) {
if currentNode == nil {
return "", errors.Newf(errors.TypeInternal, CodeCurrentNodeNil, "current node is nil while building arrayMap")
}
nextNode := currentNode.Branches[branchType]
if nextNode == nil {
return "", errors.Newf(errors.TypeInternal, CodeNextNodeNil, "next node is nil while building arrayMap")
}
// Build the array expression for this level
var arrayExpr string
if branchType == telemetrytypes.BranchJSON {
// Array(JSON) branch
arrayExpr = fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')",
currentNode.FieldPath(), currentNode.MaxDynamicTypes, currentNode.MaxDynamicPaths)
} else {
// Array(Dynamic) branch - filter for JSON objects
dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", currentNode.FieldPath())
arrayExpr = fmt.Sprintf("arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
}
// If this is the terminal level, return the simple arrayMap
if nextNode.IsTerminal {
dynamicElementExpr := fmt.Sprintf("dynamicElement(%s, '%s')", nextNode.FieldPath(),
nextNode.TerminalConfig.ElemType.StringValue(),
)
return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), dynamicElementExpr, arrayExpr), nil
}
// For non-terminal nodes, we need to handle ALL possible branches at the next level
var nestedExpressions []string
hasJSON := nextNode.Branches[telemetrytypes.BranchJSON] != nil
hasDynamic := nextNode.Branches[telemetrytypes.BranchDynamic] != nil
if hasJSON {
jsonNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchJSON)
if err != nil {
return "", err
}
nestedExpressions = append(nestedExpressions, jsonNested)
}
if hasDynamic {
dynamicNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchDynamic)
if err != nil {
return "", err
}
nestedExpressions = append(nestedExpressions, dynamicNested)
}
// If we have multiple nested expressions, we need to concat them
var nestedExpr string
if len(nestedExpressions) == 1 {
nestedExpr = nestedExpressions[0]
} else if len(nestedExpressions) > 1 {
// This shouldn't happen in our current tree structure, but handle it just in case
nestedExpr = fmt.Sprintf("arrayConcat(%s)", strings.Join(nestedExpressions, ", "))
} else {
return "", errors.Newf(errors.TypeInternal, CodeNestedExpressionsEmpty, "nested expressions are empty while building arrayMap")
}
return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), nestedExpr, arrayExpr), nil
}
func assumeNotNull(column string, elemType telemetrytypes.JSONDataType) string {
return fmt.Sprintf("assumeNotNull(dynamicElement(%s, '%s'))", column, elemType.StringValue())
}
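Since the deleted planner is easiest to grasp from its output, here is a minimal, self-contained sketch of the condition shapes it generated. Every concrete alias, column path, and max_dynamic_* value below is an assumption made for illustration; only the expression templates come from the code above.

package main

import "fmt"

func main() {
	// recurseArrayHops + buildTerminalCondition, for a one-hop array path
	// such as body.items[*].name = 'shoe' (alias x0 and the 16/64 limits
	// are assumed):
	traversal := `arrayExists(x0 -> dynamicElement(x0.name, 'String') = ?, ` +
		`dynamicElement(body_json.items, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=64))'))`

	// applyOperator expands IN into an OR of equals (NOT IN into an AND of
	// not-equals) so every comparison can hit the same indexes:
	inExpansion := `(dynamicElement(body_json.status, 'String') = ? OR ` +
		`dynamicElement(body_json.status, 'String') = ?)`

	// BuildGroupBy wraps non-terminal plans in arrayFlatten(arrayConcat(...))
	// and ARRAY JOINs the result so each array element becomes its own row:
	groupBy := "ARRAY JOIN arrayFlatten(arrayConcat(arrayMap(x0 -> " +
		"dynamicElement(x0.status, 'String'), dynamicElement(body_json.items, " +
		"'Array(JSON(max_dynamic_types=16, max_dynamic_paths=64))')))) AS `body.items[*].status`"

	fmt.Println(traversal)
	fmt.Println(inExpansion)
	fmt.Println(groupBy)
}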

File diff suppressed because one or more lines are too long

View File

@@ -23,6 +23,7 @@ type logQueryStatementBuilder struct {
aggExprRewriter qbtypes.AggExprRewriter
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -36,6 +37,7 @@ func NewLogQueryStatementBuilder(
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
aggExprRewriter qbtypes.AggExprRewriter,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *logQueryStatementBuilder {
logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
@@ -48,6 +50,7 @@ func NewLogQueryStatementBuilder(
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
aggExprRewriter: aggExprRewriter,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -168,25 +171,6 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
overallMatch = overallMatch || findMatch(IntrinsicFields)
}
if strings.Contains(k.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
k.Name = strings.TrimPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix)
fieldKeys, found := keys[k.Name]
if found && len(fieldKeys) > 0 {
k.FieldContext = fieldKeys[0].FieldContext
k.FieldDataType = fieldKeys[0].FieldDataType
k.Materialized = fieldKeys[0].Materialized
k.JSONDataType = fieldKeys[0].JSONDataType
k.Indexes = fieldKeys[0].Indexes
overallMatch = true // because we found a match
} else {
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
k.FieldContext = telemetrytypes.FieldContextBody
k.FieldDataType = telemetrytypes.FieldDataTypeString
k.JSONDataType = &telemetrytypes.String
}
}
if !overallMatch {
// check if all the key for the given field have been materialized, if so
// set the key to materialized
@@ -237,9 +221,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
cteArgs = append(cteArgs, args)
}
// Collect array join info for body JSON fields
var arrayJoinClauses []string
// Select timestamp and id by default
sb.Select(LogsV2TimestampColumn)
sb.SelectMore(LogsV2IDColumn)
@@ -253,10 +234,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
sb.SelectMore(LogsV2ScopeNameColumn)
sb.SelectMore(LogsV2ScopeVersionColumn)
sb.SelectMore(LogsV2BodyColumn)
if querybuilder.BodyJSONQueryEnabled {
sb.SelectMore(LogsV2BodyJSONColumn)
sb.SelectMore(LogsV2BodyPromotedColumn)
}
sb.SelectMore(LogsV2AttributesStringColumn)
sb.SelectMore(LogsV2AttributesNumberColumn)
sb.SelectMore(LogsV2AttributesBoolColumn)
@@ -269,7 +246,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn {
continue
}
// get column expression for the field - use array index directly to avoid pointer to loop variable
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
if err != nil {
@@ -279,12 +255,8 @@ func (b *logQueryStatementBuilder) buildListQuery(
}
}
// From table (inject ARRAY JOINs if collected)
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
// From table
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -358,17 +330,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
var allGroupByArgs []any
// Collect array join info for body JSON fields
var arrayJoinClauses []string
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
@@ -390,13 +358,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
}
// Add FROM clause
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
@@ -442,6 +404,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
// Stitch it all together: WITH … SELECT …
@@ -468,6 +431,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
// Stitch it all together: WITH … SELECT …
@@ -514,15 +478,11 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
// Collect array join info for body JSON fields
var arrayJoinClauses []string
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
@@ -548,12 +508,8 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
}
}
// From table (inject ARRAY JOINs if collected)
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
// From table
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -698,6 +654,7 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
start, end uint64,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
return b.resourceFilterStmtBuilder.Build(
ctx,
start,

View File

@@ -32,6 +32,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
cb,
mockMetadataStore,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
}
@@ -196,11 +197,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -212,6 +213,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -316,11 +318,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -332,6 +334,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -424,11 +427,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -440,11 +443,10 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
//
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
@@ -489,8 +491,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "status",
FieldContext: telemetrytypes.FieldContextBody,
Name: "body.status",
},
},
},
@@ -500,11 +501,11 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -516,6 +517,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -595,11 +597,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -611,6 +613,7 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)

View File

@@ -47,6 +47,9 @@ var (
//
// searchOperator: LIKE for pattern matching, EQUAL for exact match
// Returns: (paths, complete, error)
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
@@ -132,7 +135,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
orClauses := []string{}
for _, fieldKeySelector := range fieldKeySelectors {
// replace [*] with []
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrylogs.ArrayAnyIndex, telemetrylogs.ArraySep)
// Extract search text for body JSON keys
keyName := CleanPathPrefixes(fieldKeySelector.Name)
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
@@ -159,11 +162,13 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
return query, args, limit, nil
}
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
filteredPaths := []string{}
for _, path := range paths {
if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
continue
}
filteredPaths = append(filteredPaths, path)
@@ -291,7 +296,7 @@ func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...str
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
path = CleanPathPrefixes(path)
if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
}
@@ -451,7 +456,7 @@ func derefValue(v any) any {
// IsPathPromoted checks if a specific path is promoted
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
split := strings.Split(path, telemetrytypes.ArraySep)
split := strings.Split(path, telemetrylogs.ArraySep)
query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
if err != nil {
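The root-segment behaviour above is clearer with a concrete path. A small self-contained sketch, assuming the array separator resolves to "[]." (the real value comes from jsontypeexporter.ArraySeparator):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// IsPathPromoted consults only the first segment of an array path, so
	// "orders[].items" is looked up in distributed_promoted_paths as "orders".
	const arraySep = "[]." // assumed value of telemetrylogs.ArraySep
	path := "orders[].items"
	split := strings.Split(path, arraySep)
	fmt.Println(split[0]) // orders
}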

View File

@@ -572,14 +572,6 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
}
if querybuilder.BodyJSONQueryEnabled {
bodyJSONPaths, finished, err := t.getBodyJSONPaths(ctx, fieldKeySelectors) // LIKE for pattern matching
if err != nil {
t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
}
keys = append(keys, bodyJSONPaths...)
complete = complete && finished
}
return keys, complete, nil
}
@@ -986,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
}, 0, 0)
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {
@@ -1010,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
@@ -1172,10 +1164,6 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
limit = 50
}
if strings.HasPrefix(fieldValueSelector.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
return t.ListJSONValues(ctx, fieldValueSelector.Name, limit)
}
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)
if fieldValueSelector.Name != "" {

View File

@@ -32,8 +32,6 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
options.MaxIdleConns = config.Connection.MaxIdleConns
options.MaxOpenConns = config.Connection.MaxOpenConns
options.DialTimeout = config.Connection.DialTimeout
// This is to avoid the driver decoding issues with JSON columns
options.Settings["output_format_native_write_json_as_string"] = 1
chConn, err := clickhouse.Open(options)
if err != nil {

View File

@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
}, start, end)
}, start, end)
if err != nil {
return nil, err

View File

@@ -357,7 +357,7 @@ func TestStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -525,7 +525,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -681,7 +681,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()

View File

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
ConditionBuilder: b.stmtBuilder.cb,
FieldKeys: keys,
SkipResourceFilter: true,
}, b.start, b.end,
}, b.start, b.end,
)
if err != nil {
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -552,6 +552,7 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {
@@ -661,6 +662,7 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {
@@ -800,6 +802,7 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {

View File

@@ -390,7 +390,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(
@@ -506,7 +506,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(

View File

@@ -44,7 +44,7 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
mockMetadataStore,
)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),

View File

@@ -6,6 +6,7 @@ import (
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -32,7 +33,7 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
}
if strings.Contains(i.Path, telemetrytypes.ArraySep) || strings.Contains(i.Path, telemetrytypes.ArrayAnyIndex) {
if strings.Contains(i.Path, telemetrylogs.ArraySep) || strings.Contains(i.Path, telemetrylogs.ArrayAnyIndex) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
}
@@ -40,12 +41,12 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
}
if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
if !strings.HasPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
}
// remove the "body." prefix from the path
i.Path = strings.TrimPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix)
i.Path = strings.TrimPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix)
isCardinal := keycheck.IsCardinal(i.Path)
if isCardinal {
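A quick sketch of the normalization this validator applies (the prefix constant mirrors telemetrylogs.BodyJSONStringSearchPrefix, which is "body." per the hunks below):

package main

import (
	"fmt"
	"strings"
)

const bodyPrefix = "body." // mirrors telemetrylogs.BodyJSONStringSearchPrefix

func main() {
	// ValidateAndSetDefaults rejects paths without the "body." prefix and
	// strips it before the cardinality check, so "body.user.id" is stored
	// as "user.id".
	p := "body.user.id"
	if !strings.HasPrefix(p, bodyPrefix) {
		fmt.Println("path must start with `body.`")
		return
	}
	fmt.Println(strings.TrimPrefix(p, bodyPrefix)) // user.id
}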

View File

@@ -4,7 +4,6 @@ import (
"fmt"
"strings"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -18,13 +17,9 @@ var (
FieldSelectorMatchTypeFuzzy = FieldSelectorMatchType{valuer.NewString("fuzzy")}
)
const (
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
// e.g., "body.status" where "body." is the prefix
BodyJSONStringSearchPrefix = "body."
ArraySep = jsontypeexporter.ArraySeparator
ArrayAnyIndex = "[*]."
)
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
// e.g., "body.status" where "body." is the prefix
const BodyJSONStringSearchPrefix = `body.`
type TelemetryFieldKey struct {
Name string `json:"name"`