Compare commits

..

19 Commits

Author SHA1 Message Date
nityanandagohain 8be9a79d56 fix: use name from evolutions 2025-12-26 23:33:34 +05:30
nityanandagohain 471ad88971 fix: address changes and add tests 2025-12-26 23:25:43 +05:30
nityanandagohain a5c46beeec Merge remote-tracking branch 'origin/main' into issue_3017 2025-12-23 18:21:40 +05:30
nityanandagohain 41f720950d fix: minor changes 2025-12-09 10:46:09 +07:00
nityanandagohain d9bce4a3c6 fix: lint issues 2025-12-08 22:06:38 +07:00
nityanandagohain a5ac40c33c fix: test 2025-12-08 21:40:30 +07:00
nityanandagohain 86b1366d4a fix: comments 2025-12-08 21:31:26 +07:00
nityanandagohain eddb43a901 fix: aggregation 2025-12-08 21:30:07 +07:00
nityanandagohain 505cfe2314 fix: use orgId properly 2025-12-08 20:57:17 +07:00
nityanandagohain 6e54ee822a fix: use proper cache 2025-12-08 20:46:56 +07:00
nityanandagohain d88cb8aba4 fix: minor changes 2025-12-08 20:18:34 +07:00
nityanandagohain b823b2a1e1 fix: minor changes 2025-12-08 20:14:01 +07:00
nityanandagohain 7cfb7118a3 fix: update tests 2025-12-08 19:54:01 +07:00
nityanandagohain 59dfe7c0ed fix: remove goroutine 2025-12-08 19:11:22 +07:00
nityanandagohain 96b68b91c9 fix: update comment 2025-12-06 08:29:25 +05:30
nityanandagohain be6ce8d4f1 fix: update fetch code 2025-12-06 08:27:53 +05:30
nityanandagohain 1fc58695c6 fix: tests 2025-12-06 06:47:29 +05:30
nityanandagohain 43450a187e Merge remote-tracking branch 'origin/main' into issue_3017 2025-12-06 05:15:26 +05:30
nityanandagohain f4666d9c97 feat: time aware dynamic field mapper 2025-11-25 09:59:12 +05:30
86 changed files with 1044 additions and 3083 deletions

View File

@@ -853,7 +853,7 @@ paths:
get:
deprecated: false
description: This endpoint promotes and indexes paths
operationId: ListPromotedAndIndexedPaths
operationId: PromotePaths
responses:
"200":
content:
@@ -883,11 +883,13 @@ paths:
description: Internal Server Error
summary: Promote and index paths
tags:
- promoted_paths
- logs
- json_logs
post:
deprecated: false
description: This endpoint promotes and indexes paths
operationId: HandlePromoteAndIndexPaths
operationId: PromotePaths
requestBody:
content:
application/json:
@@ -913,7 +915,9 @@ paths:
description: Internal Server Error
summary: Promote and index paths
tags:
- promoted_paths
- logs
- json_logs
/api/v1/org/preferences:
get:
deprecated: false
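
Both operations in this hunk now share the operationId PromotePaths. A minimal client sketch for the POST operation follows; the concrete route and request schema are not visible in this hunk, so the path, payload fields, and helper name are assumptions for illustration only.

import axios from 'api';

// Hypothetical payload: the spec hunk above does not show the request schema.
interface PromotePathsPayload {
	paths: string[];
}

// Placeholder route; substitute the real promote-and-index-paths path from the full spec.
export const promotePaths = async (
	payload: PromotePathsPayload,
): Promise<void> => {
	await axios.post('/api/v1/logs/json/promote-paths', payload);
};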

View File

@@ -1,7 +0,0 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import { GlobalConfigDataProps } from 'types/api/globalConfig/types';
export const getGlobalConfig = (): Promise<
AxiosResponse<GlobalConfigDataProps>
> => axios.get(`/global/config`);
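
This deleted getGlobalConfig fetcher is superseded elsewhere in the diff by useGetDeploymentsData. A minimal sketch of the replacement fetcher follows: only the cluster.region.dns and cluster.region.name fields are grounded in this diff (the ingestion-settings hunks below read them); the endpoint path and the surrounding type names are assumptions.

import axios from 'api';
import { AxiosResponse } from 'axios';

// Only cluster.region.dns and cluster.region.name are grounded in this diff;
// the rest of the shape is an assumption.
interface DeploymentsDataProps {
	data: {
		cluster: {
			region: {
				dns: string;
				name: string;
			};
		};
	};
}

// Placeholder endpoint; the real implementation lives behind
// hooks/CustomDomain/useGetDeploymentsData.
export const getDeploymentsData = (): Promise<
	AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);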

View File

@@ -1,29 +0,0 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
export const getMetricMetadata = async (
metricName: string,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
try {
const encodedMetricName = encodeURIComponent(metricName);
const response = await axios.get(
`/metrics/metadata?metricName=${encodedMetricName}`,
{
signal,
headers,
},
);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
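
The removed getMetricMetadata fetcher threaded an optional AbortSignal through to axios. A minimal caller sketch, assuming the fetcher were still importable; the metric name and the success/error discriminator are illustrative assumptions.

// Illustrative caller for the fetcher removed above, showing how the
// optional AbortSignal was driven; the metric name is an example value.
const controller = new AbortController();

getMetricMetadata('http.server.duration', controller.signal).then((result) => {
	// Assumed discriminator: SuccessResponseV2 carries httpStatusCode + data.
	if ('httpStatusCode' in result) {
		console.log(result.data);
	}
});

// Cancel the in-flight request, e.g. on component unmount.
controller.abort();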

View File

@@ -560,10 +560,6 @@
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
}
}
@@ -573,10 +569,6 @@
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
.ant-select-arrow {

View File

@@ -169,10 +169,6 @@
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
}
}

View File

@@ -32,7 +32,6 @@ const ADD_ONS_KEYS = {
ORDER_BY: 'order_by',
LIMIT: 'limit',
LEGEND_FORMAT: 'legend_format',
REDUCE_TO: 'reduce_to',
};
const ADD_ONS_KEYS_TO_QUERY_PATH = {
@@ -41,14 +40,13 @@ const ADD_ONS_KEYS_TO_QUERY_PATH = {
[ADD_ONS_KEYS.ORDER_BY]: 'orderBy',
[ADD_ONS_KEYS.LIMIT]: 'limit',
[ADD_ONS_KEYS.LEGEND_FORMAT]: 'legend',
[ADD_ONS_KEYS.REDUCE_TO]: 'reduceTo',
};
const ADD_ONS = [
{
icon: <BarChart2 size={14} />,
label: 'Group By',
key: ADD_ONS_KEYS.GROUP_BY,
key: 'group_by',
description:
'Break down data by attributes like service name, endpoint, status code, or region. Essential for spotting patterns and comparing performance across different segments.',
docLink: 'https://signoz.io/docs/userguide/query-builder-v5/#grouping',
@@ -56,7 +54,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Having',
key: ADD_ONS_KEYS.HAVING,
key: 'having',
description:
'Filter grouped results based on aggregate conditions. Show only groups meeting specific criteria, like error rates > 5% or p99 latency > 500',
docLink:
@@ -65,7 +63,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Order By',
key: ADD_ONS_KEYS.ORDER_BY,
key: 'order_by',
description:
'Sort results to surface what matters most. Quickly identify slowest operations, most frequent errors, or highest resource consumers.',
docLink:
@@ -74,7 +72,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Limit',
key: ADD_ONS_KEYS.LIMIT,
key: 'limit',
description:
'Show only the top/bottom N results. Perfect for focusing on outliers, reducing noise, and improving dashboard performance.',
docLink:
@@ -83,7 +81,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Legend format',
key: ADD_ONS_KEYS.LEGEND_FORMAT,
key: 'legend_format',
description:
'Customize series labels using variables like {{service.name}}-{{endpoint}}. Makes charts readable at a glance during incident investigation.',
docLink:
@@ -94,7 +92,7 @@ const ADD_ONS = [
const REDUCE_TO = {
icon: <ScrollText size={14} />,
label: 'Reduce to',
key: ADD_ONS_KEYS.REDUCE_TO,
key: 'reduce_to',
description:
'Apply mathematical operations like sum, average, min, max, or percentiles to reduce multiple time series into a single value.',
docLink:
@@ -220,9 +218,10 @@ function QueryAddOns({
);
const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));
// Filter and set selected views: add-ons that are both active and available
setSelectedViews(
filteredAddOns.filter(
ADD_ONS.filter(
(addOn) =>
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
),
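
The hunk above keeps the comment "add-ons that are both active and available" while switching the filtered source from filteredAddOns to ADD_ONS. A standalone sketch of that selection rule with simplified types, for clarity:

// Standalone sketch of the selection rule above: keep only add-ons whose key
// is both currently active and available for the current panel type.
interface AddOnOption {
	key: string;
	label: string;
}

function selectActiveAvailableAddOns(
	addOns: AddOnOption[],
	activeAddOnKeys: Set<string>,
	availableAddOnKeys: Set<string>,
): AddOnOption[] {
	return addOns.filter(
		(addOn) =>
			activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
	);
}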

View File

@@ -1,12 +1,6 @@
/* eslint-disable */
import {
fireEvent,
render,
screen,
userEvent,
waitFor,
within,
} from 'tests/test-utils';
import { fireEvent, render, screen } from '@testing-library/react';
import React from 'react';
import QueryAddOns from '../QueryV2/QueryAddOns/QueryAddOns';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -61,7 +55,16 @@ jest.mock('../QueryV2/QueryAddOns/HavingFilter/HavingFilter', () => ({
),
}));
// ReduceToFilter is not mocked - we test the actual Ant Design Select component
jest.mock(
'container/QueryBuilder/filters/ReduceToFilter/ReduceToFilter',
() => ({
ReduceToFilter: ({ onChange }: any) => (
<button data-testid="reduce-to" onClick={() => onChange('sum')}>
ReduceToFilter
</button>
),
}),
);
function baseQuery(overrides: Partial<any> = {}): any {
return {
@@ -137,7 +140,7 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('order-by-content')).toBeInTheDocument();
});
it('limit input auto-opens when limit is set and changing it calls handler', async () => {
it('limit input auto-opens when limit is set and changing it calls handler', () => {
render(
<QueryAddOns
query={baseQuery({ limit: 5 })}
@@ -180,88 +183,4 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('limit-content')).toBeInTheDocument();
expect(limitInput.value).toBe('7');
});
it('shows reduce-to add-on when showReduceTo is true', () => {
render(
<QueryAddOns
query={baseQuery()}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('query-add-on-reduce_to')).toBeInTheDocument();
});
it('auto-opens reduce-to content when reduceTo is set', () => {
render(
<QueryAddOns
query={baseQuery({ reduceTo: 'sum' })}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
it('calls handleSetQueryData when reduce-to value changes', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const query = baseQuery({
reduceTo: 'avg',
aggregations: [{ id: 'a', operator: 'count', reduceTo: 'avg' }],
});
render(
<QueryAddOns
query={query}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
// Wait for the reduce-to content section to be visible (it auto-opens when reduceTo is set)
await waitFor(() => {
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
// Get the Select component by its role (combobox)
// The Select is within the reduce-to-content section
const reduceToContent = screen.getByTestId('reduce-to-content');
const selectCombobox = within(reduceToContent).getByRole('combobox');
// Open the dropdown by clicking on the combobox
await user.click(selectCombobox);
// Wait for the dropdown listbox to appear
await screen.findByRole('listbox');
// Find and click the "Sum" option
const sumOption = await screen.findByText('Sum of values in timeframe');
await user.click(sumOption);
// Verify the handler was called with the correct value
await waitFor(() => {
expect(mockHandleSetQueryData).toHaveBeenCalledWith(0, {
...query,
aggregations: [
{
...(query.aggregations?.[0] as any),
reduceTo: 'sum',
},
],
});
});
});
});

View File

@@ -1,18 +1,11 @@
import './styles.scss';
import { WarningFilled } from '@ant-design/icons';
import { Select, Tooltip } from 'antd';
import { Select } from 'antd';
import { DefaultOptionType } from 'antd/es/select';
import classNames from 'classnames';
import { useMemo } from 'react';
import { UniversalYAxisUnitMappings } from './constants';
import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
import {
getUniversalNameFromMetricUnit,
getYAxisCategories,
mapMetricUnitToUniversalUnit,
} from './utils';
import { getYAxisCategories, mapMetricUnitToUniversalUnit } from './utils';
function YAxisUnitSelector({
value,
@@ -21,24 +14,9 @@ function YAxisUnitSelector({
loading = false,
'data-testid': dataTestId,
source,
initialValue,
}: YAxisUnitSelectorProps): JSX.Element {
const universalUnit = mapMetricUnitToUniversalUnit(value);
const incompatibleUnitMessage = useMemo(() => {
if (!initialValue || !value || loading) return '';
const initialUniversalUnit = mapMetricUnitToUniversalUnit(initialValue);
const currentUniversalUnit = mapMetricUnitToUniversalUnit(value);
if (initialUniversalUnit !== currentUniversalUnit) {
const initialUniversalUnitName = getUniversalNameFromMetricUnit(
initialValue,
);
const currentUniversalUnitName = getUniversalNameFromMetricUnit(value);
return `Unit mismatch. Saved unit is ${initialUniversalUnitName}, but ${currentUniversalUnitName} is selected.`;
}
return '';
}, [initialValue, value, loading]);
const handleSearch = (
searchTerm: string,
currentOption: DefaultOptionType | undefined,
@@ -71,16 +49,6 @@ function YAxisUnitSelector({
placeholder={placeholder}
filterOption={(input, option): boolean => handleSearch(input, option)}
loading={loading}
suffixIcon={
incompatibleUnitMessage ? (
<Tooltip title={incompatibleUnitMessage}>
<WarningFilled />
</Tooltip>
) : undefined
}
className={classNames({
'warning-state': incompatibleUnitMessage,
})}
data-testid={dataTestId}
>
{categories.map((category) => (
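
The warning removed above compared the saved unit against the current selection after normalizing both to universal units. A minimal extraction of that check as a pure helper, assuming the two util exports this component previously imported:

import {
	getUniversalNameFromMetricUnit,
	mapMetricUnitToUniversalUnit,
} from './utils';

// Pure form of the removed incompatibleUnitMessage memo: empty string when
// the units are compatible (or either side is missing), message otherwise.
export function getIncompatibleUnitMessage(
	initialValue?: string,
	value?: string,
): string {
	if (!initialValue || !value) return '';
	if (
		mapMetricUnitToUniversalUnit(initialValue) ===
		mapMetricUnitToUniversalUnit(value)
	) {
		return '';
	}
	return `Unit mismatch. Saved unit is ${getUniversalNameFromMetricUnit(
		initialValue,
	)}, but ${getUniversalNameFromMetricUnit(value)} is selected.`;
}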

View File

@@ -91,36 +91,4 @@ describe('YAxisUnitSelector', () => {
expect(screen.getByText('Bytes (B)')).toBeInTheDocument();
expect(screen.getByText('Seconds (s)')).toBeInTheDocument();
});
it('shows warning message when incompatible unit is selected', () => {
render(
<YAxisUnitSelector
source={YAxisSource.ALERTS}
value="By"
onChange={mockOnChange}
initialValue="s"
/>,
);
const warningIcon = screen.getByLabelText('warning');
expect(warningIcon).toBeInTheDocument();
fireEvent.mouseOver(warningIcon);
return screen
.findByText(
'Unit mismatch. Saved unit is Seconds (s), but Bytes (B) is selected.',
)
.then((el) => expect(el).toBeInTheDocument());
});
it('does not show warning message when compatible unit is selected', () => {
render(
<YAxisUnitSelector
source={YAxisSource.ALERTS}
value="s"
onChange={mockOnChange}
initialValue="s"
/>,
);
const warningIcon = screen.queryByLabelText('warning');
expect(warningIcon).not.toBeInTheDocument();
});
});

View File

@@ -3,13 +3,3 @@
width: 220px;
}
}
.warning-state {
.ant-select-selector {
border-color: var(--bg-amber-400) !important;
}
.anticon {
color: var(--bg-amber-400) !important;
}
}

View File

@@ -6,7 +6,6 @@ export interface YAxisUnitSelectorProps {
disabled?: boolean;
'data-testid'?: string;
source: YAxisSource;
initialValue?: string;
}
export enum UniversalYAxisUnit {

View File

@@ -55,7 +55,6 @@ export const REACT_QUERY_KEY = {
GET_METRIC_DETAILS: 'GET_METRIC_DETAILS',
GET_RELATED_METRICS: 'GET_RELATED_METRICS',
GET_INSPECT_METRICS_DETAILS: 'GET_INSPECT_METRICS_DETAILS',
GET_METRIC_METADATA: 'GET_METRIC_METADATA',
// Traces Funnels Query Keys
GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',

View File

@@ -5,11 +5,9 @@ import { useCreateAlertState } from 'container/CreateAlertV2/context';
import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview';
import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import useGetYAxisUnit from 'hooks/useGetYAxisUnit';
import { useEffect, useState } from 'react';
import { useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { AlertDef } from 'types/api/alerts/def';
import { EQueryType } from 'types/common/dashboard';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -20,13 +18,7 @@ export interface ChartPreviewProps {
function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
const { currentQuery, panelType, stagedQuery } = useQueryBuilder();
const {
alertType,
thresholdState,
alertState,
setAlertState,
isEditMode,
} = useCreateAlertState();
const { thresholdState, alertState, setAlertState } = useCreateAlertState();
const { selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -35,25 +27,6 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
const yAxisUnit = alertState.yAxisUnit || '';
const fetchYAxisUnit =
!isEditMode && alertType === AlertTypes.METRICS_BASED_ALERT;
const selectedQueryName = thresholdState.selectedQuery;
const { yAxisUnit: initialYAxisUnit, isLoading } = useGetYAxisUnit(
selectedQueryName,
{
enabled: fetchYAxisUnit,
},
);
// Every time a new metric is selected, set the y-axis unit to its unit value if present
// Only for metrics-based alerts
useEffect(() => {
if (fetchYAxisUnit) {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: initialYAxisUnit });
}
}, [initialYAxisUnit, setAlertState, fetchYAxisUnit]);
const headline = (
<div className="chart-preview-headline">
<PlotTag
@@ -61,13 +34,11 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
panelType={panelType || PANEL_TYPES.TIME_SERIES}
/>
<YAxisUnitSelector
value={yAxisUnit}
initialValue={initialYAxisUnit}
value={alertState.yAxisUnit}
onChange={(value): void => {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
}}
source={YAxisSource.ALERTS}
loading={isLoading}
/>
</div>
);

View File

@@ -120,6 +120,7 @@ function FullView({
originalGraphType: selectedPanelType,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
return {
query: updatedQuery,
graphType: PANEL_TYPES.LIST,

View File

@@ -137,6 +137,7 @@ function GridCardGraph({
originalGraphType: widget.panelTypes,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
const initialDataSource = updatedQuery.builder.queryData[0].dataSource;
return {
query: updatedQuery,

View File

@@ -996,7 +996,6 @@
gap: 8px;
justify-content: space-between;
align-items: center;
width: 100%;
.ingestion-key-url-label {
font-size: 13px;

View File

@@ -40,7 +40,7 @@ import { initialQueryMeterWithType } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { INITIAL_ALERT_THRESHOLD_STATE } from 'container/CreateAlertV2/context/constants';
import dayjs from 'dayjs';
import { useGetGlobalConfig } from 'hooks/globalConfig/useGetGlobalConfig';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -302,11 +302,11 @@ function MultiIngestionSettings(): JSX.Element {
};
const {
data: globalConfig,
isLoading: isLoadingGlobalConfig,
isFetching: isFetchingGlobalConfig,
isError: isErrorGlobalConfig,
} = useGetGlobalConfig(!isEnterpriseSelfHostedUser);
data: deploymentsData,
isLoading: isLoadingDeploymentsData,
isFetching: isFetchingDeploymentsData,
isError: isErrorDeploymentsData,
} = useGetDeploymentsData(!isEnterpriseSelfHostedUser);
const {
mutate: createIngestionKey,
@@ -1409,10 +1409,10 @@ function MultiIngestionSettings(): JSX.Element {
</Typography.Text>
</header>
{!isErrorGlobalConfig &&
!isLoadingGlobalConfig &&
!isFetchingGlobalConfig &&
globalConfig && (
{!isErrorDeploymentsData &&
!isLoadingDeploymentsData &&
!isFetchingDeploymentsData &&
deploymentsData && (
<div className="ingestion-setup-details-links">
<div className="ingestion-key-url-container">
<div className="ingestion-key-url-label">Ingestion URL</div>
@@ -1421,10 +1421,29 @@ function MultiIngestionSettings(): JSX.Element {
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
handleCopyKey(`${globalConfig?.data.data.ingestion_url}`);
handleCopyKey(
`ingest.${deploymentsData?.data.data.cluster.region.dns}`,
);
}}
>
{globalConfig?.data.data.ingestion_url}
ingest.{deploymentsData?.data.data.cluster.region.dns}
<Copy className="copy-key-btn" size={12} />
</div>
</div>
<div className="ingestion-data-region-container">
<div className="ingestion-data-region-label">Region</div>
<div
className="ingestion-data-region-value"
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
handleCopyKey(deploymentsData?.data.data.cluster.region.name || '');
}}
>
<Typography.Text className="ingestion-data-region-value-text">
{deploymentsData?.data.data.cluster.region.name}
</Typography.Text>
<Copy className="copy-key-btn" size={12} />
</div>
</div>
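
The hunk above derives the ingestion URL by prefixing ingest. to the region DNS from the deployments payload, and copies the region name separately. A tiny helper sketch of that construction; the type name and example values are illustrative.

// Helper form of the URL construction above; field names match what this
// diff reads, example values are illustrative only.
interface ClusterRegion {
	dns: string;
	name: string;
}

export function buildIngestionUrl(region: ClusterRegion): string {
	return `ingest.${region.dns}`;
}

// buildIngestionUrl({ dns: 'us.example.cloud', name: 'United States' })
// -> 'ingest.us.example.cloud'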

View File

@@ -58,27 +58,6 @@
.explore-content {
padding: 0 8px;
.y-axis-unit-selector-container {
display: flex;
align-items: center;
gap: 10px;
padding-top: 10px;
margin-bottom: 10px;
.save-unit-container {
display: flex;
align-items: center;
gap: 10px;
.ant-btn {
border-radius: 2px;
.ant-typography {
font-size: 12px;
}
}
}
}
.ant-space {
margin-top: 10px;
margin-bottom: 20px;
@@ -96,14 +75,6 @@
.time-series-view {
min-width: 100%;
width: 100%;
position: relative;
.no-unit-warning {
position: absolute;
top: 30px;
right: 40px;
z-index: 1000;
}
}
.time-series-container {

View File

@@ -1,7 +1,7 @@
import './Explorer.styles.scss';
import * as Sentry from '@sentry/react';
import { Switch, Tooltip } from 'antd';
import { Switch } from 'antd';
import logEvent from 'api/common/logEvent';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import WarningPopover from 'components/WarningPopover/WarningPopover';
@@ -25,14 +25,10 @@ import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToD
import { v4 as uuid } from 'uuid';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
import MetricDetails from '../MetricDetails/MetricDetails';
// import QuerySection from './QuerySection';
import TimeSeries from './TimeSeries';
import { ExplorerTabs } from './types';
import {
getMetricUnits,
splitQueryIntoOneChartPerQuery,
useGetMetrics,
} from './utils';
import { splitQueryIntoOneChartPerQuery } from './utils';
const ONE_CHART_PER_QUERY_ENABLED_KEY = 'isOneChartPerQueryEnabled';
@@ -44,34 +40,6 @@ function Explorer(): JSX.Element {
currentQuery,
} = useQueryBuilder();
const { safeNavigate } = useSafeNavigate();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(false);
const metricNames = useMemo(() => {
const currentMetricNames: string[] = [];
stagedQuery?.builder.queryData.forEach((query) => {
if (query.aggregateAttribute?.key) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames;
}, [stagedQuery]);
const {
metrics,
isLoading: isMetricUnitsLoading,
isError: isMetricUnitsError,
} = useGetMetrics(metricNames);
const units = useMemo(() => getMetricUnits(metrics), [metrics]);
const areAllMetricUnitsSame = useMemo(
() =>
!isMetricUnitsLoading &&
!isMetricUnitsError &&
units.length > 0 &&
units.every((unit) => unit && unit === units[0]),
[units, isMetricUnitsLoading, isMetricUnitsError],
);
const [searchParams, setSearchParams] = useSearchParams();
const isOneChartPerQueryEnabled =
@@ -80,66 +48,7 @@ function Explorer(): JSX.Element {
const [showOneChartPerQuery, toggleShowOneChartPerQuery] = useState(
isOneChartPerQueryEnabled,
);
const [disableOneChartPerQuery, toggleDisableOneChartPerQuery] = useState(
false,
);
const [selectedTab] = useState<ExplorerTabs>(ExplorerTabs.TIME_SERIES);
const [yAxisUnit, setYAxisUnit] = useState<string | undefined>();
const unitsLength = useMemo(() => units.length, [units]);
const firstUnit = useMemo(() => units?.[0], [units]);
useEffect(() => {
// Set the y axis unit to the first metric unit if
// 1. There is one metric unit and it is not empty
// 2. All metric units are the same and not empty
// Else, set the y axis unit to empty if
// 1. There are more than one metric units and they are not the same
// 2. There are no metric units
// 3. There is exactly one metric unit but it is empty/undefined
if (unitsLength === 0) {
setYAxisUnit(undefined);
} else if (unitsLength === 1 && firstUnit) {
setYAxisUnit(firstUnit);
} else if (unitsLength === 1 && !firstUnit) {
setYAxisUnit(undefined);
} else if (areAllMetricUnitsSame) {
if (firstUnit) {
setYAxisUnit(firstUnit);
} else {
setYAxisUnit(undefined);
}
} else if (unitsLength > 1 && !areAllMetricUnitsSame) {
setYAxisUnit(undefined);
}
}, [unitsLength, firstUnit, areAllMetricUnitsSame]);
useEffect(() => {
// Don't apply logic during loading to avoid overwriting user preferences
if (isMetricUnitsLoading) {
return;
}
// Disable one chart per query if -
// 1. There are more than one metric
// 2. The metric units are not the same
if (units.length > 1 && !areAllMetricUnitsSame) {
toggleShowOneChartPerQuery(true);
toggleDisableOneChartPerQuery(true);
} else if (units.length <= 1) {
toggleShowOneChartPerQuery(false);
toggleDisableOneChartPerQuery(true);
} else {
// When units are the same and loading is complete, restore URL-based preference
toggleShowOneChartPerQuery(isOneChartPerQueryEnabled);
toggleDisableOneChartPerQuery(false);
}
}, [
units,
areAllMetricUnitsSame,
isMetricUnitsLoading,
isOneChartPerQueryEnabled,
]);
const handleToggleShowOneChartPerQuery = (): void => {
toggleShowOneChartPerQuery(!showOneChartPerQuery);
@@ -159,20 +68,15 @@ function Explorer(): JSX.Element {
[updateAllQueriesOperators],
);
const exportDefaultQuery = useMemo(() => {
const query = updateAllQueriesOperators(
currentQuery || initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
);
if (yAxisUnit && !query.unit) {
return {
...query,
unit: yAxisUnit,
};
}
return query;
}, [currentQuery, updateAllQueriesOperators, yAxisUnit]);
const exportDefaultQuery = useMemo(
() =>
updateAllQueriesOperators(
currentQuery || initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
),
[currentQuery, updateAllQueriesOperators],
);
useShareBuilderUrl({ defaultValue: defaultQuery });
@@ -186,16 +90,8 @@ function Explorer(): JSX.Element {
const widgetId = uuid();
let query = queryToExport || exportDefaultQuery;
if (yAxisUnit && !query.unit) {
query = {
...query,
unit: yAxisUnit,
};
}
const dashboardEditView = generateExportToDashboardLink({
query,
query: queryToExport || exportDefaultQuery,
panelType: PANEL_TYPES.TIME_SERIES,
dashboardId: dashboard.id,
widgetId,
@@ -203,33 +99,17 @@ function Explorer(): JSX.Element {
safeNavigate(dashboardEditView);
},
[exportDefaultQuery, safeNavigate, yAxisUnit],
[exportDefaultQuery, safeNavigate],
);
const splitedQueries = useMemo(
() =>
splitQueryIntoOneChartPerQuery(
stagedQuery || initialQueriesMap[DataSource.METRICS],
metricNames,
units,
),
[stagedQuery, metricNames, units],
[stagedQuery],
);
const [selectedMetricName, setSelectedMetricName] = useState<string | null>(
null,
);
const handleOpenMetricDetails = (metricName: string): void => {
setIsMetricDetailsOpen(true);
setSelectedMetricName(metricName);
};
const handleCloseMetricDetails = (): void => {
setIsMetricDetailsOpen(false);
setSelectedMetricName(null);
};
useEffect(() => {
logEvent(MetricsExplorerEvents.TabChanged, {
[MetricsExplorerEventKeys.Tab]: 'explorer',
@@ -243,44 +123,17 @@ function Explorer(): JSX.Element {
const [warning, setWarning] = useState<Warning | undefined>(undefined);
const oneChartPerQueryDisabledTooltip = useMemo(() => {
if (splitedQueries.length <= 1) {
return 'One chart per query cannot be toggled for a single query.';
}
if (units.length <= 1) {
return 'One chart per query cannot be toggled when there is only one metric.';
}
if (disableOneChartPerQuery) {
return 'One chart per query cannot be disabled for multiple queries with different units.';
}
return undefined;
}, [disableOneChartPerQuery, splitedQueries.length, units.length]);
// Show the y axis unit selector if -
// 1. There is only one metric
// 2. The metric has no saved unit
const showYAxisUnitSelector = useMemo(
() => !isMetricUnitsLoading && units.length === 1 && !units[0],
[units, isMetricUnitsLoading],
);
return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-explore-container">
<div className="explore-header">
<div className="explore-header-left-actions">
<span>1 chart/query</span>
<Tooltip
open={disableOneChartPerQuery ? undefined : false}
title={oneChartPerQueryDisabledTooltip}
>
<Switch
checked={showOneChartPerQuery}
onChange={handleToggleShowOneChartPerQuery}
disabled={disableOneChartPerQuery || splitedQueries.length <= 1}
size="small"
/>
</Tooltip>
<Switch
checked={showOneChartPerQuery}
onChange={handleToggleShowOneChartPerQuery}
size="small"
/>
</div>
<div className="explore-header-right-actions">
{!isEmpty(warning) && <WarningPopover warningData={warning} />}
@@ -321,16 +174,6 @@ function Explorer(): JSX.Element {
<TimeSeries
showOneChartPerQuery={showOneChartPerQuery}
setWarning={setWarning}
areAllMetricUnitsSame={areAllMetricUnitsSame}
isMetricUnitsLoading={isMetricUnitsLoading}
isMetricUnitsError={isMetricUnitsError}
metricUnits={units}
metricNames={metricNames}
metrics={metrics}
handleOpenMetricDetails={handleOpenMetricDetails}
yAxisUnit={yAxisUnit}
setYAxisUnit={setYAxisUnit}
showYAxisUnitSelector={showYAxisUnitSelector}
/>
)}
{/* TODO: Enable once we have resolved all related metrics issues */}
@@ -344,17 +187,9 @@ function Explorer(): JSX.Element {
query={exportDefaultQuery}
sourcepage={DataSource.METRICS}
onExport={handleExport}
isOneChartPerQuery={showOneChartPerQuery}
isOneChartPerQuery={false}
splitedQueries={splitedQueries}
/>
{isMetricDetailsOpen && (
<MetricDetails
metricName={selectedMetricName}
isOpen={isMetricDetailsOpen}
onClose={handleCloseMetricDetails}
isModalTimeSelection={false}
/>
)}
</Sentry.ErrorBoundary>
);
}

View File

@@ -1,18 +1,14 @@
import { Color } from '@signozhq/design-tokens';
import { Tooltip, Typography } from 'antd';
import { isAxiosError } from 'axios';
import classNames from 'classnames';
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters/BuilderUnitsFilter/BuilderUnits';
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
import { convertDataValueToMs } from 'container/TimeSeriesView/utils';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { AlertTriangle } from 'lucide-react';
import { useMemo } from 'react';
import { useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -28,13 +24,6 @@ import { splitQueryIntoOneChartPerQuery } from './utils';
function TimeSeries({
showOneChartPerQuery,
setWarning,
isMetricUnitsLoading,
metricUnits,
metricNames,
handleOpenMetricDetails,
yAxisUnit,
setYAxisUnit,
showYAxisUnitSelector,
}: TimeSeriesProps): JSX.Element {
const { stagedQuery, currentQuery } = useQueryBuilder();
@@ -67,14 +56,13 @@ function TimeSeries({
showOneChartPerQuery
? splitQueryIntoOneChartPerQuery(
stagedQuery || initialQueriesMap[DataSource.METRICS],
metricNames,
metricUnits,
)
: [stagedQuery || initialQueriesMap[DataSource.METRICS]],
// eslint-disable-next-line react-hooks/exhaustive-deps
[showOneChartPerQuery, stagedQuery, JSON.stringify(metricUnits)],
[showOneChartPerQuery, stagedQuery],
);
const [yAxisUnit, setYAxisUnit] = useState<string>('');
const queries = useQueries(
queryPayloads.map((payload, index) => ({
queryKey: [
@@ -138,148 +126,32 @@ function TimeSeries({
setYAxisUnit(value);
};
// TODO: Enable once we have resolved all related metrics v2 api issues
// Show the save unit button if
// 1. There is only one metric
// 2. The metric has no saved unit
// 3. The user has selected a unit
// const showSaveUnitButton = useMemo(
// () =>
// metricUnits.length === 1 &&
// Boolean(metrics?.[0]) &&
// !metricUnits[0] &&
// yAxisUnit,
// [metricUnits, metrics, yAxisUnit],
// );
// const {
// mutate: updateMetricMetadata,
// isLoading: isUpdatingMetricMetadata,
// } = useUpdateMetricMetadata();
// const handleSaveUnit = (): void => {
// updateMetricMetadata(
// {
// metricName: metricNames[0],
// payload: {
// unit: yAxisUnit,
// description: metrics[0]?.description ?? '',
// metricType: metrics[0]?.type as MetricType,
// temporality: metrics[0]?.temporality,
// },
// },
// {
// onSuccess: () => {
// notifications.success({
// message: 'Unit saved successfully',
// });
// queryClient.invalidateQueries([
// REACT_QUERY_KEY.GET_METRIC_DETAILS,
// metricNames[0],
// ]);
// },
// onError: () => {
// notifications.error({
// message: 'Failed to save unit',
// });
// },
// },
// );
// };
return (
<>
<div className="y-axis-unit-selector-container">
{showYAxisUnitSelector && (
<>
<YAxisUnitSelector
onChange={onUnitChangeHandler}
value={yAxisUnit}
source={YAxisSource.EXPLORER}
data-testid="y-axis-unit-selector"
/>
{/* TODO: Enable once we have resolved all related metrics v2 api issues */}
{/* {showSaveUnitButton && (
<div className="save-unit-container">
<Typography.Text>
Save the selected unit for this metric?
</Typography.Text>
<Button
type="primary"
size="small"
disabled={isUpdatingMetricMetadata}
onClick={handleSaveUnit}
>
<Typography.Paragraph>Yes</Typography.Paragraph>
</Button>
</div>
)} */}
</>
)}
</div>
<BuilderUnitsFilter onChange={onUnitChangeHandler} yAxisUnit={yAxisUnit} />
<div
className={classNames({
'time-series-container': changeLayoutForOneChartPerQuery,
})}
>
{responseData.map((datapoint, index) => {
const isQueryDataItem = index < metricNames.length;
const metricName = isQueryDataItem ? metricNames[index] : undefined;
const metricUnit = isQueryDataItem ? metricUnits[index] : undefined;
// Show the no unit warning if -
// 1. The metric query is not loading
// 2. The metric units are not loading
// 3. There are more than one metric
// 4. The current metric unit is empty
// 5. Is a queryData item
const isMetricUnitEmpty =
isQueryDataItem &&
!queries[index].isLoading &&
!isMetricUnitsLoading &&
metricUnits.length > 1 &&
!metricUnit &&
metricName;
const currentYAxisUnit = yAxisUnit || metricUnit;
return (
<div
className="time-series-view"
// eslint-disable-next-line react/no-array-index-key
key={index}
>
{isMetricUnitEmpty && metricName && (
<Tooltip
className="no-unit-warning"
title={
<Typography.Text>
This metric does not have a unit. Please set one for it in the{' '}
<Typography.Link
onClick={(): void => handleOpenMetricDetails(metricName)}
>
metric details
</Typography.Link>{' '}
page.
</Typography.Text>
}
>
<AlertTriangle size={16} color={Color.BG_AMBER_400} />
</Tooltip>
)}
<TimeSeriesView
isFilterApplied={false}
isError={queries[index].isError}
isLoading={queries[index].isLoading || isMetricUnitsLoading}
data={datapoint}
yAxisUnit={currentYAxisUnit}
dataSource={DataSource.METRICS}
error={queries[index].error as APIError}
setWarning={setWarning}
/>
</div>
);
})}
{responseData.map((datapoint, index) => (
<div
className="time-series-view"
// eslint-disable-next-line react/no-array-index-key
key={index}
>
<TimeSeriesView
isFilterApplied={false}
isError={queries[index].isError}
isLoading={queries[index].isLoading}
data={datapoint}
yAxisUnit={yAxisUnit}
dataSource={DataSource.METRICS}
error={queries[index].error as APIError}
setWarning={setWarning}
/>
</div>
))}
</div>
</>
);

View File

@@ -1,6 +1,4 @@
import { render, screen } from '@testing-library/react';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import * as useOptionsMenuHooks from 'container/OptionsMenu';
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
@@ -14,18 +12,13 @@ import { MemoryRouter } from 'react-router-dom';
import { useSearchParams } from 'react-router-dom-v5-compat';
import store from 'store';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import { DataSource } from 'types/common/queryBuilder';
import Explorer from '../Explorer';
import * as useGetMetricsHooks from '../utils';
const mockSetSearchParams = jest.fn();
const queryClient = new QueryClient();
const mockUpdateAllQueriesOperators = jest
.fn()
.mockReturnValue(initialQueriesMap[DataSource.METRICS]);
const mockUpdateAllQueriesOperators = jest.fn();
const mockUseQueryBuilderData = {
handleRunQuery: jest.fn(),
stagedQuery: initialQueriesMap[DataSource.METRICS],
@@ -133,30 +126,6 @@ jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
} as any);
const Y_AXIS_UNIT_SELECTOR_TEST_ID = 'y-axis-unit-selector';
const mockMetric: MetricMetadata = {
type: MetricType.SUM,
description: 'metric1 description',
unit: 'metric1 unit',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
};
function renderExplorer(): void {
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
}
describe('Explorer', () => {
beforeEach(() => {
jest.clearAllMocks();
@@ -173,7 +142,17 @@ describe('Explorer', () => {
mockSetSearchParams,
]);
renderExplorer();
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(mockUpdateAllQueriesOperators).toHaveBeenCalledWith(
initialQueriesMap[DataSource.METRICS],
@@ -187,13 +166,18 @@ describe('Explorer', () => {
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).toBeChecked();
@@ -204,132 +188,20 @@ describe('Explorer', () => {
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).not.toBeChecked();
});
it('should not render y axis unit selector for single metric which has a unit', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
});
it('should not render y axis unit selector for multiple metrics with same unit', () => {
(useSearchParams as jest.Mock).mockReturnValueOnce([
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
});
it('should hide y axis unit selector for multiple metrics with different units', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
// One chart per query toggle should be disabled
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).toBeDisabled();
});
it('should render empty y axis unit selector for a single metric with no unit', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [
{
type: MetricType.SUM,
description: 'metric1 description',
unit: '',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
},
],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).toBeInTheDocument();
expect(yAxisUnitSelector).toHaveTextContent('Please select a unit');
});
it('one chart per query should be off and disabled when there is only one query', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric],
});
renderExplorer();
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).not.toBeChecked();
expect(oneChartPerQueryToggle).toBeDisabled();
});
it('one chart per query should be enabled by default when there are multiple metrics with the same unit', () => {
const mockQueryData = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric1',
},
};
const mockStagedQueryWithMultipleQueries = {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [mockQueryData, mockQueryData],
},
};
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
...mockUseQueryBuilderData,
stagedQuery: mockStagedQueryWithMultipleQueries,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).toBeEnabled();
});
});

View File

@@ -1,180 +0,0 @@
import { render, RenderResult, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { UpdateMetricMetadataResponse } from 'api/metricsExplorer/updateMetricMetadata';
import * as useUpdateMetricMetadataHooks from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { UseUpdateMetricMetadataProps } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { UseMutationResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import TimeSeries from '../TimeSeries';
import { TimeSeriesProps } from '../types';
type MockUpdateMetricMetadata = UseMutationResult<
SuccessResponse<UpdateMetricMetadataResponse> | ErrorResponse,
Error,
UseUpdateMetricMetadataProps
>;
const mockUpdateMetricMetadata = jest.fn();
jest
.spyOn(useUpdateMetricMetadataHooks, 'useUpdateMetricMetadata')
.mockReturnValue(({
mutate: mockUpdateMetricMetadata,
isLoading: false,
} as Partial<MockUpdateMetricMetadata>) as MockUpdateMetricMetadata);
jest.mock('container/TimeSeriesView/TimeSeriesView', () => ({
__esModule: true,
default: jest.fn().mockReturnValue(
<div role="img" aria-label="warning">
TimeSeriesView
</div>,
),
}));
jest.mock('react-query', () => ({
...jest.requireActual('react-query'),
useQueryClient: jest.fn().mockReturnValue({
invalidateQueries: jest.fn(),
}),
useQueries: jest.fn().mockImplementation((queries: any[]) =>
queries.map(() => ({
data: undefined,
isLoading: false,
isError: false,
error: undefined,
})),
),
}));
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: jest.fn().mockReturnValue({
globalTime: {
selectedTime: '5min',
maxTime: 1713738000000,
minTime: 1713734400000,
},
}),
}));
const mockMetric: MetricMetadata = {
type: MetricType.SUM,
description: 'metric1 description',
unit: 'metric1 unit',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
};
const mockSetWarning = jest.fn();
const mockSetIsMetricDetailsOpen = jest.fn();
const mockSetYAxisUnit = jest.fn();
function renderTimeSeries(
overrides: Partial<TimeSeriesProps> = {},
): RenderResult {
return render(
<TimeSeries
showOneChartPerQuery={false}
setWarning={mockSetWarning}
areAllMetricUnitsSame={false}
isMetricUnitsLoading={false}
metricUnits={[]}
metricNames={[]}
metrics={[]}
isMetricUnitsError={false}
handleOpenMetricDetails={mockSetIsMetricDetailsOpen}
yAxisUnit="count"
setYAxisUnit={mockSetYAxisUnit}
showYAxisUnitSelector={false}
// eslint-disable-next-line react/jsx-props-no-spreading
{...overrides}
/>,
);
}
describe('TimeSeries', () => {
it('should render a warning icon when a metric has no unit among multiple metrics', () => {
const user = userEvent.setup();
const { container } = renderTimeSeries({
metricUnits: ['', 'count'],
metricNames: ['metric1', 'metric2'],
metrics: [undefined, undefined],
});
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
user.hover(alertIcon);
waitFor(() =>
expect(
screen.findByText('This metric does not have a unit'),
).toBeInTheDocument(),
);
});
it('clicking on warning icon tooltip should open metric details modal', async () => {
const user = userEvent.setup();
const { container } = renderTimeSeries({
metricUnits: ['', 'count'],
metricNames: ['metric1', 'metric2'],
metrics: [mockMetric, mockMetric],
yAxisUnit: 'seconds',
});
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
user.hover(alertIcon);
const metricDetailsLink = await screen.findByText('metric details');
user.click(metricDetailsLink);
waitFor(() =>
expect(mockSetIsMetricDetailsOpen).toHaveBeenCalledWith('metric1'),
);
});
// TODO: Unskip this test once the save unit button is implemented
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
it.skip('shows Save unit button when metric had no unit but one is selected', () => {
const { findByText, getByRole } = renderTimeSeries({
metricUnits: [undefined],
metricNames: ['metric1'],
metrics: [mockMetric],
yAxisUnit: 'seconds',
});
expect(
findByText('Save the selected unit for this metric?'),
).toBeInTheDocument();
const yesButton = getByRole('button', { name: 'Yes' });
expect(yesButton).toBeInTheDocument();
expect(yesButton).toBeEnabled();
});
// TODO: Unskip this test once the save unit button is implemented
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
it.skip('clicking on save unit button should update metric metadata', () => {
const user = userEvent.setup();
const { getByRole } = renderTimeSeries({
metricUnits: [''],
metricNames: ['metric1'],
metrics: [mockMetric],
yAxisUnit: 'seconds',
});
const yesButton = getByRole('button', { name: /Yes/i });
user.click(yesButton);
expect(mockUpdateMetricMetadata).toHaveBeenCalledWith(
{
metricName: 'metric1',
payload: expect.objectContaining({ unit: 'seconds' }),
},
expect.objectContaining({
onSuccess: expect.any(Function),
onError: expect.any(Function),
}),
);
});
});

View File

@@ -1,161 +0,0 @@
import { renderHook } from '@testing-library/react';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { initialQueriesMap } from 'constants/queryBuilder';
import * as useGetMultipleMetricsHook from 'hooks/metricsExplorer/useGetMultipleMetrics';
import { UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
MetricMetadata,
MetricMetadataResponse,
} from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
IBuilderFormula,
IBuilderQuery,
Query,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import {
getMetricUnits,
splitQueryIntoOneChartPerQuery,
useGetMetrics,
} from '../utils';
const MOCK_QUERY_DATA_1: IBuilderQuery = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric1',
},
};
const MOCK_QUERY_DATA_2: IBuilderQuery = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric2',
},
};
const MOCK_FORMULA_DATA: IBuilderFormula = {
expression: '1 + 1',
disabled: false,
queryName: 'Mock Formula',
legend: 'Mock Legend',
};
const MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA: Query = {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [MOCK_QUERY_DATA_1, MOCK_QUERY_DATA_2],
queryFormulas: [MOCK_FORMULA_DATA, MOCK_FORMULA_DATA],
},
};
describe('splitQueryIntoOneChartPerQuery', () => {
it('should split a query with multiple queryData to multiple distinct queries, each with a single queryData', () => {
const result = splitQueryIntoOneChartPerQuery(
MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA,
['metric1', 'metric2'],
[undefined, 'unit2'],
);
expect(result).toHaveLength(4);
// Verify query 1 has the correct data
expect(result[0].builder.queryData).toHaveLength(1);
expect(result[0].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_1);
expect(result[0].builder.queryFormulas).toHaveLength(0);
expect(result[0].unit).toBeUndefined();
// Verify query 2 has the correct data
expect(result[1].builder.queryData).toHaveLength(1);
expect(result[1].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_2);
expect(result[1].builder.queryFormulas).toHaveLength(0);
expect(result[1].unit).toBe('unit2');
// Verify query 3 has the correct data
expect(result[2].builder.queryFormulas).toHaveLength(1);
expect(result[2].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
expect(result[2].builder.queryData).toHaveLength(2); // 2 disabled queries
expect(result[2].builder.queryData[0].disabled).toBe(true);
expect(result[2].builder.queryData[1].disabled).toBe(true);
expect(result[2].unit).toBeUndefined();
// Verify query 4 has the correct data
expect(result[3].builder.queryFormulas).toHaveLength(1);
expect(result[3].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
expect(result[3].builder.queryData).toHaveLength(2); // 2 disabled queries
expect(result[3].builder.queryData[0].disabled).toBe(true);
expect(result[3].builder.queryData[1].disabled).toBe(true);
expect(result[3].unit).toBeUndefined();
});
});
const MOCK_METRIC_METADATA: MetricMetadata = {
description: 'Metric 1 description',
unit: 'unit1',
type: MetricType.GAUGE,
temporality: Temporality.DELTA,
isMonotonic: true,
};
describe('useGetMetrics', () => {
beforeEach(() => {
jest
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
.mockReturnValue([
({
isLoading: false,
isError: false,
data: {
httpStatusCode: 200,
data: {
status: 'success',
data: MOCK_METRIC_METADATA,
},
},
} as Partial<
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
]);
});
it('should return the correct metrics data', () => {
const { result } = renderHook(() => useGetMetrics(['metric1']));
expect(result.current.metrics).toHaveLength(1);
expect(result.current.metrics[0]).toBeDefined();
expect(result.current.metrics[0]).toEqual(MOCK_METRIC_METADATA);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return array of undefined values of correct length when metrics data is not yet loaded', () => {
jest
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
.mockReturnValue([
({
isLoading: true,
isError: false,
} as Partial<
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
]);
const { result } = renderHook(() => useGetMetrics(['metric1']));
expect(result.current.metrics).toHaveLength(1);
expect(result.current.metrics[0]).toBeUndefined();
});
});
describe('getMetricUnits', () => {
it('should return the same unit for units that are not known to the universal unit mapper', () => {
const result = getMetricUnits([MOCK_METRIC_METADATA]);
expect(result).toHaveLength(1);
expect(result[0]).toEqual(MOCK_METRIC_METADATA.unit);
});
it('should return universal unit for units that are known to the universal unit mapper', () => {
const result = getMetricUnits([{ ...MOCK_METRIC_METADATA, unit: 'seconds' }]);
expect(result).toHaveLength(1);
expect(result[0]).toBe('s');
});
});

View File

@@ -3,7 +3,6 @@ import { Dispatch, SetStateAction } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
export enum ExplorerTabs {
TIME_SERIES = 'time-series',
@@ -13,16 +12,6 @@ export enum ExplorerTabs {
export interface TimeSeriesProps {
showOneChartPerQuery: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
areAllMetricUnitsSame: boolean;
isMetricUnitsLoading: boolean;
isMetricUnitsError: boolean;
metricUnits: (string | undefined)[];
metricNames: string[];
metrics: (MetricMetadata | undefined)[];
handleOpenMetricDetails: (metricName: string) => void;
yAxisUnit: string | undefined;
setYAxisUnit: (unit: string) => void;
showYAxisUnitSelector: boolean;
}
export interface RelatedMetricsProps {

View File

@@ -1,40 +1,20 @@
import { mapMetricUnitToUniversalUnit } from 'components/YAxisUnitSelector/utils';
import { useGetMultipleMetrics } from 'hooks/metricsExplorer/useGetMultipleMetrics';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { v4 as uuid } from 'uuid';
/**
* Split a query with multiple queryData to multiple distinct queries, each with a single queryData.
* @param query - The query to split
* @param units - The units of the metrics, can be undefined if the metric has no unit
* @returns The split queries
*/
export const splitQueryIntoOneChartPerQuery = (
query: Query,
metricNames: string[],
units: (string | undefined)[],
): Query[] => {
export const splitQueryIntoOneChartPerQuery = (query: Query): Query[] => {
const queries: Query[] = [];
query.builder.queryData.forEach((currentQuery) => {
if (currentQuery.aggregateAttribute?.key) {
const metricIndex = metricNames.indexOf(
currentQuery.aggregateAttribute?.key,
);
const unit = metricIndex >= 0 ? units[metricIndex] : undefined;
const newQuery = {
...query,
id: uuid(),
builder: {
...query.builder,
queryData: [currentQuery],
queryFormulas: [],
},
unit,
};
queries.push(newQuery);
}
const newQuery = {
...query,
id: uuid(),
builder: {
...query.builder,
queryData: [currentQuery],
queryFormulas: [],
},
};
queries.push(newQuery);
});
query.builder.queryFormulas.forEach((currentFormula) => {
@@ -55,43 +35,3 @@ export const splitQueryIntoOneChartPerQuery = (
return queries;
};
/**
* Hook to get data for multiple metrics with a synchronous loading and error state
* @param metricNames - The names of the metrics to get
* @param isEnabled - Whether the hook is enabled
* @returns The loading state, the metrics data, and the error state
*/
export function useGetMetrics(
metricNames: string[],
isEnabled = true,
): {
isLoading: boolean;
isError: boolean;
metrics: (MetricMetadata | undefined)[];
} {
const metricsData = useGetMultipleMetrics(metricNames, {
enabled: metricNames.length > 0 && isEnabled,
});
return {
isLoading: metricsData.some((metric) => metric.isLoading),
metrics: metricsData
.map((metric) => metric.data?.data)
.map((data) => data?.data),
isError: metricsData.some((metric) => metric.isError),
};
}
/**
* To get the units of the metrics in the universal unit standard.
* If the unit is not known to the universal unit mapper, it will return the unit as is.
* @param metrics - The metrics to get the units for
* @returns The units of the metrics, can be undefined if the metric has no unit
*/
export function getMetricUnits(
metrics: (MetricMetadata | undefined)[],
): (string | undefined)[] {
return metrics
.map((metric) => metric?.unit)
.map((unit) => mapMetricUnitToUniversalUnit(unit) || undefined);
}
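
With the metricNames/units parameters dropped, the splitter's contract (per the JSDoc above and the test removed earlier in this diff) is one query per queryData entry plus one per formula, where formula charts retain every source query in disabled form. A minimal usage sketch; the relative import path is an assumption.

import { initialQueriesMap } from 'constants/queryBuilder';
import { DataSource } from 'types/common/queryBuilder';
import { splitQueryIntoOneChartPerQuery } from './utils';

// For a staged query with N queryData entries and M formulas, the splitter
// returns N + M single-chart queries.
const staged = initialQueriesMap[DataSource.METRICS];
const charts = splitQueryIntoOneChartPerQuery(staged);
console.log(charts.length); // 1 for the default single-query metrics map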

View File

@@ -131,8 +131,8 @@ function MetricDetails({
>
Open in Explorer
</Button>
{/* Show the inspect button if the metric type is GAUGE */}
{showInspectFeature && openInspectModal && (
{/* Shown based on the feature flag; will remove before releasing the feature */}
{showInspectFeature && (
<Button
className="inspect-metrics-button"
aria-label="Inspect Metric"

View File

@@ -11,7 +11,7 @@ export interface MetricDetailsProps {
isOpen: boolean;
metricName: string | null;
isModalTimeSelection: boolean;
openInspectModal?: (metricName: string) => void;
openInspectModal: (metricName: string) => void;
}
export interface DashboardsAndAlertsPopoverProps {

View File

@@ -370,6 +370,10 @@ function NewWidget({
// this has been moved here from the left container
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
const updatedQuery = cloneDeep(stagedQuery || initialQueriesMap.metrics);
if (updatedQuery?.builder?.queryData?.[0]) {
updatedQuery.builder.queryData[0].pageSize = 10;
}
if (selectedWidget) {
if (selectedGraph === PANEL_TYPES.LIST) {
return {
@@ -415,12 +419,16 @@ function NewWidget({
useEffect(() => {
if (stagedQuery) {
setIsLoadingPanelData(false);
const updatedStagedQuery = cloneDeep(stagedQuery);
if (updatedStagedQuery?.builder?.queryData?.[0]) {
updatedStagedQuery.builder.queryData[0].pageSize = 10;
}
setRequestData((prev) => ({
...prev,
selectedTime: selectedTime.enum || prev.selectedTime,
globalSelectedInterval: customGlobalSelectedInterval,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery,
query: updatedStagedQuery,
fillGaps: selectedWidget.fillSpans || false,
isLogScale: selectedWidget.isLogScale || false,
formatForWeb:

View File

@@ -1,10 +1,17 @@
import { Skeleton, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { AxiosError } from 'axios';
import { useGetGlobalConfig } from 'hooks/globalConfig/useGetGlobalConfig';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
import { useNotifications } from 'hooks/useNotifications';
import { ArrowUpRight, Copy, Info, Key, TriangleAlert } from 'lucide-react';
import {
ArrowUpRight,
Copy,
Info,
Key,
MapPin,
TriangleAlert,
} from 'lucide-react';
import { useEffect, useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { IngestionKeyProps } from 'types/api/ingestionKeys/types';
@@ -52,11 +59,11 @@ export default function OnboardingIngestionDetails(): JSX.Element {
});
const {
data: globalConfig,
isLoading: isLoadingGlobalConfig,
isFetching: isFetchingGlobalConfig,
isError: isErrorGlobalConfig,
} = useGetGlobalConfig(true);
data: deploymentsData,
isLoading: isLoadingDeploymentsData,
isFetching: isFetchingDeploymentsData,
isError: isDeploymentsDataError,
} = useGetDeploymentsData(true);
const handleCopyKey = (text: string): void => {
handleCopyToClipboard(text);
@@ -107,8 +114,8 @@ export default function OnboardingIngestionDetails(): JSX.Element {
<div className="ingestion-key-details-section-key">
{isIngestionKeysLoading ||
isLoadingGlobalConfig ||
isFetchingGlobalConfig ? (
isLoadingDeploymentsData ||
isFetchingDeploymentsData ? (
<div className="skeleton-container">
<Skeleton.Input active className="skeleton-input" />
<Skeleton.Input active className="skeleton-input" />
@@ -117,16 +124,16 @@ export default function OnboardingIngestionDetails(): JSX.Element {
</div>
) : (
<div className="ingestion-key-region-details-section">
{!isErrorGlobalConfig &&
!isLoadingGlobalConfig &&
!isFetchingGlobalConfig && (
{!isDeploymentsDataError &&
!isLoadingDeploymentsData &&
!isFetchingDeploymentsData && (
<div className="ingestion-region-container">
<Typography.Text className="ingestion-region-label">
Ingestion URL
<MapPin size={14} /> Region
</Typography.Text>
<Typography.Text className="ingestion-region-value-copy">
{globalConfig?.data?.data?.ingestion_url}
{deploymentsData?.data?.data?.cluster.region.name}
<Copy
size={14}
@@ -137,7 +144,9 @@ export default function OnboardingIngestionDetails(): JSX.Element {
{},
);
handleCopyKey(`${globalConfig?.data?.data?.ingestion_url}`);
handleCopyKey(
deploymentsData?.data?.data?.cluster.region.name || '',
);
}}
/>
</Typography.Text>

View File

@@ -206,10 +206,6 @@
.ant-select-selector {
border-color: var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
.ant-input-number {

View File

@@ -1,5 +1,7 @@
import { Select } from 'antd';
import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { useEffect, useState } from 'react';
import { MetricAggregateOperator } from 'types/common/queryBuilder';
interface SpaceAggregationOptionsProps {
panelType: PANEL_TYPES | null;
@@ -20,13 +22,39 @@ export default function SpaceAggregationOptions({
operators,
qbVersion,
}: SpaceAggregationOptionsProps): JSX.Element {
const placeHolderText =
panelType === PANEL_TYPES.VALUE || qbVersion === 'v3' ? 'Sum' : 'Sum By';
const [defaultValue, setDefaultValue] = useState(
selectedValue || placeHolderText,
);
useEffect(() => {
if (!selectedValue) {
if (
aggregatorAttributeType === ATTRIBUTE_TYPES.HISTOGRAM ||
aggregatorAttributeType === ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
setDefaultValue(MetricAggregateOperator.P90);
onSelect(MetricAggregateOperator.P90);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.SUM) {
setDefaultValue(MetricAggregateOperator.SUM);
onSelect(MetricAggregateOperator.SUM);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.GAUGE) {
setDefaultValue(MetricAggregateOperator.AVG);
onSelect(MetricAggregateOperator.AVG);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [aggregatorAttributeType]);
return (
<div
className="spaceAggregationOptionsContainer"
key={aggregatorAttributeType}
>
<Select
defaultValue={selectedValue}
defaultValue={defaultValue}
style={{ minWidth: '5.625rem' }}
disabled={disabled}
onChange={onSelect}
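The effect above hard-codes a per-metric-type default. The same rule restated as a lookup table for readability (an illustrative sketch, not part of the change):
// Hypothetical restatement of the defaulting rule:
const DEFAULT_SPACE_AGGREGATION: Partial<
	Record<ATTRIBUTE_TYPES, MetricAggregateOperator>
> = {
	[ATTRIBUTE_TYPES.HISTOGRAM]: MetricAggregateOperator.P90,
	[ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM]: MetricAggregateOperator.P90,
	[ATTRIBUTE_TYPES.SUM]: MetricAggregateOperator.SUM,
	[ATTRIBUTE_TYPES.GAUGE]: MetricAggregateOperator.AVG,
};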

View File

@@ -1,16 +0,0 @@
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.option-renderer-tooltip {
pointer-events: none;
}

View File

@@ -1,4 +1,4 @@
import './OptionRenderer.styles.scss';
import './QueryBuilderSearch.styles.scss';
import { Tooltip } from 'antd';
@@ -13,11 +13,7 @@ function OptionRenderer({
return (
<span className="option">
{type ? (
<Tooltip
title={`${value}`}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<Tooltip title={`${value}`} placement="topLeft">
<div className="selectOptionContainer">
<div className="option-value">{value}</div>
<div className="option-meta-data-container">
@@ -33,11 +29,7 @@ function OptionRenderer({
</div>
</Tooltip>
) : (
<Tooltip
title={label}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<Tooltip title={label} placement="topLeft">
<span>{label}</span>
</Tooltip>
)}

View File

@@ -5,6 +5,19 @@
gap: 12px;
}
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.logs-popup {
&.hide-scroll {
.rc-virtual-list-holder {

View File

@@ -1,88 +0,0 @@
import { render, screen } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { ReduceToFilter } from './ReduceToFilter';
const mockOnChange = jest.fn();
function baseQuery(overrides: Partial<IBuilderQuery> = {}): IBuilderQuery {
return {
dataSource: 'traces',
aggregations: [],
groupBy: [],
orderBy: [],
legend: '',
limit: null,
having: { expression: '' },
...overrides,
} as IBuilderQuery;
}
describe('ReduceToFilter', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('initializes with default avg when no reduceTo is set', () => {
render(<ReduceToFilter query={baseQuery()} onChange={mockOnChange} />);
expect(screen.getByTestId('reduce-to')).toBeInTheDocument();
expect(
screen.getByText('Average of values in timeframe'),
).toBeInTheDocument();
});
it('initializes from query.aggregations[0].reduceTo', () => {
render(
<ReduceToFilter
query={baseQuery({
aggregations: [{ reduceTo: 'sum' } as any],
aggregateAttribute: { key: 'test', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Sum of values in timeframe')).toBeInTheDocument();
});
it('initializes from query.reduceTo when aggregations[0].reduceTo is not set', () => {
render(
<ReduceToFilter
query={baseQuery({
reduceTo: 'max',
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Max of values in timeframe')).toBeInTheDocument();
});
it('updates to sum when aggregateAttribute.type is SUM', async () => {
const { rerender } = render(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
rerender(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test2', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
const reduceToFilterText = (await screen.findByText(
'Sum of values in timeframe',
)) as HTMLElement;
expect(reduceToFilterText).toBeInTheDocument();
});
});

View File

@@ -1,7 +1,6 @@
import { Select } from 'antd';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { REDUCE_TO_VALUES } from 'constants/queryBuilder';
import { memo, useEffect, useRef, useState } from 'react';
import { memo } from 'react';
import { MetricAggregation } from 'types/api/v5/queryRange';
// ** Types
import { ReduceOperators } from 'types/common/queryBuilder';
@@ -13,46 +12,19 @@ export const ReduceToFilter = memo(function ReduceToFilter({
query,
onChange,
}: ReduceToFilterProps): JSX.Element {
const isMounted = useRef<boolean>(false);
const [currentValue, setCurrentValue] = useState<
SelectOption<ReduceOperators, string>
>(REDUCE_TO_VALUES[2]); // default to avg
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
const currentValue =
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[0];
const handleChange = (
newValue: SelectOption<ReduceOperators, string>,
): void => {
setCurrentValue(newValue);
onChange(newValue.value);
};
useEffect(
() => {
if (!isMounted.current) {
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
setCurrentValue(
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[2],
);
isMounted.current = true;
return;
}
const aggregationAttributeType = query.aggregateAttribute?.type as
| MetricType
| undefined;
if (aggregationAttributeType === MetricType.SUM) {
handleChange(REDUCE_TO_VALUES[1]);
} else {
handleChange(REDUCE_TO_VALUES[2]);
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[query.aggregateAttribute?.key],
);
return (
<Select
placeholder="Reduce to"

View File

@@ -1,13 +0,0 @@
import { getGlobalConfig } from 'api/globalConfig/globalConfig';
import { AxiosError, AxiosResponse } from 'axios';
import { useQuery, UseQueryResult } from 'react-query';
import { GlobalConfigDataProps } from 'types/api/globalConfig/types';
export const useGetGlobalConfig = (
isEnabled: boolean,
): UseQueryResult<AxiosResponse<GlobalConfigDataProps>, AxiosError> =>
useQuery<AxiosResponse<GlobalConfigDataProps>, AxiosError>({
queryKey: ['getGlobalConfig'],
queryFn: () => getGlobalConfig(),
enabled: isEnabled,
});

View File

@@ -1,32 +0,0 @@
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQueries, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
type QueryResult = UseQueryResult<
SuccessResponseV2<MetricMetadataResponse>,
Error
>;
type UseGetMultipleMetrics = (
metricNames: string[],
options?: UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>,
headers?: Record<string, string>,
) => QueryResult[];
export const useGetMultipleMetrics: UseGetMultipleMetrics = (
metricNames,
options,
headers,
) =>
useQueries(
metricNames.map(
(metricName) =>
({
queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
queryFn: ({ signal }) => getMetricMetadata(metricName, signal, headers),
...options,
} as UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>),
),
);

View File

@@ -5,7 +5,7 @@ import updateMetricMetadata, {
import { useMutation, UseMutationResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
export interface UseUpdateMetricMetadataProps {
interface UseUpdateMetricMetadataProps {
metricName: string;
payload: UpdateMetricMetadataProps;
}

View File

@@ -188,7 +188,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
spaceAggregation: '',
},
],
}),
@@ -239,7 +239,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
spaceAggregation: '',
},
],
}),
@@ -315,7 +315,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.AVG,
metricName: 'new_gauge',
temporality: '',
spaceAggregation: MetricAggregateOperator.AVG,
spaceAggregation: '',
},
],
}),

View File

@@ -317,7 +317,7 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
spaceAggregation: '',
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
@@ -326,20 +326,7 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
];
} else if (
newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.HISTOGRAM ||
newQuery.aggregateAttribute?.type ===
ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
newQuery.aggregations = [
{
timeAggregation: '',
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.P90,
spaceAggregation: '',
},
];
} else {

View File

@@ -1,238 +0,0 @@
import { renderHook } from '@testing-library/react';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { useGetMetrics } from 'container/MetricsExplorer/Explorer/utils';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import { useQueryBuilder } from './queryBuilder/useQueryBuilder';
import useGetYAxisUnit from './useGetYAxisUnit';
jest.mock('./queryBuilder/useQueryBuilder');
jest.mock('container/MetricsExplorer/Explorer/utils', () => ({
...jest.requireActual('container/MetricsExplorer/Explorer/utils'),
useGetMetrics: jest.fn(),
}));
const mockUseQueryBuilder = useQueryBuilder as jest.MockedFunction<
typeof useQueryBuilder
>;
const mockUseGetMetrics = useGetMetrics as jest.MockedFunction<
typeof useGetMetrics
>;
const MOCK_METRIC_1 = {
unit: UniversalYAxisUnit.BYTES,
} as MetricMetadata;
const MOCK_METRIC_2 = {
unit: UniversalYAxisUnit.SECONDS,
} as MetricMetadata;
const MOCK_METRIC_3 = {
unit: '',
} as MetricMetadata;
function createMockCurrentQuery(
queryType: EQueryType,
queryData: Query['builder']['queryData'] = [],
): Query {
return {
queryType,
promql: [],
builder: {
queryData,
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: [],
id: 'test-id',
};
}
describe('useGetYAxisUnit', () => {
beforeEach(() => {
jest.clearAllMocks();
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [],
});
mockUseQueryBuilder.mockReturnValue(({
currentQuery: undefined,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
});
it('should return undefined yAxisUnit and disable useGetMetrics when currentQuery is null', async () => {
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when queryType is PROM', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.PROM);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when queryType is CLICKHOUSE', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.CLICKHOUSE);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when dataSource is TRACES', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.TRACES,
aggregateAttribute: { key: 'trace_metric' },
} as Query['builder']['queryData'][0],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when dataSource is LOGS', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.LOGS,
aggregateAttribute: { key: 'log_metric' },
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
currentQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should extract all metric names from queryData when no selected query name is provided', () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric1' },
queryName: 'query1',
} as Query['builder']['queryData'][number],
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric2' },
queryName: 'query2',
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderHook(() => useGetYAxisUnit());
expect(mockUseGetMetrics).toHaveBeenCalledWith(['metric1', 'metric2'], true);
});
it('should extract metric name for the selected query only when one is provided', () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric1' },
queryName: 'query1',
} as Query['builder']['queryData'][number],
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric2' },
queryName: 'query2',
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderHook(() => useGetYAxisUnit('query2'));
expect(mockUseGetMetrics).toHaveBeenCalledWith(['metric2'], true);
});
it('should return the unit when there is a single metric with a non-empty unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBe(UniversalYAxisUnit.BYTES);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return undefined when there is a single metric with no unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_3],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return the unit when all metrics have the same non-empty unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1, MOCK_METRIC_1],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBe(UniversalYAxisUnit.BYTES);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return undefined when metrics have different units', async () => {
mockUseGetMetrics.mockReturnValueOnce({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1, MOCK_METRIC_2],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
});

View File

@@ -1,108 +0,0 @@
import {
getMetricUnits,
useGetMetrics,
} from 'container/MetricsExplorer/Explorer/utils';
import { useEffect, useMemo, useState } from 'react';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { useQueryBuilder } from './queryBuilder/useQueryBuilder';
interface UseGetYAxisUnitResult {
yAxisUnit: string | undefined;
isLoading: boolean;
isError: boolean;
}
/**
* Hook to get the y-axis unit for a given metrics-based query.
* @param selectedQueryName - The name of the query to get the y-axis unit for.
* @param params.enabled - Active state of the hook.
* @returns `{ yAxisUnit, isLoading, isError }` The y-axis unit, loading state, and error state
*/
function useGetYAxisUnit(
selectedQueryName?: string,
params: {
enabled?: boolean;
} = {
enabled: true,
},
): UseGetYAxisUnitResult {
const { stagedQuery } = useQueryBuilder();
const [yAxisUnit, setYAxisUnit] = useState<string | undefined>();
const metricNames: string[] | null = useMemo(() => {
// If the query type is not QUERY_BUILDER, return null
if (stagedQuery?.queryType !== EQueryType.QUERY_BUILDER) {
return null;
}
// If the data source is not METRICS, return null
const dataSource = stagedQuery?.builder?.queryData?.[0]?.dataSource;
if (dataSource !== DataSource.METRICS) {
return null;
}
const currentMetricNames: string[] = [];
// If a selected query name is provided, return the metric name for that query only
if (selectedQueryName) {
stagedQuery?.builder?.queryData?.forEach((query) => {
if (
query.queryName === selectedQueryName &&
query.aggregateAttribute?.key
) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames.length ? currentMetricNames : null;
}
// Else, return all metric names
stagedQuery?.builder?.queryData?.forEach((query) => {
if (query.aggregateAttribute?.key) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames.length ? currentMetricNames : null;
}, [
selectedQueryName,
stagedQuery?.builder?.queryData,
stagedQuery?.queryType,
]);
const { metrics, isLoading, isError } = useGetMetrics(
metricNames ?? [],
!!metricNames && params?.enabled,
);
const units = useMemo(() => getMetricUnits(metrics), [metrics]);
const areAllMetricUnitsSame = useMemo(
() => units.every((unit) => unit === units[0]),
[units],
);
useEffect(() => {
// If there are no metrics, set the y-axis unit to undefined
if (units.length === 0) {
setYAxisUnit(undefined);
// If there is one metric and it has a non-empty unit, set the y-axis unit to it
} else if (units.length === 1 && units[0] !== '') {
setYAxisUnit(units[0]);
// If all metrics have the same non-empty unit, set the y-axis unit to it
} else if (areAllMetricUnitsSame) {
if (units[0] !== '') {
setYAxisUnit(units[0]);
} else {
setYAxisUnit(undefined);
}
// If there is more than one metric and they have different units, set the y-axis unit to undefined
} else if (units.length > 1 && !areAllMetricUnitsSame) {
setYAxisUnit(undefined);
// If there is one metric and it has an empty unit, set the y-axis unit to undefined
} else if (units.length === 1 && units[0] === '') {
setYAxisUnit(undefined);
}
}, [units, areAllMetricUnitsSame]);
return { yAxisUnit, isLoading, isError };
}
export default useGetYAxisUnit;
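The branch ladder in the effect reduces to one rule: a y-axis unit is resolved only when every queried metric reports the same non-empty unit. An equivalent side-effect-free restatement (sketch):
// Equivalent to the effect above; `units` is the mapped unit list.
const resolvedUnit =
	units.length > 0 && units[0] !== '' && units.every((unit) => unit === units[0])
		? units[0]
		: undefined;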

View File

@@ -15,7 +15,7 @@ function NoData(): JSX.Element {
<Typography.Text className="not-found-text-1">
Uh-oh! We cannot show the selected trace.
<span className="not-found-text-2">
This can happen in either of the two scenarios -
</span>
</Typography.Text>
</section>

View File

@@ -1,9 +0,0 @@
export interface GlobalConfigData {
external_url: string;
ingestion_url: string;
}
export interface GlobalConfigDataProps {
status: string;
data: GlobalConfigData;
}

View File

@@ -1,15 +0,0 @@
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
export interface MetricMetadata {
description: string;
type: MetricType;
unit: string;
temporality: Temporality;
isMonotonic: boolean;
}
export interface MetricMetadataResponse {
status: string;
data: MetricMetadata;
}

View File

@@ -10,8 +10,8 @@ import (
func (provider *provider) addPromoteRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.EditAccess(provider.promoteHandler.HandlePromoteAndIndexPaths), handler.OpenAPIDef{
ID: "HandlePromoteAndIndexPaths",
Tags: []string{"logs"},
ID: "PromotePaths",
Tags: []string{"promoted_paths", "logs", "json_logs"},
Summary: "Promote and index paths",
Description: "This endpoints promotes and indexes paths",
Request: new([]*promotetypes.PromotePath),
@@ -25,8 +25,8 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
}
if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.ViewAccess(provider.promoteHandler.ListPromotedAndIndexedPaths), handler.OpenAPIDef{
ID: "ListPromotedAndIndexedPaths",
Tags: []string{"logs"},
ID: "PromotePaths",
Tags: []string{"promoted_paths", "logs", "json_logs"},
Summary: "Promote and index paths",
Description: "This endpoints promotes and indexes paths",
Request: nil,

View File

@@ -100,8 +100,10 @@ func newProvider(
traceAggExprRewriter,
)
logsKeyEvolutionMetadata := telemetrylogs.NewKeyEvolutionMetadata(telemetryStore, cache, settings.Logger)
// Create log statement builder
logFieldMapper := telemetrylogs.NewFieldMapper()
logFieldMapper := telemetrylogs.NewFieldMapper(logsKeyEvolutionMetadata)
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,

View File

@@ -51,6 +51,8 @@ func NewAggExprRewriter(
// and the args if the parametric aggregation function is used.
func (r *aggExprRewriter) Rewrite(
ctx context.Context,
startNs uint64,
endNs uint64,
expr string,
rateInterval uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -77,7 +79,12 @@ func (r *aggExprRewriter) Rewrite(
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
}
visitor := newExprVisitor(r.logger, keys,
visitor := newExprVisitor(
ctx,
startNs,
endNs,
r.logger,
keys,
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
@@ -98,6 +105,8 @@ func (r *aggExprRewriter) Rewrite(
// RewriteMulti rewrites a slice of expressions.
func (r *aggExprRewriter) RewriteMulti(
ctx context.Context,
startNs uint64,
endNs uint64,
exprs []string,
rateInterval uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -106,7 +115,7 @@ func (r *aggExprRewriter) RewriteMulti(
var errs []error
var chArgsList [][]any
for i, e := range exprs {
w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
w, chArgs, err := r.Rewrite(ctx, startNs, endNs, e, rateInterval, keys)
if err != nil {
errs = append(errs, err)
out[i] = e
@@ -123,6 +132,9 @@ func (r *aggExprRewriter) RewriteMulti(
// exprVisitor walks FunctionExpr nodes and applies the mappers.
type exprVisitor struct {
ctx context.Context
startNs uint64
endNs uint64
chparser.DefaultASTVisitor
logger *slog.Logger
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
@@ -137,6 +149,9 @@ type exprVisitor struct {
}
func newExprVisitor(
ctx context.Context,
startNs uint64,
endNs uint64,
logger *slog.Logger,
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
@@ -146,6 +161,9 @@ func newExprVisitor(
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
ctx: ctx,
startNs: startNs,
endNs: endNs,
logger: logger,
fieldKeys: fieldKeys,
fullTextColumn: fullTextColumn,
@@ -190,7 +208,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -199,7 +217,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
ConditionBuilder: v.conditionBuilder,
FullTextColumn: v.fullTextColumn,
JsonKeyToKey: v.jsonKeyToKey,
}, 0, 0,
}, v.startNs, v.endNs,
)
if err != nil {
return err
@@ -219,7 +237,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
@@ -237,7 +255,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i, arg := range args {
orig := arg.String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return err
}
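Callers now thread the query window through to predicate rewriting. A hedged usage sketch (the `rewriter` instance, expression, rate interval, and empty keys map are illustrative, not from the diff):
// Sketch: rewrite one aggregation expression over the last hour.
endNs := uint64(time.Now().UnixNano())
startNs := endNs - uint64(time.Hour.Nanoseconds())
keys := map[string][]*telemetrytypes.TelemetryFieldKey{}
rewritten, chArgs, err := rewriter.Rewrite(ctx, startNs, endNs, "countIf(severity_text = 'ERROR')", 60, keys)
if err != nil {
	// handle the rewrite failure
}
_ = rewritten // SQL fragment with time-aware column expressions
_ = chArgs    // ClickHouse args when a parametric aggregation is used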

View File

@@ -19,6 +19,8 @@ import (
func CollisionHandledFinalExpr(
ctx context.Context,
startNs uint64,
endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
fm qbtypes.FieldMapper,
cb qbtypes.ConditionBuilder,
@@ -45,7 +47,7 @@ func CollisionHandledFinalExpr(
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, startNs, endNs)
if err != nil {
return err
}
@@ -58,7 +60,7 @@ func CollisionHandledFinalExpr(
return nil
}
colName, err := fm.FieldFor(ctx, field)
colName, err := fm.FieldFor(ctx, startNs, endNs, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -93,7 +95,7 @@ func CollisionHandledFinalExpr(
if err != nil {
return "", nil, err
}
colName, _ = fm.FieldFor(ctx, key)
colName, _ = fm.FieldFor(ctx, startNs, endNs, key)
colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
stmts = append(stmts, colName)
}

View File

@@ -48,8 +48,8 @@ func (b *defaultConditionBuilder) ConditionFor(
op qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
startNs uint64,
endNs uint64,
) (string, error) {
if key.FieldContext != telemetrytypes.FieldContextResource {
@@ -68,7 +68,7 @@ func (b *defaultConditionBuilder) ConditionFor(
keyIdxFilter := sb.Like(column.Name, keyIndexFilter(key))
valueForIndexFilter := valueForIndexFilter(op, key, value)
fieldName, err := b.fm.FieldFor(ctx, key)
fieldName, err := b.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}

View File

@@ -47,6 +47,7 @@ func (m *defaultFieldMapper) ColumnFor(
func (m *defaultFieldMapper) FieldFor(
ctx context.Context,
tsStart, tsEnd uint64,
key *telemetrytypes.TelemetryFieldKey,
) (string, error) {
column, err := m.getColumn(ctx, key)
@@ -61,10 +62,11 @@ func (m *defaultFieldMapper) FieldFor(
func (m *defaultFieldMapper) ColumnExpressionFor(
ctx context.Context,
tsStart, tsEnd uint64,
key *telemetrytypes.TelemetryFieldKey,
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
colName, err := m.FieldFor(ctx, key)
colName, err := m.FieldFor(ctx, tsStart, tsEnd, key)
if err != nil {
return "", err
}

View File

@@ -25,6 +25,7 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
func (c *conditionBuilder) conditionFor(
ctx context.Context,
startNs, endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
@@ -46,7 +47,7 @@ func (c *conditionBuilder) conditionFor(
return "", err
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}
@@ -239,10 +240,10 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
startNs uint64,
endNs uint64,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
if err != nil {
return "", err
}
@@ -250,12 +251,12 @@ func (c *conditionBuilder) ConditionFor(
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
// with an exception for body json search
field, _ := c.fm.FieldFor(ctx, key)
field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
if slices.Contains(maps.Keys(IntrinsicFields), field) && key.FieldContext != telemetrytypes.FieldContextBody {
return condition, nil
}
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
if err != nil {
return "", err
}

View File

@@ -6,6 +6,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -372,7 +373,8 @@ func TestConditionFor(t *testing.T) {
},
}
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
@@ -425,7 +427,8 @@ func TestConditionForMultipleKeys(t *testing.T) {
},
}
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
@@ -684,7 +687,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
}
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {

View File

@@ -4,11 +4,14 @@ import (
"context"
"fmt"
"strings"
"time"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/maps"
@@ -55,10 +58,13 @@ var (
)
type fieldMapper struct {
evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore
}
func NewFieldMapper() qbtypes.FieldMapper {
return &fieldMapper{}
func NewFieldMapper(evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore) qbtypes.FieldMapper {
return &fieldMapper{
evolutionMetadataStore: evolutionMetadataStore,
}
}
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
@@ -100,7 +106,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
return nil, qbtypes.ErrColumnNotFound
}
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err
@@ -112,17 +118,34 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
if key.FieldContext != telemetrytypes.FieldContextResource {
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
}
oldColumn := logsV2Columns["resources_string"]
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
// have to add ::String as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
// once the ClickHouse dependency is updated, we need to check if we can remove it.
baseColumn := logsV2Columns["resources_string"]
tsStartTime := time.Unix(0, int64(tsStart))
// Extract orgId from context
var orgID valuer.UUID
if claims, err := authtypes.ClaimsFromContext(ctx); err == nil {
orgID, err = valuer.NewUUID(claims.OrgID)
if err != nil {
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid orgId %s", claims.OrgID)
}
}
// get all evolutions for the column
evolutions := m.evolutionMetadataStore.Get(ctx, orgID, baseColumn.Name)
// restricted for now to a single entry, where we know the column changed from map to json
if len(evolutions) > 0 && evolutions[0].ReleaseTime.Before(tsStartTime) {
return fmt.Sprintf("%s.`%s`::String", evolutions[0].NewColumn, key.Name), nil
}
if key.Materialized {
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyName := telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
} else {
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
attrVal := fmt.Sprintf("%s['%s']", baseColumn.Name, key.Name)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, baseColumn.Name, key.Name, attrVal), nil
}
case schema.ColumnTypeEnumLowCardinality:
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
@@ -161,11 +184,12 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
tsStart, tsEnd uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
colName, err := m.FieldFor(ctx, field)
colName, err := m.FieldFor(ctx, tsStart, tsEnd, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -175,7 +199,7 @@ func (m *fieldMapper) ColumnExpressionFor(
if _, ok := logsV2Columns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextLog
colName, _ = m.FieldFor(ctx, field)
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
} else {
// - the context is not provided
// - there are no keys for the field
@@ -193,12 +217,12 @@ func (m *fieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
colName, _ = m.FieldFor(ctx, keysForField[0])
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
colName, _ = m.FieldFor(ctx, key)
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
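Net effect of the evolution lookup in FieldFor: when the map-to-JSON release predates the query start, only the new JSON column is read (e.g. resource.`service.name`::String); otherwise the multiIf fallback covers both the JSON column and resources_string. A condensed sketch of the decision (the helper name is hypothetical; only ReleaseTime and NewColumn appear in the diff):
// true  => read only evolutions[0].NewColumn
// false => emit the multiIf fallback over both columns
func useNewJSONColumnOnly(releaseTime time.Time, tsStart uint64) bool {
	tsStartTime := time.Unix(0, int64(tsStart))
	// strict Before: a window starting exactly at the release time still
	// takes the fallback, matching the tests that follow
	return releaseTime.Before(tsStartTime)
}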

View File

@@ -3,10 +3,14 @@ package telemetrylogs
import (
"context"
"testing"
"time"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -164,7 +168,8 @@ func TestGetColumn(t *testing.T) {
},
}
fm := NewFieldMapper()
mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(mockStore)
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
@@ -229,7 +234,7 @@ func TestGetFieldKeyName(t *testing.T) {
expectedError: nil,
},
{
name: "Map column type - resource attribute",
name: "Map column type - resource attribute - json",
key: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
@@ -238,7 +243,7 @@ func TestGetFieldKeyName(t *testing.T) {
expectedError: nil,
},
{
name: "Map column type - resource attribute - Materialized",
name: "Map column type - resource attribute - Materialized - json",
key: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
@@ -261,8 +266,96 @@ func TestGetFieldKeyName(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fm := NewFieldMapper()
result, err := fm.FieldFor(ctx, &tc.key)
mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(mockStore)
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)
} else {
require.NoError(t, err)
assert.Equal(t, tc.expectedResult, result)
}
})
}
}
func TestFieldForWithEvolutionMetadata(t *testing.T) {
ctx := context.Background()
orgId := valuer.GenerateUUID()
ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
OrgID: orgId.String(),
})
// Create a test release time
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
releaseTimeNano := uint64(releaseTime.UnixNano())
// Common test key
serviceNameKey := telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
}
// Common expected results
jsonOnlyResult := "resource.`service.name`::String"
fallbackResult := "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)"
// Set up stores once
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(mockKeyEvolutionMetadata(ctx, orgId, releaseTime))
storeWithoutMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
testCases := []struct {
name string
tsStart uint64
tsEnd uint64
key telemetrytypes.TelemetryFieldKey
mockStore *telemetrytypestest.MockKeyEvolutionMetadataStore
expectedResult string
expectedError error
}{
{
name: "Resource attribute - tsStart before release time (use fallback with multiIf)",
tsStart: releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
tsEnd: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
key: serviceNameKey,
mockStore: storeWithMetadata,
expectedResult: fallbackResult,
expectedError: nil,
},
{
name: "Resource attribute - tsStart after release time (use new json column)",
tsStart: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
tsEnd: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
key: serviceNameKey,
mockStore: storeWithMetadata,
expectedResult: jsonOnlyResult,
expectedError: nil,
},
{
name: "Resource attribute - no evolution metadata (use fallback with multiIf)",
tsStart: releaseTimeNano,
tsEnd: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
key: serviceNameKey,
mockStore: storeWithoutMetadata,
expectedResult: fallbackResult,
expectedError: nil,
},
{
name: "Resource attribute - tsStart exactly at release time (use fallback with multiIf)",
tsStart: releaseTimeNano,
tsEnd: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
key: serviceNameKey,
mockStore: storeWithMetadata,
expectedResult: fallbackResult,
expectedError: nil,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fm := NewFieldMapper(tc.mockStore)
result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

View File

@@ -5,12 +5,14 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/stretchr/testify/require"
)
// TestLikeAndILikeWithoutWildcards_Warns tests that LIKE/ILIKE without wildcards add warnings and include the docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
@@ -33,7 +35,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)
@@ -46,7 +48,8 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
// TestLikeAndILikeWithWildcards_NoWarn tests that LIKE/ILIKE with wildcards do not add warnings
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
@@ -69,7 +72,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)

View File

@@ -7,13 +7,15 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases

View File

@@ -9,13 +9,15 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
@@ -2422,7 +2424,8 @@ func TestFilterExprLogs(t *testing.T) {
// TestFilterExprLogsConflictNegation tests a comprehensive set of conflict-negation query patterns for logs search
func TestFilterExprLogsConflictNegation(t *testing.T) {
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases

View File

@@ -1,149 +0,0 @@
package telemetrylogs
import (
"context"
"slices"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
var (
CodePlanIndexOutOfBounds = errors.MustNewCode("plan_index_out_of_bounds")
)
type JSONAccessPlanBuilder struct {
key *telemetrytypes.TelemetryFieldKey
value any
op qbtypes.FilterOperator
parts []string
getTypes func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)
isPromoted bool
}
// buildPlan recursively builds the path plan tree
func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, parent *telemetrytypes.JSONAccessNode, isDynArrChild bool) (*telemetrytypes.JSONAccessNode, error) {
if index >= len(pb.parts) {
return nil, errors.NewInvalidInputf(CodePlanIndexOutOfBounds, "index is out of bounds")
}
part := pb.parts[index]
pathSoFar := strings.Join(pb.parts[:index+1], ArraySep)
isTerminal := index == len(pb.parts)-1
// Calculate progression parameters based on parent's values
var maxTypes, maxPaths int
if isDynArrChild {
// Child of Dynamic array - reset progression to base values (16, 256)
// This happens when we switch from Array(Dynamic) to Array(JSON)
maxTypes = 16
maxPaths = 256
} else if parent != nil {
// Child of JSON array - use parent's progression divided by 2 and 4
maxTypes = parent.MaxDynamicTypes / 2
maxPaths = parent.MaxDynamicPaths / 4
if maxTypes < 0 {
maxTypes = 0
}
if maxPaths < 0 {
maxPaths = 0
}
}
types, err := pb.getTypes(ctx, pathSoFar)
if err != nil {
return nil, err
}
// Create node for this path segment
node := &telemetrytypes.JSONAccessNode{
Name: part,
IsTerminal: isTerminal,
AvailableTypes: types,
Branches: make(map[telemetrytypes.JSONAccessBranchType]*telemetrytypes.JSONAccessNode),
Parent: parent,
MaxDynamicTypes: maxTypes,
MaxDynamicPaths: maxPaths,
}
hasJSON := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayJSON)
hasDynamic := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayDynamic)
// Configure terminal if this is the last part
if isTerminal {
valueType, _ := inferDataType(pb.value, pb.op, pb.key)
node.TerminalConfig = &telemetrytypes.TerminalConfig{
Key: pb.key,
ElemType: *pb.key.JSONDataType,
ValueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType],
}
} else {
if hasJSON {
node.Branches[telemetrytypes.BranchJSON], err = pb.buildPlan(ctx, index+1, node, false)
if err != nil {
return nil, err
}
}
if hasDynamic {
node.Branches[telemetrytypes.BranchDynamic], err = pb.buildPlan(ctx, index+1, node, true)
if err != nil {
return nil, err
}
}
}
return node, nil
}
// PlanJSON builds a tree structure representing the complete JSON path traversal
// that precomputes all possible branches and their types
func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbtypes.FilterOperator,
value any,
getTypes func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error),
) (telemetrytypes.JSONAccessPlan, error) {
// if the path is empty, return an error
if key.Name == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "path is empty")
}
// TODO: PlanJSON requires the query's start and end to select the correct column between promoted and body_json using
// the creation time in distributed_promoted_paths
path := strings.ReplaceAll(key.Name, ArrayAnyIndex, ArraySep)
parts := strings.Split(path, ArraySep)
pb := &JSONAccessPlanBuilder{
key: key,
op: op,
value: value,
parts: parts,
getTypes: getTypes,
isPromoted: key.Materialized,
}
plans := telemetrytypes.JSONAccessPlan{}
node, err := pb.buildPlan(ctx, 0,
telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn,
32, 0),
false,
)
if err != nil {
return nil, err
}
plans = append(plans, node)
if pb.isPromoted {
node, err := pb.buildPlan(ctx, 0,
telemetrytypes.NewRootJSONAccessNode(LogsV2BodyPromotedColumn,
32, 1024),
true,
)
if err != nil {
return nil, err
}
plans = append(plans, node)
}
return plans, nil
}
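For reference, the type/path budget progression that buildPlan applies, traced for the two roots created above (numbers follow the rules in the removed code):
// Worked trace of MaxDynamicTypes / MaxDynamicPaths (illustrative):
// body_json root:              32, 0
//   child via JSON array:      32/2 = 16, 0/4 = 0
//     grandchild via JSON:     16/2 = 8,  0/4 = 0
//   child via Dynamic array:   reset to 16, 256
// body_promoted root:          32, 1024 (first level is built with
//                              isDynArrChild=true, so it resets to 16, 256)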

View File

@@ -1,880 +0,0 @@
package telemetrylogs
import (
"context"
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/require"
"gopkg.in/yaml.v3"
)
// ============================================================================
// Helper Functions for Test Data Creation
// ============================================================================
// makeKey creates a TelemetryFieldKey for testing
func makeKey(name string, dataType telemetrytypes.JSONDataType, materialized bool) *telemetrytypes.TelemetryFieldKey {
return &telemetrytypes.TelemetryFieldKey{
Name: name,
JSONDataType: &dataType,
Materialized: materialized,
}
}
// makeGetTypes creates a getTypes function from a map of path -> types
func makeGetTypes(typesMap map[string][]telemetrytypes.JSONDataType) func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
return func(_ context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
return typesMap[path], nil
}
}
// ============================================================================
// Helper Functions for Node Validation
// ============================================================================
// jsonAccessTestNode is a test-only, YAML-friendly view of JSONAccessNode.
// It intentionally omits Parent to avoid cycles and only keeps the fields
// that are useful for understanding / asserting the plan structure.
type jsonAccessTestNode struct {
Name string `yaml:"name"`
Column string `yaml:"column,omitempty"`
IsTerminal bool `yaml:"isTerminal,omitempty"`
MaxDynamicTypes int `yaml:"maxDynamicTypes,omitempty"`
MaxDynamicPaths int `yaml:"maxDynamicPaths,omitempty"`
ElemType string `yaml:"elemType,omitempty"`
ValueType string `yaml:"valueType,omitempty"`
AvailableTypes []string `yaml:"availableTypes,omitempty"`
Branches map[string]*jsonAccessTestNode `yaml:"branches,omitempty"`
}
// toTestNode converts a JSONAccessNode tree into jsonAccessTestNode so that
// it can be serialized to YAML for easy visual comparison in tests.
func toTestNode(n *telemetrytypes.JSONAccessNode) *jsonAccessTestNode {
if n == nil {
return nil
}
out := &jsonAccessTestNode{
Name: n.Name,
IsTerminal: n.IsTerminal,
MaxDynamicTypes: n.MaxDynamicTypes,
MaxDynamicPaths: n.MaxDynamicPaths,
}
// Column information for top-level plan nodes: their parent is the root,
// whose parent is nil.
if n.Parent != nil && n.Parent.Parent == nil {
out.Column = n.Parent.Name
}
// AvailableTypes as strings (using StringValue for stable representation)
if len(n.AvailableTypes) > 0 {
out.AvailableTypes = make([]string, 0, len(n.AvailableTypes))
for _, t := range n.AvailableTypes {
out.AvailableTypes = append(out.AvailableTypes, t.StringValue())
}
}
// Terminal config
if n.TerminalConfig != nil {
out.ElemType = n.TerminalConfig.ElemType.StringValue()
out.ValueType = n.TerminalConfig.ValueType.StringValue()
}
// Branches
if len(n.Branches) > 0 {
out.Branches = make(map[string]*jsonAccessTestNode, len(n.Branches))
for bt, child := range n.Branches {
out.Branches[bt.StringValue()] = toTestNode(child)
}
}
return out
}
// plansToYAML converts a slice of JSONAccessNode plans to a YAML string that
// can be compared against a per-test expectedTree.
func plansToYAML(t *testing.T, plans []*telemetrytypes.JSONAccessNode) string {
t.Helper()
testNodes := make([]*jsonAccessTestNode, 0, len(plans))
for _, p := range plans {
testNodes = append(testNodes, toTestNode(p))
}
got, err := yaml.Marshal(testNodes)
require.NoError(t, err)
return string(got)
}
// ============================================================================
// Test Cases for Node Methods
// ============================================================================
func TestNode_Alias(t *testing.T) {
tests := []struct {
name string
node *telemetrytypes.JSONAccessNode
expected string
}{
{
name: "Root node returns name as-is",
node: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
expected: LogsV2BodyJSONColumn,
},
{
name: "Node without parent returns backticked name",
node: &telemetrytypes.JSONAccessNode{
Name: "user",
Parent: nil,
},
expected: "`user`",
},
{
name: "Node with root parent uses dot separator",
node: &telemetrytypes.JSONAccessNode{
Name: "age",
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
},
expected: "`" + LogsV2BodyJSONColumn + ".age`",
},
{
name: "Node with non-root parent uses array separator",
node: &telemetrytypes.JSONAccessNode{
Name: "name",
Parent: &telemetrytypes.JSONAccessNode{
Name: "education",
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
},
},
expected: "`" + LogsV2BodyJSONColumn + ".education[].name`",
},
{
name: "Nested array path with multiple levels",
node: &telemetrytypes.JSONAccessNode{
Name: "type",
Parent: &telemetrytypes.JSONAccessNode{
Name: "awards",
Parent: &telemetrytypes.JSONAccessNode{
Name: "education",
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
},
},
},
expected: "`" + LogsV2BodyJSONColumn + ".education[].awards[].type`",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := tt.node.Alias()
require.Equal(t, tt.expected, result)
})
}
}
func TestNode_FieldPath(t *testing.T) {
tests := []struct {
name string
node *telemetrytypes.JSONAccessNode
expected string
}{
{
name: "Simple field path from root",
node: &telemetrytypes.JSONAccessNode{
Name: "user",
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
},
// FieldPath() always wraps the field name in backticks
expected: LogsV2BodyJSONColumn + ".`user`",
},
{
name: "Field path with backtick-required key",
node: &telemetrytypes.JSONAccessNode{
Name: "user-name", // requires backtick
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
},
expected: LogsV2BodyJSONColumn + ".`user-name`",
},
{
name: "Nested field path",
node: &telemetrytypes.JSONAccessNode{
Name: "age",
Parent: &telemetrytypes.JSONAccessNode{
Name: "user",
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
},
},
// FieldPath() always wraps the field name in backticks
expected: "`" + LogsV2BodyJSONColumn + ".user`.`age`",
},
{
name: "Array element field path",
node: &telemetrytypes.JSONAccessNode{
Name: "name",
Parent: &telemetrytypes.JSONAccessNode{
Name: "education",
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
},
},
// FieldPath() always wraps the field name in backticks
expected: "`" + LogsV2BodyJSONColumn + ".education`.`name`",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := tt.node.FieldPath()
require.Equal(t, tt.expected, result)
})
}
}
// ============================================================================
// Test Cases for PlanJSON
// ============================================================================
func TestPlanJSON_BasicStructure(t *testing.T) {
_, getTypes := testTypeSet()
tests := []struct {
name string
key *telemetrytypes.TelemetryFieldKey
expectErr bool
expectedYAML string
}{
{
name: "Simple path not promoted",
key: makeKey("user.name", telemetrytypes.String, false),
expectedYAML: `
- name: user.name
column: body_json
availableTypes:
- String
maxDynamicTypes: 16
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "Simple path promoted",
key: makeKey("user.name", telemetrytypes.String, true),
expectedYAML: `
- name: user.name
column: body_json
availableTypes:
- String
maxDynamicTypes: 16
isTerminal: true
elemType: String
valueType: String
- name: user.name
column: body_json_promoted
availableTypes:
- String
maxDynamicTypes: 16
maxDynamicPaths: 256
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "Empty path returns error",
key: makeKey("", telemetrytypes.String, false),
expectErr: true,
expectedYAML: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
plans, err := PlanJSON(context.Background(), tt.key, qbtypes.FilterOperatorEqual, "John", getTypes)
if tt.expectErr {
require.Error(t, err)
require.Nil(t, plans)
return
}
require.NoError(t, err)
got := plansToYAML(t, plans)
require.YAMLEq(t, tt.expectedYAML, got)
})
}
}
func TestPlanJSON_ArrayPaths(t *testing.T) {
_, getTypes := testTypeSet()
tests := []struct {
name string
path string
expectedYAML string
}{
{
name: "Single array level - JSON branch only",
path: "education[].name",
expectedYAML: `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: name
availableTypes:
- String
maxDynamicTypes: 8
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "Single array level - both JSON and Dynamic branches",
path: "education[].awards[].type",
expectedYAML: `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: awards
availableTypes:
- Array(Dynamic)
- Array(JSON)
maxDynamicTypes: 8
branches:
json:
name: type
availableTypes:
- String
maxDynamicTypes: 4
isTerminal: true
elemType: String
valueType: String
dynamic:
name: type
availableTypes:
- String
maxDynamicTypes: 16
maxDynamicPaths: 256
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "Deeply nested array path",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
expectedYAML: `
- name: interests
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: entities
availableTypes:
- Array(JSON)
maxDynamicTypes: 8
branches:
json:
name: reviews
availableTypes:
- Array(JSON)
maxDynamicTypes: 4
branches:
json:
name: entries
availableTypes:
- Array(JSON)
maxDynamicTypes: 2
branches:
json:
name: metadata
availableTypes:
- Array(JSON)
maxDynamicTypes: 1
branches:
json:
name: positions
availableTypes:
- Array(JSON)
branches:
json:
name: name
availableTypes:
- String
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "ArrayAnyIndex replacement [*] to []",
path: "education[*].name",
expectedYAML: `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: name
availableTypes:
- String
maxDynamicTypes: 8
isTerminal: true
elemType: String
valueType: String
`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
key := makeKey(tt.path, telemetrytypes.String, false)
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
require.NoError(t, err)
require.NotNil(t, plans)
require.Len(t, plans, 1)
got := plansToYAML(t, plans)
require.YAMLEq(t, tt.expectedYAML, got)
})
}
}
func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
_, getTypes := testTypeSet()
path := "education[].awards[].type"
value := "sports"
t.Run("Non-promoted plan", func(t *testing.T) {
key := makeKey(path, telemetrytypes.String, false)
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
require.NoError(t, err)
require.Len(t, plans, 1)
expectedYAML := `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: awards
availableTypes:
- Array(Dynamic)
- Array(JSON)
maxDynamicTypes: 8
branches:
json:
name: type
availableTypes:
- String
maxDynamicTypes: 4
isTerminal: true
elemType: String
valueType: String
dynamic:
name: type
availableTypes:
- String
maxDynamicTypes: 16
maxDynamicPaths: 256
isTerminal: true
elemType: String
valueType: String
`
got := plansToYAML(t, plans)
require.YAMLEq(t, expectedYAML, got)
})
t.Run("Promoted plan", func(t *testing.T) {
key := makeKey(path, telemetrytypes.String, true)
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
require.NoError(t, err)
require.Len(t, plans, 2)
expectedYAML := `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: awards
availableTypes:
- Array(Dynamic)
- Array(JSON)
maxDynamicTypes: 8
branches:
json:
name: type
availableTypes:
- String
maxDynamicTypes: 4
isTerminal: true
elemType: String
valueType: String
dynamic:
name: type
availableTypes:
- String
maxDynamicTypes: 16
maxDynamicPaths: 256
isTerminal: true
elemType: String
valueType: String
- name: education
column: body_json_promoted
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
maxDynamicPaths: 256
branches:
json:
name: awards
availableTypes:
- Array(Dynamic)
- Array(JSON)
maxDynamicTypes: 8
maxDynamicPaths: 64
branches:
json:
name: type
availableTypes:
- String
maxDynamicTypes: 4
maxDynamicPaths: 16
isTerminal: true
elemType: String
valueType: String
dynamic:
name: type
availableTypes:
- String
maxDynamicTypes: 16
maxDynamicPaths: 256
isTerminal: true
elemType: String
valueType: String
`
got := plansToYAML(t, plans)
require.YAMLEq(t, expectedYAML, got)
})
}
func TestPlanJSON_EdgeCases(t *testing.T) {
_, getTypes := testTypeSet()
tests := []struct {
name string
path string
value any
expectedYAML string
}{
{
name: "Path with no available types",
path: "unknown.path",
value: "test",
expectedYAML: `
- name: unknown.path
column: body_json
maxDynamicTypes: 16
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "Very deep nesting - validates progression doesn't go negative",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
value: "Engineer",
expectedYAML: `
- name: interests
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: entities
availableTypes:
- Array(JSON)
maxDynamicTypes: 8
branches:
json:
name: reviews
availableTypes:
- Array(JSON)
maxDynamicTypes: 4
branches:
json:
name: entries
availableTypes:
- Array(JSON)
maxDynamicTypes: 2
branches:
json:
name: metadata
availableTypes:
- Array(JSON)
maxDynamicTypes: 1
branches:
json:
name: positions
availableTypes:
- Array(JSON)
branches:
json:
name: name
availableTypes:
- String
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "Path with mixed scalar and array types",
path: "education[].type",
value: "high_school",
expectedYAML: `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: type
availableTypes:
- String
- Int64
maxDynamicTypes: 8
isTerminal: true
elemType: String
valueType: String
`,
},
{
name: "Exists with only array types available",
path: "education",
value: nil,
expectedYAML: `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
isTerminal: true
elemType: Array(JSON)
`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Choose key type based on path; operator does not affect the tree shape asserted here.
keyType := telemetrytypes.String
switch tt.path {
case "education":
keyType = telemetrytypes.ArrayJSON
case "education[].type":
keyType = telemetrytypes.String
}
key := makeKey(tt.path, keyType, false)
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, tt.value, getTypes)
require.NoError(t, err)
got := plansToYAML(t, plans)
require.YAMLEq(t, tt.expectedYAML, got)
})
}
}
func TestPlanJSON_TreeStructure(t *testing.T) {
_, getTypes := testTypeSet()
path := "education[].awards[].participated[].team[].branch"
key := makeKey(path, telemetrytypes.String, false)
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
require.NoError(t, err)
require.Len(t, plans, 1)
expectedYAML := `
- name: education
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
branches:
json:
name: awards
availableTypes:
- Array(Dynamic)
- Array(JSON)
maxDynamicTypes: 8
branches:
json:
name: participated
availableTypes:
- Array(Dynamic)
- Array(JSON)
maxDynamicTypes: 4
branches:
json:
name: team
availableTypes:
- Array(JSON)
maxDynamicTypes: 2
branches:
json:
name: branch
availableTypes:
- String
maxDynamicTypes: 1
isTerminal: true
elemType: String
valueType: String
dynamic:
name: team
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
maxDynamicPaths: 256
branches:
json:
name: branch
availableTypes:
- String
maxDynamicTypes: 8
maxDynamicPaths: 64
isTerminal: true
elemType: String
valueType: String
dynamic:
name: participated
availableTypes:
- Array(Dynamic)
- Array(JSON)
maxDynamicTypes: 16
maxDynamicPaths: 256
branches:
json:
name: team
availableTypes:
- Array(JSON)
maxDynamicTypes: 8
maxDynamicPaths: 64
branches:
json:
name: branch
availableTypes:
- String
maxDynamicTypes: 4
maxDynamicPaths: 16
isTerminal: true
elemType: String
valueType: String
dynamic:
name: team
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
maxDynamicPaths: 256
branches:
json:
name: branch
availableTypes:
- String
maxDynamicTypes: 8
maxDynamicPaths: 64
isTerminal: true
elemType: String
valueType: String
`
got := plansToYAML(t, plans)
require.YAMLEq(t, expectedYAML, got)
}
// ============================================================================
// Test Data Setup
// ============================================================================
// testTypeSet returns a map of path->types and a getTypes function for testing
// This represents the type information available in the test JSON structure
//
// TODO(Piyush): Remove this unparam nolint
// nolint:unparam
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
types := map[string][]telemetrytypes.JSONDataType{
"user.name": {telemetrytypes.String},
"user.age": {telemetrytypes.Int64, telemetrytypes.String},
"user.height": {telemetrytypes.Float64},
"education": {telemetrytypes.ArrayJSON},
"education[].name": {telemetrytypes.String},
"education[].type": {telemetrytypes.String, telemetrytypes.Int64},
"education[].internal_type": {telemetrytypes.String},
"education[].metadata.location": {telemetrytypes.String},
"education[].parameters": {telemetrytypes.ArrayFloat64, telemetrytypes.ArrayDynamic},
"education[].duration": {telemetrytypes.String},
"education[].mode": {telemetrytypes.String},
"education[].year": {telemetrytypes.Int64},
"education[].field": {telemetrytypes.String},
"education[].awards": {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
"education[].awards[].name": {telemetrytypes.String},
"education[].awards[].rank": {telemetrytypes.Int64},
"education[].awards[].medal": {telemetrytypes.String},
"education[].awards[].type": {telemetrytypes.String},
"education[].awards[].semester": {telemetrytypes.Int64},
"education[].awards[].participated": {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
"education[].awards[].participated[].type": {telemetrytypes.String},
"education[].awards[].participated[].field": {telemetrytypes.String},
"education[].awards[].participated[].project_type": {telemetrytypes.String},
"education[].awards[].participated[].project_name": {telemetrytypes.String},
"education[].awards[].participated[].race_type": {telemetrytypes.String},
"education[].awards[].participated[].team_based": {telemetrytypes.Bool},
"education[].awards[].participated[].team_name": {telemetrytypes.String},
"education[].awards[].participated[].team": {telemetrytypes.ArrayJSON},
"education[].awards[].participated[].team[].name": {telemetrytypes.String},
"education[].awards[].participated[].team[].branch": {telemetrytypes.String},
"education[].awards[].participated[].team[].semester": {telemetrytypes.Int64},
"interests": {telemetrytypes.ArrayJSON},
"interests[].type": {telemetrytypes.String},
"interests[].entities": {telemetrytypes.ArrayJSON},
"interests[].entities.application_date": {telemetrytypes.String},
"interests[].entities[].reviews": {telemetrytypes.ArrayJSON},
"interests[].entities[].reviews[].given_by": {telemetrytypes.String},
"interests[].entities[].reviews[].remarks": {telemetrytypes.String},
"interests[].entities[].reviews[].weight": {telemetrytypes.Float64},
"interests[].entities[].reviews[].passed": {telemetrytypes.Bool},
"interests[].entities[].reviews[].type": {telemetrytypes.String},
"interests[].entities[].reviews[].analysis_type": {telemetrytypes.Int64},
"interests[].entities[].reviews[].entries": {telemetrytypes.ArrayJSON},
"interests[].entities[].reviews[].entries[].subject": {telemetrytypes.String},
"interests[].entities[].reviews[].entries[].status": {telemetrytypes.String},
"interests[].entities[].reviews[].entries[].metadata": {telemetrytypes.ArrayJSON},
"interests[].entities[].reviews[].entries[].metadata[].company": {telemetrytypes.String},
"interests[].entities[].reviews[].entries[].metadata[].experience": {telemetrytypes.Int64},
"interests[].entities[].reviews[].entries[].metadata[].unit": {telemetrytypes.String},
"interests[].entities[].reviews[].entries[].metadata[].positions": {telemetrytypes.ArrayJSON},
"interests[].entities[].reviews[].entries[].metadata[].positions[].name": {telemetrytypes.String},
"interests[].entities[].reviews[].entries[].metadata[].positions[].duration": {telemetrytypes.Int64, telemetrytypes.Float64},
"interests[].entities[].reviews[].entries[].metadata[].positions[].unit": {telemetrytypes.String},
"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {telemetrytypes.ArrayInt64, telemetrytypes.ArrayString},
"message": {telemetrytypes.String},
}
return types, makeGetTypes(types)
}
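// A log body consistent with this type map might look like the following
// (values are hypothetical; only the shape matters for the tests above):
//
//	{
//	  "user": {"name": "John", "age": 30, "height": 1.8},
//	  "education": [{"name": "MIT", "type": "college", "awards": [{"type": "sports", "rank": 1}]}],
//	  "interests": [{"type": "music", "entities": [{"reviews": []}]}],
//	  "message": "user record ingested"
//	}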

View File

@@ -0,0 +1,158 @@
package telemetrylogs
import (
"context"
"encoding/json"
"fmt"
"log/slog"
"time"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types/cachetypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
)
const (
// KeyEvolutionMetadataTableName is the table name for key evolution metadata
KeyEvolutionMetadataTableName = "distributed_column_key_evolution_metadata"
// KeyEvolutionMetadataDBName is the database name for key evolution metadata
KeyEvolutionMetadataDBName = "signoz_logs"
// KeyEvolutionMetadataCacheKeyPrefix is the prefix for cache keys
KeyEvolutionMetadataCacheKeyPrefix = "key_evolution_metadata:"
base_column = "base_column"
base_column_type = "base_column_type"
new_column = "new_column"
new_column_type = "new_column_type"
release_time = "release_time"
)
// CachedKeyEvolutionMetadata is a cacheable type for storing key evolution metadata
type CachedKeyEvolutionMetadata struct {
Keys []*telemetrytypes.KeyEvolutionMetadataKey `json:"keys"`
}
var _ cachetypes.Cacheable = (*CachedKeyEvolutionMetadata)(nil)
func (c *CachedKeyEvolutionMetadata) MarshalBinary() ([]byte, error) {
return json.Marshal(c)
}
func (c *CachedKeyEvolutionMetadata) UnmarshalBinary(data []byte) error {
return json.Unmarshal(data, c)
}
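// The Cacheable round trip is plain JSON encoding; a minimal sketch with a
// hypothetical entry (errors elided for brevity):
//
//	in := &CachedKeyEvolutionMetadata{Keys: []*telemetrytypes.KeyEvolutionMetadataKey{{BaseColumn: "resources_string"}}}
//	raw, _ := in.MarshalBinary()
//	out := &CachedKeyEvolutionMetadata{}
//	_ = out.UnmarshalBinary(raw) // out.Keys now mirrors in.Keys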
// KeyEvolutionMetadata caches column evolution metadata per organization.
// Each key can have multiple evolution entries, allowing for multiple column transitions over time.
// The cache is organized by orgId, then by key name.
type KeyEvolutionMetadata struct {
cache cache.Cache
telemetryStore telemetrystore.TelemetryStore
logger *slog.Logger
}
var _ telemetrytypes.KeyEvolutionMetadataStore = (*KeyEvolutionMetadata)(nil)
func NewKeyEvolutionMetadata(telemetryStore telemetrystore.TelemetryStore, cache cache.Cache, logger *slog.Logger) *KeyEvolutionMetadata {
return &KeyEvolutionMetadata{
cache: cache,
telemetryStore: telemetryStore,
logger: logger,
}
}
func (k *KeyEvolutionMetadata) fetchFromClickHouse(ctx context.Context, orgID valuer.UUID, key string) []*telemetrytypes.KeyEvolutionMetadataKey {
store := k.telemetryStore
logger := k.logger
ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
// Build query to fetch all key evolution metadata
sb := sqlbuilder.NewSelectBuilder()
sb.Select(
base_column,
base_column_type,
new_column,
new_column_type,
release_time,
)
sb.From(fmt.Sprintf("%s.%s", KeyEvolutionMetadataDBName, KeyEvolutionMetadataTableName))
sb.OrderBy(base_column, release_time)
sb.Where(sb.E(base_column, key))
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
rows, err := store.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
logger.WarnContext(ctx, "Failed to fetch key evolution metadata from ClickHouse", "error", err)
return nil
}
defer rows.Close()
// Group metadata by base_column
metadataByKey := make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
for rows.Next() {
var (
baseColumn string
baseColumnType string
newColumn string
newColumnType string
releaseTime uint64
)
if err := rows.Scan(&baseColumn, &baseColumnType, &newColumn, &newColumnType, &releaseTime); err != nil {
logger.WarnContext(ctx, "Failed to scan key evolution metadata row", "error", err)
continue
}
entry := &telemetrytypes.KeyEvolutionMetadataKey{
BaseColumn: baseColumn,
BaseColumnType: baseColumnType,
NewColumn: newColumn,
NewColumnType: newColumnType,
ReleaseTime: time.Unix(0, int64(releaseTime)),
}
metadataByKey[baseColumn] = append(metadataByKey[baseColumn], entry)
}
if err := rows.Err(); err != nil {
logger.WarnContext(ctx, "Error iterating key evolution metadata rows", "error", err)
return nil
}
return metadataByKey[key]
}
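// For reference, the builder above yields a query of roughly this shape
// (ClickHouse flavor, with ? standing in for the bound key):
//
//	SELECT base_column, base_column_type, new_column, new_column_type, release_time
//	FROM signoz_logs.distributed_column_key_evolution_metadata
//	WHERE base_column = ?
//	ORDER BY base_column, release_time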
// Get retrieves all metadata keys for the given key name and orgId, consulting
// the cache first and falling back to ClickHouse on a miss (populating the
// cache on success). Returns an empty slice if no metadata exists for the key.
func (k *KeyEvolutionMetadata) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
cachedData := &CachedKeyEvolutionMetadata{}
if err := k.cache.Get(ctx, orgId, cacheKey, cachedData); err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
k.logger.ErrorContext(ctx, "Failed to get key evolution metadata from cache", "error", err)
return nil
}
// Cache miss - fetch from ClickHouse and populate the cache
metadata := k.fetchFromClickHouse(ctx, orgId, keyName)
if metadata != nil {
cachedData = &CachedKeyEvolutionMetadata{Keys: metadata}
if err := k.cache.Set(ctx, orgId, cacheKey, cachedData, 24*time.Hour); err != nil {
k.logger.WarnContext(ctx, "Failed to set key evolution metadata in cache", "key", keyName, "error", err)
}
}
}
return cachedData.Keys
}
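// A minimal call-site sketch (store, cache, logger, and key name are
// placeholders; see the tests for concrete wiring):
//
//	kem := NewKeyEvolutionMetadata(telemetryStore, cache, logger)
//	for _, evo := range kem.Get(ctx, orgId, "resources_string") {
//		// evo.NewColumn supersedes evo.BaseColumn for data written
//		// at or after evo.ReleaseTime.
//	}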

View File

@@ -0,0 +1,213 @@
package telemetrylogs
import (
"context"
"log/slog"
"testing"
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/cache/cachetest"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
cmock "github.com/srikanthccv/ClickHouse-go-mock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
var (
clickHouseQueryPattern = "SELECT.*base_column.*FROM.*distributed_column_key_evolution_metadata.*WHERE.*base_column.*=.*"
clickHouseColumns = []cmock.ColumnType{
{Name: base_column, Type: "String"},
{Name: base_column_type, Type: "String"},
{Name: new_column, Type: "String"},
{Name: new_column_type, Type: "String"},
{Name: release_time, Type: "UInt64"},
}
)
func newTestCache(t *testing.T) cache.Cache {
t.Helper()
testCache, err := cachetest.New(cache.Config{
Provider: "memory",
Memory: cache.Memory{
NumCounters: 10 * 1000,
MaxCost: 1 << 26,
},
})
require.NoError(t, err)
return testCache
}
func newTestTelemetryStore() *telemetrystoretest.Provider {
return telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherRegexp)
}
func newKeyEvolutionMetadata(telemetryStore telemetrystore.TelemetryStore, cache cache.Cache) *KeyEvolutionMetadata {
return NewKeyEvolutionMetadata(telemetryStore, cache, slog.Default())
}
func createMockRows(values [][]any) *cmock.Rows {
return cmock.NewRows(clickHouseColumns, values)
}
func assertMetadataEqual(t *testing.T, expected, actual *telemetrytypes.KeyEvolutionMetadataKey) {
t.Helper()
assert.Equal(t, expected.BaseColumn, actual.BaseColumn)
assert.Equal(t, expected.BaseColumnType, actual.BaseColumnType)
assert.Equal(t, expected.NewColumn, actual.NewColumn)
assert.Equal(t, expected.NewColumnType, actual.NewColumnType)
assert.Equal(t, expected.ReleaseTime, actual.ReleaseTime)
}
func TestKeyEvolutionMetadata_Get_CacheHit(t *testing.T) {
ctx := context.Background()
orgId := valuer.GenerateUUID()
keyName := "service.name"
testCache := newTestCache(t)
telemetryStore := newTestTelemetryStore()
kem := newKeyEvolutionMetadata(telemetryStore, testCache)
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
expectedMetadata := []*telemetrytypes.KeyEvolutionMetadataKey{
{
BaseColumn: "resources_string",
BaseColumnType: "Map(LowCardinality(String), String)",
NewColumn: "resource",
NewColumnType: "JSON(max_dynamic_paths=100)",
ReleaseTime: releaseTime,
},
}
cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
cachedData := &CachedKeyEvolutionMetadata{Keys: expectedMetadata}
err := testCache.Set(ctx, orgId, cacheKey, cachedData, 24*time.Hour)
require.NoError(t, err)
result := kem.Get(ctx, orgId, keyName)
require.Len(t, result, 1)
assertMetadataEqual(t, expectedMetadata[0], result[0])
}
func TestKeyEvolutionMetadata_Get_CacheMiss_FetchFromClickHouse(t *testing.T) {
ctx := context.Background()
orgId := valuer.GenerateUUID()
keyName := "resources_string"
testCache := newTestCache(t)
telemetryStore := newTestTelemetryStore()
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
values := [][]any{
{
"resources_string",
"Map(LowCardinality(String), String)",
"resource",
"JSON(max_dynamic_paths=100)",
uint64(releaseTime.UnixNano()),
},
}
rows := createMockRows(values)
telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnRows(rows)
kem := newKeyEvolutionMetadata(telemetryStore, testCache)
result := kem.Get(ctx, orgId, keyName)
require.Len(t, result, 1)
assert.Equal(t, "resources_string", result[0].BaseColumn)
assert.Equal(t, "Map(LowCardinality(String), String)", result[0].BaseColumnType)
assert.Equal(t, "resource", result[0].NewColumn)
assert.Equal(t, "JSON(max_dynamic_paths=100)", result[0].NewColumnType)
assert.Equal(t, releaseTime.UnixNano(), result[0].ReleaseTime.UnixNano())
// Verify data was cached
var cachedData CachedKeyEvolutionMetadata
cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
err := testCache.Get(ctx, orgId, cacheKey, &cachedData)
require.NoError(t, err)
require.Len(t, cachedData.Keys, 1)
assert.Equal(t, result[0].BaseColumn, cachedData.Keys[0].BaseColumn)
}
func TestKeyEvolutionMetadata_Get_MultipleMetadataEntries(t *testing.T) {
ctx := context.Background()
orgId := valuer.GenerateUUID()
keyName := "resources_string"
testCache := newTestCache(t)
telemetryStore := newTestTelemetryStore()
releaseTime1 := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
releaseTime2 := time.Date(2024, 2, 15, 10, 0, 0, 0, time.UTC)
values := [][]any{
{
"resources_string",
"Map(LowCardinality(String), String)",
"resource",
"JSON(max_dynamic_paths=100)",
uint64(releaseTime1.UnixNano()),
},
{
"resources_string",
"Map(LowCardinality(String), String)",
"resource_v2",
"JSON(max_dynamic_paths=100, max_dynamic_path_depth=10)",
uint64(releaseTime2.UnixNano()),
},
}
rows := createMockRows(values)
telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnRows(rows)
kem := newKeyEvolutionMetadata(telemetryStore, testCache)
result := kem.Get(ctx, orgId, keyName)
require.Len(t, result, 2)
assert.Equal(t, "resources_string", result[0].BaseColumn)
assert.Equal(t, "resource", result[0].NewColumn)
assert.Equal(t, "JSON(max_dynamic_paths=100)", result[0].NewColumnType)
assert.Equal(t, releaseTime1.UnixNano(), result[0].ReleaseTime.UnixNano())
assert.Equal(t, "resource_v2", result[1].NewColumn)
assert.Equal(t, "JSON(max_dynamic_paths=100, max_dynamic_path_depth=10)", result[1].NewColumnType)
assert.Equal(t, releaseTime2.UnixNano(), result[1].ReleaseTime.UnixNano())
}
func TestKeyEvolutionMetadata_Get_EmptyResultFromClickHouse(t *testing.T) {
ctx := context.Background()
orgId := valuer.GenerateUUID()
keyName := "resources_string"
testCache := newTestCache(t)
telemetryStore := newTestTelemetryStore()
rows := createMockRows([][]any{})
telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnRows(rows)
kem := newKeyEvolutionMetadata(telemetryStore, testCache)
result := kem.Get(ctx, orgId, keyName)
assert.Empty(t, result)
}
func TestKeyEvolutionMetadata_Get_ClickHouseQueryError(t *testing.T) {
ctx := context.Background()
orgId := valuer.GenerateUUID()
keyName := "resources_string"
testCache := newTestCache(t)
telemetryStore := newTestTelemetryStore()
telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnError(assert.AnError)
kem := newKeyEvolutionMetadata(telemetryStore, testCache)
result := kem.Get(ctx, orgId, keyName)
assert.Empty(t, result)
}
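// These tests are self-contained (in-memory cache plus a mocked ClickHouse
// connection), so they can be run in isolation; assuming the package lives at
// pkg/telemetrylogs:
//
//	go test ./pkg/telemetrylogs/ -run TestKeyEvolutionMetadata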

View File

@@ -247,7 +247,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
continue
}
// get column expression for the field - use array index directly to avoid pointer to loop variable
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
if err != nil {
return nil, err
}
@@ -267,7 +267,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
// Add order by
for _, orderBy := range query.Order {
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
@@ -333,7 +333,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}
@@ -347,7 +347,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
allAggChArgs := make([]any, 0)
for i, agg := range query.Aggregations {
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, agg.Expression,
ctx, start, end, agg.Expression,
uint64(query.StepInterval.Seconds()),
keys,
)
@@ -479,7 +479,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}
@@ -496,7 +496,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
for idx := range query.Aggregations {
aggExpr := query.Aggregations[idx]
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, aggExpr.Expression,
ctx, start, end, aggExpr.Expression,
rateInterval,
keys,
)
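These hunks thread the query window (start, end) from the statement builder into field mapping and aggregation rewriting, so column choice can depend on when the data was written. A minimal sketch of the decision this enables, assuming the KeyEvolutionMetadataKey type introduced in this change (the helper name is hypothetical):
// useNewColumnOnly reports whether a query window can be served entirely
// from the evolved column: true only when the window starts at or after
// the column's release time; otherwise both columns must be consulted.
func useNewColumnOnly(evo *telemetrytypes.KeyEvolutionMetadataKey, startNs uint64) bool {
return startNs >= uint64(evo.ReleaseTime.UnixNano())
}
The statement-builder tests below exercise exactly this split: windows starting after the release time produce expressions over resource.`service.name` alone, while windows straddling it keep the multiIf fallback over resources_string.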

View File

@@ -8,9 +8,11 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/require"
)
@@ -38,7 +40,14 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
}
func TestStatementBuilderTimeSeries(t *testing.T) {
// Release time around which the test query windows pivot (before vs. after the column evolution)
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
releaseTimeNano := uint64(releaseTime.UnixNano())
cases := []struct {
startTs uint64
endTs uint64
name string
requestType qbtypes.RequestType
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
@@ -46,14 +55,16 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
expectedErr error
}{
{
name: "Time series with limit",
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with limit and count distinct on service.name",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
Expression: "count_distinct(service.name)",
},
},
Filter: &qbtypes.Filter{
@@ -69,20 +80,22 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
},
expectedErr: nil,
},
{
name: "Time series with OR b/w resource attr and attribute filter",
startTs: releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with OR b/w resource attr and attribute filter and count distinct on service.name",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
Expression: "count_distinct(service.name)",
},
},
Filter: &qbtypes.Filter{
@@ -98,12 +111,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1705224600), uint64(1705485600), "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600), 10, "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600)},
},
expectedErr: nil,
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with limit + custom order by",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -137,12 +152,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
},
expectedErr: nil,
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with group by on materialized column",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -169,10 +186,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `materialized.key.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`materialized.key.name`) GLOBAL IN (SELECT `materialized.key.name` FROM __limit_cte) GROUP BY ts, `materialized.key.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
},
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with materialised column using or with regex operator",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -190,13 +209,20 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{uint64(1747945619), uint64(1747983448), "redis.*", true, "memcached", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
Args: []any{uint64(1705397400), uint64(1705485600), "redis.*", true, "memcached", true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
},
expectedErr: nil,
},
}
fm := NewFieldMapper()
ctx := context.Background()
orgId := valuer.GenerateUUID()
ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
OrgID: orgId.String(),
})
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(mockKeyEvolutionMetadata(ctx, orgId, releaseTime))
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -220,7 +246,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
q, err := statementBuilder.Build(ctx, c.startTs, c.endTs, c.requestType, c.query, nil)
if c.expectedErr != nil {
require.Error(t, err)
@@ -317,7 +343,8 @@ func TestStatementBuilderListQuery(t *testing.T) {
},
}
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -426,7 +453,8 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
},
}
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -500,7 +528,8 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
},
}
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -596,7 +625,8 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
},
}
fm := NewFieldMapper()
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()

View File

@@ -1,9 +1,12 @@
package telemetrylogs
import (
"context"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// Helper function to limit string length for display
@@ -951,3 +954,23 @@ func buildCompleteFieldKeyMapCollision() map[string][]*telemetrytypes.TelemetryF
}
return keysMap
}
// mockKeyEvolutionMetadata builds a mock org-scoped key evolution metadata map for a column evolution.
func mockKeyEvolutionMetadata(ctx context.Context, orgId valuer.UUID, releaseTime time.Time) map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey {
metadata := make(map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
// Initialize the inner map before appending so we never assign into a nil map.
orgIdStr := orgId.String()
metadata[orgIdStr] = make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
newKeyEvolutionMetadataEntry := telemetrytypes.KeyEvolutionMetadataKey{
BaseColumn: "resources_string",
BaseColumnType: "Map(LowCardinality(String), String)",
NewColumn: "resource",
NewColumnType: "JSON(max_dynamic_paths=100)",
ReleaseTime: releaseTime,
}
metadata[orgIdStr]["resources_string"] = []*telemetrytypes.KeyEvolutionMetadataKey{&newKeyEvolutionMetadataEntry}
return metadata
}

View File

@@ -245,7 +245,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
}
defer rows.Close()
indexes := make(map[string][]schemamigrator.Index)
indexesMap := make(map[string][]schemamigrator.Index)
for rows.Next() {
var name string
var typeFull string
@@ -254,7 +254,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
if err := rows.Scan(&name, &typeFull, &expr, &granularity); err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to scan string indexed column")
}
indexes[name] = append(indexes[name], schemamigrator.Index{
indexesMap[name] = append(indexesMap[name], schemamigrator.Index{
Name: name,
Type: typeFull,
Expression: expr,
@@ -262,7 +262,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
})
}
return indexes, nil
return indexesMap, nil
}
func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {

View File

@@ -25,8 +25,7 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
tsStart, tsEnd uint64,
) (string, error) {
switch operator {
@@ -45,7 +44,7 @@ func (c *conditionBuilder) ConditionFor(
return "", nil
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
tblFieldName, err := c.fm.FieldFor(ctx, tsStart, tsEnd, key)
if err != nil {
// if we don't have a table field name, we can't build a condition for related values
return "", nil

View File

@@ -53,7 +53,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedError != nil {

View File

@@ -51,7 +51,7 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
return column, nil
}
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err
@@ -69,11 +69,12 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
colName, err := m.FieldFor(ctx, field)
colName, err := m.FieldFor(ctx, startNs, endNs, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -83,7 +84,7 @@ func (m *fieldMapper) ColumnExpressionFor(
if _, ok := attributeMetadataColumns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextSpan
colName, _ = m.FieldFor(ctx, field)
colName, _ = m.FieldFor(ctx, startNs, endNs, field)
} else {
// - the context is not provided
// - there are no keys for the field
@@ -101,12 +102,12 @@ func (m *fieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
colName, _ = m.FieldFor(ctx, keysForField[0])
colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
colName, _ = m.FieldFor(ctx, key)
colName, _ = m.FieldFor(ctx, startNs, endNs, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))

View File

@@ -145,6 +145,8 @@ func TestGetFieldKeyName(t *testing.T) {
testCases := []struct {
name string
tsStart uint64
tsEnd uint64
key telemetrytypes.TelemetryFieldKey
expectedResult string
expectedError error
@@ -203,7 +205,7 @@ func TestGetFieldKeyName(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result, err := fm.FieldFor(ctx, &tc.key)
result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

View File

@@ -942,18 +942,18 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldDataType: fieldValueSelector.FieldDataType,
}
selectColumn, err := t.fm.FieldFor(ctx, key)
selectColumn, err := t.fm.FieldFor(ctx, 0, 0, key)
if err != nil {
// we don't have an explicit column to select from the related metadata table
// so we will select either from resource_attributes or attributes table
// in that order
resourceColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
resourceColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
attributeColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
attributeColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
@@ -978,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {
@@ -1002,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}

View File

@@ -120,7 +120,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
stepSec,
))
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", []any{}, err
}
@@ -148,7 +148,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", []any{}, err
}
@@ -200,7 +200,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
))
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}
@@ -231,7 +231,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
}
@@ -270,7 +270,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
stepSec,
))
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}
@@ -295,7 +295,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
}

View File

@@ -23,6 +23,8 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
func (c *conditionBuilder) conditionFor(
ctx context.Context,
startNs uint64,
endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
@@ -39,7 +41,7 @@ func (c *conditionBuilder) conditionFor(
value = querybuilder.FormatValueForContains(value)
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}
@@ -139,10 +141,10 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
startNs uint64,
endNs uint64,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
if err != nil {
return "", err
}

View File

@@ -65,7 +65,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
return nil, qbtypes.ErrColumnNotFound
}
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err
@@ -92,11 +92,12 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
colName, err := m.FieldFor(ctx, field)
colName, err := m.FieldFor(ctx, startNs, endNs, field)
if err != nil {
return "", err
}

View File

@@ -207,7 +207,7 @@ func TestGetFieldKeyName(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result, err := fm.FieldFor(ctx, &tc.key)
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

View File

@@ -359,7 +359,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
sb.Select("fingerprint")
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}

View File

@@ -29,6 +29,8 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
func (c *conditionBuilder) conditionFor(
ctx context.Context,
startNs uint64,
endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
@@ -52,7 +54,7 @@ func (c *conditionBuilder) conditionFor(
}
// then ask the mapper for the actual SQL reference
tblFieldName, err := c.fm.FieldFor(ctx, key)
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}
@@ -239,20 +241,20 @@ func (c *conditionBuilder) ConditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
startNs uint64,
_ uint64,
endNs uint64,
) (string, error) {
if c.isSpanScopeField(key.Name) {
return c.buildSpanScopeCondition(key, operator, value, startNs)
}
condition, err := c.conditionFor(ctx, key, operator, value, sb)
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
if err != nil {
return "", err
}
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
field, _ := c.fm.FieldFor(ctx, key)
field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
if slices.Contains(maps.Keys(IntrinsicFields), field) ||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
slices.Contains(maps.Keys(CalculatedFields), field) ||
@@ -260,7 +262,7 @@ func (c *conditionBuilder) ConditionFor(
return condition, nil
}
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
if err != nil {
return "", err
}

View File

@@ -225,6 +225,7 @@ func (m *defaultFieldMapper) ColumnFor(
// otherwise it returns qbtypes.ErrColumnNotFound
func (m *defaultFieldMapper) FieldFor(
ctx context.Context,
startNs, endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
) (string, error) {
// Special handling for span scope fields
@@ -297,11 +298,12 @@ func (m *defaultFieldMapper) FieldFor(
// if it exists otherwise it returns qbtypes.ErrColumnNotFound
func (m *defaultFieldMapper) ColumnExpressionFor(
ctx context.Context,
startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
colName, err := m.FieldFor(ctx, field)
colName, err := m.FieldFor(ctx, startNs, endNs, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -311,7 +313,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
if _, ok := indexV3Columns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextSpan
colName, _ = m.FieldFor(ctx, field)
colName, _ = m.FieldFor(ctx, startNs, endNs, field)
} else {
// - the context is not provided
// - there are no keys for the field
@@ -329,12 +331,12 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
colName, _ = m.FieldFor(ctx, keysForField[0])
colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
colName, _ = m.FieldFor(ctx, key)
colName, _ = m.FieldFor(ctx, startNs, endNs, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))

View File

@@ -92,7 +92,7 @@ func TestGetFieldKeyName(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fm := NewFieldMapper()
result, err := fm.FieldFor(ctx, &tc.key)
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

View File

@@ -293,7 +293,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
// TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs?
for _, field := range selectedFields {
colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &field, keys)
if err != nil {
return nil, err
}
@@ -311,7 +311,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
// Add order by
for _, orderBy := range query.Order {
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -509,7 +509,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
allAggChArgs := make([]any, 0)
for i, agg := range query.Aggregations {
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, agg.Expression,
ctx, start, end, agg.Expression,
uint64(query.StepInterval.Seconds()),
keys,
)
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -654,7 +654,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
for idx := range query.Aggregations {
aggExpr := query.Aggregations[idx]
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, aggExpr.Expression,
ctx, start, end, aggExpr.Expression,
rateInterval,
keys,
)
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
}, start, end)
if err != nil {
return nil, err

View File

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
ConditionBuilder: b.stmtBuilder.cb,
FieldKeys: keys,
SkipResourceFilter: true,
}, b.start, b.end,
)
if err != nil {
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -450,7 +450,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
if selectedFields[field.Name] {
continue
}
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &field, keys)
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &field, keys)
if err != nil {
b.stmtBuilder.logger.WarnContext(ctx, "failed to map select field",
"field", field.Name, "error", err)
@@ -465,7 +465,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
// Add order by support using ColumnExpressionFor
orderApplied := false
for _, orderBy := range b.operator.Order {
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
@@ -547,6 +547,8 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
b.start,
b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,
@@ -572,6 +574,8 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
b.start,
b.end,
agg.Expression,
uint64(b.operator.StepInterval.Seconds()),
keys,
@@ -657,6 +661,8 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
b.start,
b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,
@@ -684,6 +690,8 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
b.start,
b.end,
agg.Expression,
rateInterval,
keys,
@@ -797,6 +805,8 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
b.start,
b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,
@@ -822,6 +832,8 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
b.start,
b.end,
agg.Expression,
uint64((b.end-b.start)/querybuilder.NsToSeconds),
keys,

View File

@@ -21,11 +21,11 @@ type JsonKeyToFieldFunc func(context.Context, *telemetrytypes.TelemetryFieldKey,
// FieldMapper maps the telemetry field key to the table field name.
type FieldMapper interface {
// FieldFor returns the field name for the given key, resolved for the [startNs, endNs] query window.
FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error)
FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error)
// ColumnFor returns the column for the given key.
ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error)
// ColumnExpressionFor returns the column expression for the given key, resolved for the [startNs, endNs] query window.
ColumnExpressionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
ColumnExpressionFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
}
// ConditionBuilder builds the condition for the filter.
@@ -37,8 +37,8 @@ type ConditionBuilder interface {
type AggExprRewriter interface {
// Rewrite rewrites the aggregation expression to be used in the query.
Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
Rewrite(ctx context.Context, startNs, endNs uint64, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
RewriteMulti(ctx context.Context, startNs, endNs uint64, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
}
type Statement struct {

View File

@@ -0,0 +1,20 @@
package telemetrytypes
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/valuer"
)
type KeyEvolutionMetadataKey struct {
BaseColumn string
BaseColumnType string
NewColumn string
NewColumnType string
ReleaseTime time.Time
}
type KeyEvolutionMetadataStore interface {
Get(ctx context.Context, orgId valuer.UUID, keyName string) []*KeyEvolutionMetadataKey
}
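The store's role is to let a time-aware field mapper decide which physical column a query should read, based on where the query window falls relative to ReleaseTime. A minimal sketch of that decision rule, assuming the window is [startNs, endNs] in nanoseconds; the function name and the straddle handling are assumptions for illustration, not taken from this diff:
// chooseColumns is a hedged sketch of the release-time decision for one key.
func chooseColumns(entries []*KeyEvolutionMetadataKey, startNs, endNs uint64) []string {
	cols := []string{}
	for _, e := range entries {
		releaseNs := uint64(e.ReleaseTime.UnixNano())
		switch {
		case startNs >= releaseNs:
			cols = append(cols, e.NewColumn) // window entirely after the rollout
		case endNs < releaseNs:
			cols = append(cols, e.BaseColumn) // window entirely before the rollout
		default:
			cols = append(cols, e.BaseColumn, e.NewColumn) // straddles the rollout: read both
		}
	}
	return cols
}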

View File

@@ -1,82 +0,0 @@
package telemetrytypes
import (
"fmt"
"strings"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
"github.com/SigNoz/signoz/pkg/valuer"
)
type JSONAccessBranchType struct {
valuer.String
}
var (
BranchJSON = JSONAccessBranchType{valuer.NewString("json")}
BranchDynamic = JSONAccessBranchType{valuer.NewString("dynamic")}
)
type JSONAccessPlan = []*JSONAccessNode
type TerminalConfig struct {
Key *TelemetryFieldKey
ElemType JSONDataType
ValueType JSONDataType
}
// Node is now a tree structure representing the complete JSON path traversal
// that precomputes all possible branches and their types
type JSONAccessNode struct {
// Node information
Name string
IsTerminal bool
isRoot bool // marked true for only body_json and body_json_promoted
// Precomputed type information (single source of truth)
AvailableTypes []JSONDataType
// Array type branches (Array(JSON) vs Array(Dynamic))
Branches map[JSONAccessBranchType]*JSONAccessNode
// Terminal configuration
TerminalConfig *TerminalConfig
// Parent reference for traversal
Parent *JSONAccessNode
// JSON progression parameters (precomputed during planning)
MaxDynamicTypes int
MaxDynamicPaths int
}
func NewRootJSONAccessNode(name string, maxDynamicTypes, maxDynamicPaths int) *JSONAccessNode {
return &JSONAccessNode{
Name: name,
isRoot: true,
MaxDynamicTypes: maxDynamicTypes,
MaxDynamicPaths: maxDynamicPaths,
}
}
func (n *JSONAccessNode) Alias() string {
if n.isRoot {
return n.Name
} else if n.Parent == nil {
return fmt.Sprintf("`%s`", n.Name)
}
parentAlias := strings.TrimLeft(n.Parent.Alias(), "`")
parentAlias = strings.TrimRight(parentAlias, "`")
sep := jsontypeexporter.ArraySeparator
if n.Parent.isRoot {
sep = "."
}
return fmt.Sprintf("`%s%s%s`", parentAlias, sep, n.Name)
}
func (n *JSONAccessNode) FieldPath() string {
key := "`" + n.Name + "`"
return n.Parent.Alias() + "." + key
}

View File

@@ -0,0 +1,40 @@
package telemetrytypestest
import (
"context"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// MockKeyEvolutionMetadataStore implements the KeyEvolutionMetadataStore interface for testing purposes
type MockKeyEvolutionMetadataStore struct {
metadata map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey // orgId -> keyName -> metadata
}
// NewMockKeyEvolutionMetadataStore creates a new instance of MockKeyEvolutionMetadataStore with initialized maps
func NewMockKeyEvolutionMetadataStore(metadata map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey) *MockKeyEvolutionMetadataStore {
return &MockKeyEvolutionMetadataStore{
metadata: metadata,
}
}
// Get retrieves all metadata keys for the given key name and orgId.
// Returns nil if no metadata exists for the given org or key.
func (m *MockKeyEvolutionMetadataStore) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
if m.metadata == nil {
return nil
}
orgMetadata, orgExists := m.metadata[orgId.String()]
if !orgExists {
return nil
}
keys, exists := orgMetadata[keyName]
if !exists {
return nil
}
// Return a shallow copy so callers can't mutate the stored slice
result := make([]*telemetrytypes.KeyEvolutionMetadataKey, len(keys))
copy(result, keys)
return result
}
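A short usage sketch of the mock (values are illustrative; assumes valuer.GenerateUUID exists in pkg/valuer):
// Build a store with one evolution entry and query it.
orgId := valuer.GenerateUUID()
store := NewMockKeyEvolutionMetadataStore(map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey{
	orgId.String(): {
		"resources_string": {{BaseColumn: "resources_string", NewColumn: "resource"}},
	},
})
got := store.Get(context.Background(), orgId, "resources_string") // one entry
none := store.Get(context.Background(), orgId, "unknown")         // nil
_, _ = got, none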