Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-27 09:22:12 +00:00)

Compare commits: fix/qb-lim ... main (7 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 4ae268d867 |  |
|  | 9d78d67461 |  |
|  | 055d0ba90d |  |
|  | 09dc95cfe9 |  |
|  | d218cd5733 |  |
|  | f6da9adb86 |  |
|  | c82f54b548 |  |
@@ -853,7 +853,7 @@ paths:
    get:
      deprecated: false
      description: This endpoints promotes and indexes paths
      operationId: PromotePaths
      operationId: ListPromotedAndIndexedPaths
      responses:
        "200":
          content:
@@ -883,13 +883,11 @@ paths:
          description: Internal Server Error
      summary: Promote and index paths
      tags:
        - promoted_paths
        - logs
        - json_logs
    post:
      deprecated: false
      description: This endpoints promotes and indexes paths
      operationId: PromotePaths
      operationId: HandlePromoteAndIndexPaths
      requestBody:
        content:
          application/json:
@@ -915,9 +913,7 @@ paths:
          description: Internal Server Error
      summary: Promote and index paths
      tags:
        - promoted_paths
        - logs
        - json_logs
  /api/v1/org/preferences:
    get:
      deprecated: false
frontend/src/api/metricsExplorer/v2/getMetricMetadata.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';

export const getMetricMetadata = async (
  metricName: string,
  signal?: AbortSignal,
  headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
  try {
    const encodedMetricName = encodeURIComponent(metricName);
    const response = await axios.get(
      `/metrics/metadata?metricName=${encodedMetricName}`,
      {
        signal,
        headers,
      },
    );

    return {
      httpStatusCode: response.status,
      data: response.data,
    };
  } catch (error) {
    return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
  }
};
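A minimal sketch (not part of this compare) of how the new client could be consumed through react-query, using the GET_METRIC_METADATA key that this compare adds to REACT_QUERY_KEY further below. The hook name and option shape here are illustrative assumptions; the PR itself wires the client through useGetMultipleMetrics/useGetMetrics.

```typescript
// Illustrative sketch only. Assumes the same react-query v3 setup the rest of
// the frontend uses; the hook name is hypothetical.
import { useQuery, UseQueryResult } from 'react-query';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
import { ErrorResponseV2, SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';

type MetricMetadataResult =
  | SuccessResponseV2<MetricMetadataResponse>
  | ErrorResponseV2;

export function useMetricMetadataSketch(
  metricName: string,
): UseQueryResult<MetricMetadataResult> {
  return useQuery<MetricMetadataResult>({
    queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
    queryFn: () => getMetricMetadata(metricName),
    enabled: Boolean(metricName), // skip the request until a metric is selected
  });
}
```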
@@ -50,13 +50,6 @@
    color: var(--bg-vanilla-400) !important;
    font-size: 12px !important;
  }
  &[type='number']::-webkit-inner-spin-button,
  &[type='number']::-webkit-outer-spin-button {
    -webkit-appearance: none;
    -moz-appearance: none;
    appearance: none;
    margin: 0;
  }
}

.close-btn {

@@ -560,6 +560,10 @@
  border: 1px solid var(--bg-vanilla-300) !important;
  background: var(--bg-vanilla-100) !important;
  box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;

  .ant-select-selection-item {
    color: var(--text-ink-400);
  }
}
}
}

@@ -569,6 +573,10 @@
  border: 1px solid var(--bg-vanilla-300) !important;
  background: var(--bg-vanilla-100) !important;
  box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;

  .ant-select-selection-item {
    color: var(--text-ink-400);
  }
}

.ant-select-arrow {

@@ -169,6 +169,10 @@
.ant-select-selector {
  border: 1px solid var(--bg-vanilla-300) !important;
  background: var(--bg-vanilla-100) !important;

  .ant-select-selection-item {
    color: var(--text-ink-400);
  }
}
}
}
@@ -32,6 +32,7 @@ const ADD_ONS_KEYS = {
  ORDER_BY: 'order_by',
  LIMIT: 'limit',
  LEGEND_FORMAT: 'legend_format',
  REDUCE_TO: 'reduce_to',
};

const ADD_ONS_KEYS_TO_QUERY_PATH = {
@@ -40,13 +41,14 @@ const ADD_ONS_KEYS_TO_QUERY_PATH = {
  [ADD_ONS_KEYS.ORDER_BY]: 'orderBy',
  [ADD_ONS_KEYS.LIMIT]: 'limit',
  [ADD_ONS_KEYS.LEGEND_FORMAT]: 'legend',
  [ADD_ONS_KEYS.REDUCE_TO]: 'reduceTo',
};

const ADD_ONS = [
  {
    icon: <BarChart2 size={14} />,
    label: 'Group By',
    key: 'group_by',
    key: ADD_ONS_KEYS.GROUP_BY,
    description:
      'Break down data by attributes like service name, endpoint, status code, or region. Essential for spotting patterns and comparing performance across different segments.',
    docLink: 'https://signoz.io/docs/userguide/query-builder-v5/#grouping',
@@ -54,7 +56,7 @@ const ADD_ONS = [
  {
    icon: <ScrollText size={14} />,
    label: 'Having',
    key: 'having',
    key: ADD_ONS_KEYS.HAVING,
    description:
      'Filter grouped results based on aggregate conditions. Show only groups meeting specific criteria, like error rates > 5% or p99 latency > 500',
    docLink:
@@ -63,7 +65,7 @@ const ADD_ONS = [
  {
    icon: <ScrollText size={14} />,
    label: 'Order By',
    key: 'order_by',
    key: ADD_ONS_KEYS.ORDER_BY,
    description:
      'Sort results to surface what matters most. Quickly identify slowest operations, most frequent errors, or highest resource consumers.',
    docLink:
@@ -72,7 +74,7 @@ const ADD_ONS = [
  {
    icon: <ScrollText size={14} />,
    label: 'Limit',
    key: 'limit',
    key: ADD_ONS_KEYS.LIMIT,
    description:
      'Show only the top/bottom N results. Perfect for focusing on outliers, reducing noise, and improving dashboard performance.',
    docLink:
@@ -81,7 +83,7 @@ const ADD_ONS = [
  {
    icon: <ScrollText size={14} />,
    label: 'Legend format',
    key: 'legend_format',
    key: ADD_ONS_KEYS.LEGEND_FORMAT,
    description:
      'Customize series labels using variables like {{service.name}}-{{endpoint}}. Makes charts readable at a glance during incident investigation.',
    docLink:
@@ -92,7 +94,7 @@ const ADD_ONS = [
const REDUCE_TO = {
  icon: <ScrollText size={14} />,
  label: 'Reduce to',
  key: 'reduce_to',
  key: ADD_ONS_KEYS.REDUCE_TO,
  description:
    'Apply mathematical operations like sum, average, min, max, or percentiles to reduce multiple time series into a single value.',
  docLink:
@@ -218,10 +220,9 @@ function QueryAddOns({
  );

  const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));

  // Filter and set selected views: add-ons that are both active and available
  setSelectedViews(
    ADD_ONS.filter(
    filteredAddOns.filter(
      (addOn) =>
        activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
    ),
@@ -375,7 +376,6 @@ function QueryAddOns({
  <div className="add-on-content" data-testid="limit-content">
    <InputWithLabel
      label="Limit"
      type="number"
      onChange={handleChangeLimit}
      initialValue={query?.limit ?? undefined}
      placeholder="Enter limit"
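The @@ -218,10 +220,9 @@ hunk above changes how the selected add-on views are initialized: instead of filtering the full ADD_ONS list, the component intersects the add-on keys that are active on the query with the add-ons actually available for the current panel. A small self-contained sketch of that intersection, using hypothetical add-on data rather than the component's real state:

```typescript
// Sketch of the selection logic from the QueryAddOns hunk, with made-up data.
// Only add-ons that are both active on the query and available for the current
// panel type survive the intersection.
type AddOn = { key: string; label: string };

const filteredAddOns: AddOn[] = [
  { key: 'group_by', label: 'Group By' },
  { key: 'limit', label: 'Limit' },
];
// e.g. keys derived from the query's current state
const activeAddOnKeys = new Set(['limit', 'legend_format']);

const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));

const selectedViews = filteredAddOns.filter(
  (addOn) => activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
);
// -> [{ key: 'limit', label: 'Limit' }]: 'legend_format' is active but not
// available here, 'group_by' is available but not active.
```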
@@ -1,118 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/explicit-function-return-type */
|
||||
/* eslint-disable react/display-name */
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
import { jest } from '@jest/globals';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
import { Having, IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { UseQueryOperations } from 'types/common/operations.types';
|
||||
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
|
||||
|
||||
import { QueryV2 } from '../QueryV2';
|
||||
|
||||
// Local mocks for domain-specific heavy child components
|
||||
jest.mock(
|
||||
'../QueryAggregation/QueryAggregation',
|
||||
() =>
|
||||
function () {
|
||||
return <div>QueryAggregation</div>;
|
||||
},
|
||||
);
|
||||
jest.mock(
|
||||
'../MerticsAggregateSection/MetricsAggregateSection',
|
||||
() =>
|
||||
function () {
|
||||
return <div>MetricsAggregateSection</div>;
|
||||
},
|
||||
);
|
||||
// Mock hooks
|
||||
jest.mock('hooks/queryBuilder/useQueryBuilder');
|
||||
jest.mock('hooks/queryBuilder/useQueryBuilderOperations');
|
||||
|
||||
const mockedUseQueryBuilder = jest.mocked(useQueryBuilder);
|
||||
const mockedUseQueryOperations = jest.mocked(
|
||||
useQueryOperations,
|
||||
) as jest.MockedFunction<UseQueryOperations>;
|
||||
|
||||
describe('QueryV2 - base render', () => {
|
||||
beforeEach(() => {
|
||||
const mockCloneQuery = jest.fn() as jest.MockedFunction<
|
||||
(type: string, q: IBuilderQuery) => void
|
||||
>;
|
||||
|
||||
mockedUseQueryBuilder.mockReturnValue(({
|
||||
// Only fields used by QueryV2
|
||||
cloneQuery: mockCloneQuery,
|
||||
panelType: null,
|
||||
} as unknown) as QueryBuilderContextType);
|
||||
|
||||
mockedUseQueryOperations.mockReturnValue({
|
||||
isTracePanelType: false,
|
||||
isMetricsDataSource: false,
|
||||
operators: [],
|
||||
spaceAggregationOptions: [],
|
||||
listOfAdditionalFilters: [],
|
||||
handleChangeOperator: jest.fn(),
|
||||
handleSpaceAggregationChange: jest.fn(),
|
||||
handleChangeAggregatorAttribute: jest.fn(),
|
||||
handleChangeDataSource: jest.fn(),
|
||||
handleDeleteQuery: jest.fn(),
|
||||
handleChangeQueryData: (jest.fn() as unknown) as ReturnType<UseQueryOperations>['handleChangeQueryData'],
|
||||
handleChangeFormulaData: jest.fn(),
|
||||
handleQueryFunctionsUpdates: jest.fn(),
|
||||
listOfAdditionalFormulaFilters: [],
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders limit input when dataSource is logs', () => {
|
||||
const baseQuery: IBuilderQuery = {
|
||||
queryName: 'A',
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator: '',
|
||||
aggregations: [],
|
||||
timeAggregation: '',
|
||||
spaceAggregation: '',
|
||||
temporality: '',
|
||||
functions: [],
|
||||
filter: undefined,
|
||||
filters: { items: [], op: 'AND' },
|
||||
groupBy: [],
|
||||
expression: '',
|
||||
disabled: false,
|
||||
having: [] as Having[],
|
||||
limit: 10,
|
||||
stepInterval: null,
|
||||
orderBy: [],
|
||||
legend: 'A',
|
||||
};
|
||||
|
||||
render(
|
||||
<QueryV2
|
||||
index={0}
|
||||
isAvailableToDisable
|
||||
query={baseQuery}
|
||||
version="v4"
|
||||
onSignalSourceChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
|
||||
signalSourceChangeEnabled={false}
|
||||
queriesCount={1}
|
||||
showTraceOperator={false}
|
||||
hasTraceOperator={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Ensure the Limit add-on input is present and is of type number
|
||||
const limitInput = screen.getByPlaceholderText(
|
||||
'Enter limit',
|
||||
) as HTMLInputElement;
|
||||
expect(limitInput).toBeInTheDocument();
|
||||
expect(limitInput).toHaveAttribute('type', 'number');
|
||||
expect(limitInput).toHaveAttribute('name', 'limit');
|
||||
expect(limitInput).toHaveAttribute('data-testid', 'input-Limit');
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,12 @@
|
||||
/* eslint-disable */
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import React from 'react';
|
||||
import {
|
||||
fireEvent,
|
||||
render,
|
||||
screen,
|
||||
userEvent,
|
||||
waitFor,
|
||||
within,
|
||||
} from 'tests/test-utils';
|
||||
|
||||
import QueryAddOns from '../QueryV2/QueryAddOns/QueryAddOns';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
@@ -55,16 +61,7 @@ jest.mock('../QueryV2/QueryAddOns/HavingFilter/HavingFilter', () => ({
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock(
|
||||
'container/QueryBuilder/filters/ReduceToFilter/ReduceToFilter',
|
||||
() => ({
|
||||
ReduceToFilter: ({ onChange }: any) => (
|
||||
<button data-testid="reduce-to" onClick={() => onChange('sum')}>
|
||||
ReduceToFilter
|
||||
</button>
|
||||
),
|
||||
}),
|
||||
);
|
||||
// ReduceToFilter is not mocked - we test the actual Ant Design Select component
|
||||
|
||||
function baseQuery(overrides: Partial<any> = {}): any {
|
||||
return {
|
||||
@@ -140,7 +137,7 @@ describe('QueryAddOns', () => {
|
||||
expect(screen.getByTestId('order-by-content')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('limit input auto-opens when limit is set and changing it calls handler', () => {
|
||||
it('limit input auto-opens when limit is set and changing it calls handler', async () => {
|
||||
render(
|
||||
<QueryAddOns
|
||||
query={baseQuery({ limit: 5 })}
|
||||
@@ -183,4 +180,88 @@ describe('QueryAddOns', () => {
|
||||
expect(screen.getByTestId('limit-content')).toBeInTheDocument();
|
||||
expect(limitInput.value).toBe('7');
|
||||
});
|
||||
|
||||
it('shows reduce-to add-on when showReduceTo is true', () => {
|
||||
render(
|
||||
<QueryAddOns
|
||||
query={baseQuery()}
|
||||
version="v5"
|
||||
isListViewPanel={false}
|
||||
showReduceTo
|
||||
panelType={PANEL_TYPES.TIME_SERIES}
|
||||
index={0}
|
||||
isForTraceOperator={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('query-add-on-reduce_to')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('auto-opens reduce-to content when reduceTo is set', () => {
|
||||
render(
|
||||
<QueryAddOns
|
||||
query={baseQuery({ reduceTo: 'sum' })}
|
||||
version="v5"
|
||||
isListViewPanel={false}
|
||||
showReduceTo
|
||||
panelType={PANEL_TYPES.TIME_SERIES}
|
||||
index={0}
|
||||
isForTraceOperator={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('calls handleSetQueryData when reduce-to value changes', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const query = baseQuery({
|
||||
reduceTo: 'avg',
|
||||
aggregations: [{ id: 'a', operator: 'count', reduceTo: 'avg' }],
|
||||
});
|
||||
render(
|
||||
<QueryAddOns
|
||||
query={query}
|
||||
version="v5"
|
||||
isListViewPanel={false}
|
||||
showReduceTo
|
||||
panelType={PANEL_TYPES.TIME_SERIES}
|
||||
index={0}
|
||||
isForTraceOperator={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Wait for the reduce-to content section to be visible (it auto-opens when reduceTo is set)
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Get the Select component by its role (combobox)
|
||||
// The Select is within the reduce-to-content section
|
||||
const reduceToContent = screen.getByTestId('reduce-to-content');
|
||||
const selectCombobox = within(reduceToContent).getByRole('combobox');
|
||||
|
||||
// Open the dropdown by clicking on the combobox
|
||||
await user.click(selectCombobox);
|
||||
|
||||
// Wait for the dropdown listbox to appear
|
||||
await screen.findByRole('listbox');
|
||||
|
||||
// Find and click the "Sum" option
|
||||
const sumOption = await screen.findByText('Sum of values in timeframe');
|
||||
await user.click(sumOption);
|
||||
|
||||
// Verify the handler was called with the correct value
|
||||
await waitFor(() => {
|
||||
expect(mockHandleSetQueryData).toHaveBeenCalledWith(0, {
|
||||
...query,
|
||||
aggregations: [
|
||||
{
|
||||
...(query.aggregations?.[0] as any),
|
||||
reduceTo: 'sum',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -55,6 +55,7 @@ export const REACT_QUERY_KEY = {
  GET_METRIC_DETAILS: 'GET_METRIC_DETAILS',
  GET_RELATED_METRICS: 'GET_RELATED_METRICS',
  GET_INSPECT_METRICS_DETAILS: 'GET_INSPECT_METRICS_DETAILS',
  GET_METRIC_METADATA: 'GET_METRIC_METADATA',

  // Traces Funnels Query Keys
  GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',

@@ -120,7 +120,6 @@ function FullView({
      originalGraphType: selectedPanelType,
    };
  }
  updatedQuery.builder.queryData[0].pageSize = 10;
  return {
    query: updatedQuery,
    graphType: PANEL_TYPES.LIST,

@@ -137,7 +137,6 @@ function GridCardGraph({
      originalGraphType: widget.panelTypes,
    };
  }
  updatedQuery.builder.queryData[0].pageSize = 10;
  const initialDataSource = updatedQuery.builder.queryData[0].dataSource;
  return {
    query: updatedQuery,
@@ -58,6 +58,27 @@
|
||||
.explore-content {
|
||||
padding: 0 8px;
|
||||
|
||||
.y-axis-unit-selector-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
|
||||
.save-unit-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
|
||||
.ant-btn {
|
||||
border-radius: 2px;
|
||||
.ant-typography {
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.ant-space {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 20px;
|
||||
@@ -75,6 +96,14 @@
|
||||
.time-series-view {
|
||||
min-width: 100%;
|
||||
width: 100%;
|
||||
position: relative;
|
||||
|
||||
.no-unit-warning {
|
||||
position: absolute;
|
||||
top: 30px;
|
||||
right: 40px;
|
||||
z-index: 1000;
|
||||
}
|
||||
}
|
||||
|
||||
.time-series-container {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import './Explorer.styles.scss';
|
||||
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Switch } from 'antd';
|
||||
import { Switch, Tooltip } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
|
||||
import WarningPopover from 'components/WarningPopover/WarningPopover';
|
||||
@@ -25,10 +25,14 @@ import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToD
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
|
||||
// import QuerySection from './QuerySection';
|
||||
import MetricDetails from '../MetricDetails/MetricDetails';
|
||||
import TimeSeries from './TimeSeries';
|
||||
import { ExplorerTabs } from './types';
|
||||
import { splitQueryIntoOneChartPerQuery } from './utils';
|
||||
import {
|
||||
getMetricUnits,
|
||||
splitQueryIntoOneChartPerQuery,
|
||||
useGetMetrics,
|
||||
} from './utils';
|
||||
|
||||
const ONE_CHART_PER_QUERY_ENABLED_KEY = 'isOneChartPerQueryEnabled';
|
||||
|
||||
@@ -40,6 +44,34 @@ function Explorer(): JSX.Element {
|
||||
currentQuery,
|
||||
} = useQueryBuilder();
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(false);
|
||||
|
||||
const metricNames = useMemo(() => {
|
||||
const currentMetricNames: string[] = [];
|
||||
stagedQuery?.builder.queryData.forEach((query) => {
|
||||
if (query.aggregateAttribute?.key) {
|
||||
currentMetricNames.push(query.aggregateAttribute?.key);
|
||||
}
|
||||
});
|
||||
return currentMetricNames;
|
||||
}, [stagedQuery]);
|
||||
|
||||
const {
|
||||
metrics,
|
||||
isLoading: isMetricUnitsLoading,
|
||||
isError: isMetricUnitsError,
|
||||
} = useGetMetrics(metricNames);
|
||||
|
||||
const units = useMemo(() => getMetricUnits(metrics), [metrics]);
|
||||
|
||||
const areAllMetricUnitsSame = useMemo(
|
||||
() =>
|
||||
!isMetricUnitsLoading &&
|
||||
!isMetricUnitsError &&
|
||||
units.length > 0 &&
|
||||
units.every((unit) => unit && unit === units[0]),
|
||||
[units, isMetricUnitsLoading, isMetricUnitsError],
|
||||
);
|
||||
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
const isOneChartPerQueryEnabled =
|
||||
@@ -48,7 +80,66 @@ function Explorer(): JSX.Element {
|
||||
const [showOneChartPerQuery, toggleShowOneChartPerQuery] = useState(
|
||||
isOneChartPerQueryEnabled,
|
||||
);
|
||||
const [disableOneChartPerQuery, toggleDisableOneChartPerQuery] = useState(
|
||||
false,
|
||||
);
|
||||
const [selectedTab] = useState<ExplorerTabs>(ExplorerTabs.TIME_SERIES);
|
||||
const [yAxisUnit, setYAxisUnit] = useState<string | undefined>();
|
||||
|
||||
const unitsLength = useMemo(() => units.length, [units]);
|
||||
const firstUnit = useMemo(() => units?.[0], [units]);
|
||||
|
||||
useEffect(() => {
|
||||
// Set the y axis unit to the first metric unit if
|
||||
// 1. There is one metric unit and it is not empty
|
||||
// 2. All metric units are the same and not empty
|
||||
// Else, set the y axis unit to empty if
|
||||
// 1. There are more than one metric units and they are not the same
|
||||
// 2. There are no metric units
|
||||
// 3. There is exactly one metric unit but it is empty/undefined
|
||||
if (unitsLength === 0) {
|
||||
setYAxisUnit(undefined);
|
||||
} else if (unitsLength === 1 && firstUnit) {
|
||||
setYAxisUnit(firstUnit);
|
||||
} else if (unitsLength === 1 && !firstUnit) {
|
||||
setYAxisUnit(undefined);
|
||||
} else if (areAllMetricUnitsSame) {
|
||||
if (firstUnit) {
|
||||
setYAxisUnit(firstUnit);
|
||||
} else {
|
||||
setYAxisUnit(undefined);
|
||||
}
|
||||
} else if (unitsLength > 1 && !areAllMetricUnitsSame) {
|
||||
setYAxisUnit(undefined);
|
||||
}
|
||||
}, [unitsLength, firstUnit, areAllMetricUnitsSame]);
|
||||
|
||||
useEffect(() => {
|
||||
// Don't apply logic during loading to avoid overwriting user preferences
|
||||
if (isMetricUnitsLoading) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Disable one chart per query if -
|
||||
// 1. There are more than one metric
|
||||
// 2. The metric units are not the same
|
||||
if (units.length > 1 && !areAllMetricUnitsSame) {
|
||||
toggleShowOneChartPerQuery(true);
|
||||
toggleDisableOneChartPerQuery(true);
|
||||
} else if (units.length <= 1) {
|
||||
toggleShowOneChartPerQuery(false);
|
||||
toggleDisableOneChartPerQuery(true);
|
||||
} else {
|
||||
// When units are the same and loading is complete, restore URL-based preference
|
||||
toggleShowOneChartPerQuery(isOneChartPerQueryEnabled);
|
||||
toggleDisableOneChartPerQuery(false);
|
||||
}
|
||||
}, [
|
||||
units,
|
||||
areAllMetricUnitsSame,
|
||||
isMetricUnitsLoading,
|
||||
isOneChartPerQueryEnabled,
|
||||
]);
|
||||
|
||||
const handleToggleShowOneChartPerQuery = (): void => {
|
||||
toggleShowOneChartPerQuery(!showOneChartPerQuery);
|
||||
@@ -68,15 +159,20 @@ function Explorer(): JSX.Element {
|
||||
[updateAllQueriesOperators],
|
||||
);
|
||||
|
||||
const exportDefaultQuery = useMemo(
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
currentQuery || initialQueriesMap[DataSource.METRICS],
|
||||
PANEL_TYPES.TIME_SERIES,
|
||||
DataSource.METRICS,
|
||||
),
|
||||
[currentQuery, updateAllQueriesOperators],
|
||||
);
|
||||
const exportDefaultQuery = useMemo(() => {
|
||||
const query = updateAllQueriesOperators(
|
||||
currentQuery || initialQueriesMap[DataSource.METRICS],
|
||||
PANEL_TYPES.TIME_SERIES,
|
||||
DataSource.METRICS,
|
||||
);
|
||||
if (yAxisUnit && !query.unit) {
|
||||
return {
|
||||
...query,
|
||||
unit: yAxisUnit,
|
||||
};
|
||||
}
|
||||
return query;
|
||||
}, [currentQuery, updateAllQueriesOperators, yAxisUnit]);
|
||||
|
||||
useShareBuilderUrl({ defaultValue: defaultQuery });
|
||||
|
||||
@@ -90,8 +186,16 @@ function Explorer(): JSX.Element {
|
||||
|
||||
const widgetId = uuid();
|
||||
|
||||
let query = queryToExport || exportDefaultQuery;
|
||||
if (yAxisUnit && !query.unit) {
|
||||
query = {
|
||||
...query,
|
||||
unit: yAxisUnit,
|
||||
};
|
||||
}
|
||||
|
||||
const dashboardEditView = generateExportToDashboardLink({
|
||||
query: queryToExport || exportDefaultQuery,
|
||||
query,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
dashboardId: dashboard.id,
|
||||
widgetId,
|
||||
@@ -99,17 +203,33 @@ function Explorer(): JSX.Element {
|
||||
|
||||
safeNavigate(dashboardEditView);
|
||||
},
|
||||
[exportDefaultQuery, safeNavigate],
|
||||
[exportDefaultQuery, safeNavigate, yAxisUnit],
|
||||
);
|
||||
|
||||
const splitedQueries = useMemo(
|
||||
() =>
|
||||
splitQueryIntoOneChartPerQuery(
|
||||
stagedQuery || initialQueriesMap[DataSource.METRICS],
|
||||
metricNames,
|
||||
units,
|
||||
),
|
||||
[stagedQuery],
|
||||
[stagedQuery, metricNames, units],
|
||||
);
|
||||
|
||||
const [selectedMetricName, setSelectedMetricName] = useState<string | null>(
|
||||
null,
|
||||
);
|
||||
|
||||
const handleOpenMetricDetails = (metricName: string): void => {
|
||||
setIsMetricDetailsOpen(true);
|
||||
setSelectedMetricName(metricName);
|
||||
};
|
||||
|
||||
const handleCloseMetricDetails = (): void => {
|
||||
setIsMetricDetailsOpen(false);
|
||||
setSelectedMetricName(null);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
logEvent(MetricsExplorerEvents.TabChanged, {
|
||||
[MetricsExplorerEventKeys.Tab]: 'explorer',
|
||||
@@ -123,17 +243,44 @@ function Explorer(): JSX.Element {
|
||||
|
||||
const [warning, setWarning] = useState<Warning | undefined>(undefined);
|
||||
|
||||
const oneChartPerQueryDisabledTooltip = useMemo(() => {
|
||||
if (splitedQueries.length <= 1) {
|
||||
return 'One chart per query cannot be toggled for a single query.';
|
||||
}
|
||||
if (units.length <= 1) {
|
||||
return 'One chart per query cannot be toggled when there is only one metric.';
|
||||
}
|
||||
if (disableOneChartPerQuery) {
|
||||
return 'One chart per query cannot be disabled for multiple queries with different units.';
|
||||
}
|
||||
return undefined;
|
||||
}, [disableOneChartPerQuery, splitedQueries.length, units.length]);
|
||||
|
||||
// Show the y axis unit selector if -
|
||||
// 1. There is only one metric
|
||||
// 2. The metric has no saved unit
|
||||
const showYAxisUnitSelector = useMemo(
|
||||
() => !isMetricUnitsLoading && units.length === 1 && !units[0],
|
||||
[units, isMetricUnitsLoading],
|
||||
);
|
||||
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<div className="metrics-explorer-explore-container">
|
||||
<div className="explore-header">
|
||||
<div className="explore-header-left-actions">
|
||||
<span>1 chart/query</span>
|
||||
<Switch
|
||||
checked={showOneChartPerQuery}
|
||||
onChange={handleToggleShowOneChartPerQuery}
|
||||
size="small"
|
||||
/>
|
||||
<Tooltip
|
||||
open={disableOneChartPerQuery ? undefined : false}
|
||||
title={oneChartPerQueryDisabledTooltip}
|
||||
>
|
||||
<Switch
|
||||
checked={showOneChartPerQuery}
|
||||
onChange={handleToggleShowOneChartPerQuery}
|
||||
disabled={disableOneChartPerQuery || splitedQueries.length <= 1}
|
||||
size="small"
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<div className="explore-header-right-actions">
|
||||
{!isEmpty(warning) && <WarningPopover warningData={warning} />}
|
||||
@@ -174,6 +321,16 @@ function Explorer(): JSX.Element {
|
||||
<TimeSeries
|
||||
showOneChartPerQuery={showOneChartPerQuery}
|
||||
setWarning={setWarning}
|
||||
areAllMetricUnitsSame={areAllMetricUnitsSame}
|
||||
isMetricUnitsLoading={isMetricUnitsLoading}
|
||||
isMetricUnitsError={isMetricUnitsError}
|
||||
metricUnits={units}
|
||||
metricNames={metricNames}
|
||||
metrics={metrics}
|
||||
handleOpenMetricDetails={handleOpenMetricDetails}
|
||||
yAxisUnit={yAxisUnit}
|
||||
setYAxisUnit={setYAxisUnit}
|
||||
showYAxisUnitSelector={showYAxisUnitSelector}
|
||||
/>
|
||||
)}
|
||||
{/* TODO: Enable once we have resolved all related metrics issues */}
|
||||
@@ -187,9 +344,17 @@ function Explorer(): JSX.Element {
|
||||
query={exportDefaultQuery}
|
||||
sourcepage={DataSource.METRICS}
|
||||
onExport={handleExport}
|
||||
isOneChartPerQuery={false}
|
||||
isOneChartPerQuery={showOneChartPerQuery}
|
||||
splitedQueries={splitedQueries}
|
||||
/>
|
||||
{isMetricDetailsOpen && (
|
||||
<MetricDetails
|
||||
metricName={selectedMetricName}
|
||||
isOpen={isMetricDetailsOpen}
|
||||
onClose={handleCloseMetricDetails}
|
||||
isModalTimeSelection={false}
|
||||
/>
|
||||
)}
|
||||
</Sentry.ErrorBoundary>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Tooltip, Typography } from 'antd';
|
||||
import { isAxiosError } from 'axios';
|
||||
import classNames from 'classnames';
|
||||
import YAxisUnitSelector from 'components/YAxisUnitSelector';
|
||||
import { YAxisSource } from 'components/YAxisUnitSelector/types';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters/BuilderUnitsFilter/BuilderUnits';
|
||||
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
|
||||
import { convertDataValueToMs } from 'container/TimeSeriesView/utils';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { AlertTriangle } from 'lucide-react';
|
||||
import { useMemo } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
@@ -24,6 +28,13 @@ import { splitQueryIntoOneChartPerQuery } from './utils';
|
||||
function TimeSeries({
|
||||
showOneChartPerQuery,
|
||||
setWarning,
|
||||
isMetricUnitsLoading,
|
||||
metricUnits,
|
||||
metricNames,
|
||||
handleOpenMetricDetails,
|
||||
yAxisUnit,
|
||||
setYAxisUnit,
|
||||
showYAxisUnitSelector,
|
||||
}: TimeSeriesProps): JSX.Element {
|
||||
const { stagedQuery, currentQuery } = useQueryBuilder();
|
||||
|
||||
@@ -56,13 +67,14 @@ function TimeSeries({
|
||||
showOneChartPerQuery
|
||||
? splitQueryIntoOneChartPerQuery(
|
||||
stagedQuery || initialQueriesMap[DataSource.METRICS],
|
||||
metricNames,
|
||||
metricUnits,
|
||||
)
|
||||
: [stagedQuery || initialQueriesMap[DataSource.METRICS]],
|
||||
[showOneChartPerQuery, stagedQuery],
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[showOneChartPerQuery, stagedQuery, JSON.stringify(metricUnits)],
|
||||
);
|
||||
|
||||
const [yAxisUnit, setYAxisUnit] = useState<string>('');
|
||||
|
||||
const queries = useQueries(
|
||||
queryPayloads.map((payload, index) => ({
|
||||
queryKey: [
|
||||
@@ -126,32 +138,148 @@ function TimeSeries({
|
||||
setYAxisUnit(value);
|
||||
};
|
||||
|
||||
// TODO: Enable once we have resolved all related metrics v2 api issues
|
||||
// Show the save unit button if
|
||||
// 1. There is only one metric
|
||||
// 2. The metric has no saved unit
|
||||
// 3. The user has selected a unit
|
||||
// const showSaveUnitButton = useMemo(
|
||||
// () =>
|
||||
// metricUnits.length === 1 &&
|
||||
// Boolean(metrics?.[0]) &&
|
||||
// !metricUnits[0] &&
|
||||
// yAxisUnit,
|
||||
// [metricUnits, metrics, yAxisUnit],
|
||||
// );
|
||||
|
||||
// const {
|
||||
// mutate: updateMetricMetadata,
|
||||
// isLoading: isUpdatingMetricMetadata,
|
||||
// } = useUpdateMetricMetadata();
|
||||
|
||||
// const handleSaveUnit = (): void => {
|
||||
// updateMetricMetadata(
|
||||
// {
|
||||
// metricName: metricNames[0],
|
||||
// payload: {
|
||||
// unit: yAxisUnit,
|
||||
// description: metrics[0]?.description ?? '',
|
||||
// metricType: metrics[0]?.type as MetricType,
|
||||
// temporality: metrics[0]?.temporality,
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// onSuccess: () => {
|
||||
// notifications.success({
|
||||
// message: 'Unit saved successfully',
|
||||
// });
|
||||
// queryClient.invalidateQueries([
|
||||
// REACT_QUERY_KEY.GET_METRIC_DETAILS,
|
||||
// metricNames[0],
|
||||
// ]);
|
||||
// },
|
||||
// onError: () => {
|
||||
// notifications.error({
|
||||
// message: 'Failed to save unit',
|
||||
// });
|
||||
// },
|
||||
// },
|
||||
// );
|
||||
// };
|
||||
|
||||
return (
|
||||
<>
|
||||
<BuilderUnitsFilter onChange={onUnitChangeHandler} yAxisUnit={yAxisUnit} />
|
||||
<div className="y-axis-unit-selector-container">
|
||||
{showYAxisUnitSelector && (
|
||||
<>
|
||||
<YAxisUnitSelector
|
||||
onChange={onUnitChangeHandler}
|
||||
value={yAxisUnit}
|
||||
source={YAxisSource.EXPLORER}
|
||||
data-testid="y-axis-unit-selector"
|
||||
/>
|
||||
{/* TODO: Enable once we have resolved all related metrics v2 api issues */}
|
||||
{/* {showSaveUnitButton && (
|
||||
<div className="save-unit-container">
|
||||
<Typography.Text>
|
||||
Save the selected unit for this metric?
|
||||
</Typography.Text>
|
||||
<Button
|
||||
type="primary"
|
||||
size="small"
|
||||
disabled={isUpdatingMetricMetadata}
|
||||
onClick={handleSaveUnit}
|
||||
>
|
||||
<Typography.Paragraph>Yes</Typography.Paragraph>
|
||||
</Button>
|
||||
</div>
|
||||
)} */}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
<div
|
||||
className={classNames({
|
||||
'time-series-container': changeLayoutForOneChartPerQuery,
|
||||
})}
|
||||
>
|
||||
{responseData.map((datapoint, index) => (
|
||||
<div
|
||||
className="time-series-view"
|
||||
// eslint-disable-next-line react/no-array-index-key
|
||||
key={index}
|
||||
>
|
||||
<TimeSeriesView
|
||||
isFilterApplied={false}
|
||||
isError={queries[index].isError}
|
||||
isLoading={queries[index].isLoading}
|
||||
data={datapoint}
|
||||
yAxisUnit={yAxisUnit}
|
||||
dataSource={DataSource.METRICS}
|
||||
error={queries[index].error as APIError}
|
||||
setWarning={setWarning}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
{responseData.map((datapoint, index) => {
|
||||
const isQueryDataItem = index < metricNames.length;
|
||||
const metricName = isQueryDataItem ? metricNames[index] : undefined;
|
||||
const metricUnit = isQueryDataItem ? metricUnits[index] : undefined;
|
||||
|
||||
// Show the no unit warning if -
|
||||
// 1. The metric query is not loading
|
||||
// 2. The metric units are not loading
|
||||
// 3. There are more than one metric
|
||||
// 4. The current metric unit is empty
|
||||
// 5. Is a queryData item
|
||||
const isMetricUnitEmpty =
|
||||
isQueryDataItem &&
|
||||
!queries[index].isLoading &&
|
||||
!isMetricUnitsLoading &&
|
||||
metricUnits.length > 1 &&
|
||||
!metricUnit &&
|
||||
metricName;
|
||||
|
||||
const currentYAxisUnit = yAxisUnit || metricUnit;
|
||||
|
||||
return (
|
||||
<div
|
||||
className="time-series-view"
|
||||
// eslint-disable-next-line react/no-array-index-key
|
||||
key={index}
|
||||
>
|
||||
{isMetricUnitEmpty && metricName && (
|
||||
<Tooltip
|
||||
className="no-unit-warning"
|
||||
title={
|
||||
<Typography.Text>
|
||||
This metric does not have a unit. Please set one for it in the{' '}
|
||||
<Typography.Link
|
||||
onClick={(): void => handleOpenMetricDetails(metricName)}
|
||||
>
|
||||
metric details
|
||||
</Typography.Link>{' '}
|
||||
page.
|
||||
</Typography.Text>
|
||||
}
|
||||
>
|
||||
<AlertTriangle size={16} color={Color.BG_AMBER_400} />
|
||||
</Tooltip>
|
||||
)}
|
||||
<TimeSeriesView
|
||||
isFilterApplied={false}
|
||||
isError={queries[index].isError}
|
||||
isLoading={queries[index].isLoading || isMetricUnitsLoading}
|
||||
data={datapoint}
|
||||
yAxisUnit={currentYAxisUnit}
|
||||
dataSource={DataSource.METRICS}
|
||||
error={queries[index].error as APIError}
|
||||
setWarning={setWarning}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import * as useOptionsMenuHooks from 'container/OptionsMenu';
|
||||
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
|
||||
@@ -12,13 +14,18 @@ import { MemoryRouter } from 'react-router-dom';
|
||||
import { useSearchParams } from 'react-router-dom-v5-compat';
|
||||
import store from 'store';
|
||||
import { LicenseEvent } from 'types/api/licensesV3/getActive';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
|
||||
|
||||
import Explorer from '../Explorer';
|
||||
import * as useGetMetricsHooks from '../utils';
|
||||
|
||||
const mockSetSearchParams = jest.fn();
|
||||
const queryClient = new QueryClient();
|
||||
const mockUpdateAllQueriesOperators = jest.fn();
|
||||
const mockUpdateAllQueriesOperators = jest
|
||||
.fn()
|
||||
.mockReturnValue(initialQueriesMap[DataSource.METRICS]);
|
||||
const mockUseQueryBuilderData = {
|
||||
handleRunQuery: jest.fn(),
|
||||
stagedQuery: initialQueriesMap[DataSource.METRICS],
|
||||
@@ -126,6 +133,30 @@ jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
|
||||
...mockUseQueryBuilderData,
|
||||
} as any);
|
||||
|
||||
const Y_AXIS_UNIT_SELECTOR_TEST_ID = 'y-axis-unit-selector';
|
||||
|
||||
const mockMetric: MetricMetadata = {
|
||||
type: MetricType.SUM,
|
||||
description: 'metric1 description',
|
||||
unit: 'metric1 unit',
|
||||
temporality: Temporality.CUMULATIVE,
|
||||
isMonotonic: true,
|
||||
};
|
||||
|
||||
function renderExplorer(): void {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter>
|
||||
<Provider store={store}>
|
||||
<ErrorModalProvider>
|
||||
<Explorer />
|
||||
</ErrorModalProvider>
|
||||
</Provider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
}
|
||||
|
||||
describe('Explorer', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
@@ -142,17 +173,7 @@ describe('Explorer', () => {
|
||||
mockSetSearchParams,
|
||||
]);
|
||||
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter>
|
||||
<Provider store={store}>
|
||||
<ErrorModalProvider>
|
||||
<Explorer />
|
||||
</ErrorModalProvider>
|
||||
</Provider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
renderExplorer();
|
||||
|
||||
expect(mockUpdateAllQueriesOperators).toHaveBeenCalledWith(
|
||||
initialQueriesMap[DataSource.METRICS],
|
||||
@@ -166,18 +187,13 @@ describe('Explorer', () => {
|
||||
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
|
||||
mockSetSearchParams,
|
||||
]);
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [mockMetric, mockMetric],
|
||||
});
|
||||
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter>
|
||||
<Provider store={store}>
|
||||
<ErrorModalProvider>
|
||||
<Explorer />
|
||||
</ErrorModalProvider>
|
||||
</Provider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
renderExplorer();
|
||||
|
||||
const toggle = screen.getByRole('switch');
|
||||
expect(toggle).toBeChecked();
|
||||
@@ -188,20 +204,132 @@ describe('Explorer', () => {
|
||||
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
|
||||
mockSetSearchParams,
|
||||
]);
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [mockMetric, mockMetric],
|
||||
});
|
||||
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter>
|
||||
<Provider store={store}>
|
||||
<ErrorModalProvider>
|
||||
<Explorer />
|
||||
</ErrorModalProvider>
|
||||
</Provider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
renderExplorer();
|
||||
|
||||
const toggle = screen.getByRole('switch');
|
||||
expect(toggle).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('should not render y axis unit selector for single metric which has a unit', () => {
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [mockMetric],
|
||||
});
|
||||
|
||||
renderExplorer();
|
||||
|
||||
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
|
||||
expect(yAxisUnitSelector).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should not render y axis unit selector for mutliple metrics with same unit', () => {
|
||||
(useSearchParams as jest.Mock).mockReturnValueOnce([
|
||||
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
|
||||
mockSetSearchParams,
|
||||
]);
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [mockMetric, mockMetric],
|
||||
});
|
||||
|
||||
renderExplorer();
|
||||
|
||||
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
|
||||
expect(yAxisUnitSelector).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should hide y axis unit selector for multiple metrics with different units', () => {
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [mockMetric, mockMetric],
|
||||
});
|
||||
|
||||
renderExplorer();
|
||||
|
||||
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
|
||||
expect(yAxisUnitSelector).not.toBeInTheDocument();
|
||||
|
||||
// One chart per query toggle should be disabled
|
||||
const oneChartPerQueryToggle = screen.getByRole('switch');
|
||||
expect(oneChartPerQueryToggle).toBeDisabled();
|
||||
});
|
||||
|
||||
it('should render empty y axis unit selector for a single metric with no unit', () => {
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [
|
||||
{
|
||||
type: MetricType.SUM,
|
||||
description: 'metric1 description',
|
||||
unit: '',
|
||||
temporality: Temporality.CUMULATIVE,
|
||||
isMonotonic: true,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
renderExplorer();
|
||||
|
||||
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
|
||||
expect(yAxisUnitSelector).toBeInTheDocument();
|
||||
expect(yAxisUnitSelector).toHaveTextContent('Please select a unit');
|
||||
});
|
||||
|
||||
it('one chart per query should be off and disabled when there is only one query', () => {
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [mockMetric],
|
||||
});
|
||||
|
||||
renderExplorer();
|
||||
|
||||
const oneChartPerQueryToggle = screen.getByRole('switch');
|
||||
expect(oneChartPerQueryToggle).not.toBeChecked();
|
||||
expect(oneChartPerQueryToggle).toBeDisabled();
|
||||
});
|
||||
|
||||
it('one chart per query should enabled by default when there are multiple metrics with the same unit', () => {
|
||||
const mockQueryData = {
|
||||
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
|
||||
aggregateAttribute: {
|
||||
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
|
||||
.aggregateAttribute as BaseAutocompleteData),
|
||||
key: 'metric1',
|
||||
},
|
||||
};
|
||||
const mockStagedQueryWithMultipleQueries = {
|
||||
...initialQueriesMap[DataSource.METRICS],
|
||||
builder: {
|
||||
...initialQueriesMap[DataSource.METRICS].builder,
|
||||
queryData: [mockQueryData, mockQueryData],
|
||||
},
|
||||
};
|
||||
|
||||
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
|
||||
...mockUseQueryBuilderData,
|
||||
stagedQuery: mockStagedQueryWithMultipleQueries,
|
||||
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
|
||||
|
||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
metrics: [mockMetric, mockMetric],
|
||||
});
|
||||
|
||||
renderExplorer();
|
||||
|
||||
const oneChartPerQueryToggle = screen.getByRole('switch');
|
||||
expect(oneChartPerQueryToggle).toBeEnabled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,180 @@
|
||||
import { render, RenderResult, screen, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { UpdateMetricMetadataResponse } from 'api/metricsExplorer/updateMetricMetadata';
|
||||
import * as useUpdateMetricMetadataHooks from 'hooks/metricsExplorer/useUpdateMetricMetadata';
|
||||
import { UseUpdateMetricMetadataProps } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
|
||||
import { UseMutationResult } from 'react-query';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
|
||||
|
||||
import TimeSeries from '../TimeSeries';
|
||||
import { TimeSeriesProps } from '../types';
|
||||
|
||||
type MockUpdateMetricMetadata = UseMutationResult<
|
||||
SuccessResponse<UpdateMetricMetadataResponse> | ErrorResponse,
|
||||
Error,
|
||||
UseUpdateMetricMetadataProps
|
||||
>;
|
||||
const mockUpdateMetricMetadata = jest.fn();
|
||||
jest
|
||||
.spyOn(useUpdateMetricMetadataHooks, 'useUpdateMetricMetadata')
|
||||
.mockReturnValue(({
|
||||
mutate: mockUpdateMetricMetadata,
|
||||
isLoading: false,
|
||||
} as Partial<MockUpdateMetricMetadata>) as MockUpdateMetricMetadata);
|
||||
|
||||
jest.mock('container/TimeSeriesView/TimeSeriesView', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockReturnValue(
|
||||
<div role="img" aria-label="warning">
|
||||
TimeSeriesView
|
||||
</div>,
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('react-query', () => ({
|
||||
...jest.requireActual('react-query'),
|
||||
useQueryClient: jest.fn().mockReturnValue({
|
||||
invalidateQueries: jest.fn(),
|
||||
}),
|
||||
useQueries: jest.fn().mockImplementation((queries: any[]) =>
|
||||
queries.map(() => ({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: undefined,
|
||||
})),
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('react-redux', () => ({
|
||||
...jest.requireActual('react-redux'),
|
||||
useSelector: jest.fn().mockReturnValue({
|
||||
globalTime: {
|
||||
selectedTime: '5min',
|
||||
maxTime: 1713738000000,
|
||||
minTime: 1713734400000,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
const mockMetric: MetricMetadata = {
|
||||
type: MetricType.SUM,
|
||||
description: 'metric1 description',
|
||||
unit: 'metric1 unit',
|
||||
temporality: Temporality.CUMULATIVE,
|
||||
isMonotonic: true,
|
||||
};
|
||||
|
||||
const mockSetWarning = jest.fn();
|
||||
const mockSetIsMetricDetailsOpen = jest.fn();
|
||||
const mockSetYAxisUnit = jest.fn();
|
||||
|
||||
function renderTimeSeries(
|
||||
overrides: Partial<TimeSeriesProps> = {},
|
||||
): RenderResult {
|
||||
return render(
|
||||
<TimeSeries
|
||||
showOneChartPerQuery={false}
|
||||
setWarning={mockSetWarning}
|
||||
areAllMetricUnitsSame={false}
|
||||
isMetricUnitsLoading={false}
|
||||
metricUnits={[]}
|
||||
metricNames={[]}
|
||||
metrics={[]}
|
||||
isMetricUnitsError={false}
|
||||
handleOpenMetricDetails={mockSetIsMetricDetailsOpen}
|
||||
yAxisUnit="count"
|
||||
setYAxisUnit={mockSetYAxisUnit}
|
||||
showYAxisUnitSelector={false}
|
||||
// eslint-disable-next-line react/jsx-props-no-spreading
|
||||
{...overrides}
|
||||
/>,
|
||||
);
|
||||
}
|
||||
|
||||
describe('TimeSeries', () => {
|
||||
it('should render a warning icon when a metric has no unit among multiple metrics', () => {
|
||||
const user = userEvent.setup();
|
||||
const { container } = renderTimeSeries({
|
||||
metricUnits: ['', 'count'],
|
||||
metricNames: ['metric1', 'metric2'],
|
||||
metrics: [undefined, undefined],
|
||||
});
|
||||
|
||||
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
|
||||
user.hover(alertIcon);
|
||||
waitFor(() =>
|
||||
expect(
|
||||
screen.findByText('This metric does not have a unit'),
|
||||
).toBeInTheDocument(),
|
||||
);
|
||||
});
|
||||
|
||||
it('clicking on warning icon tooltip should open metric details modal', async () => {
|
||||
const user = userEvent.setup();
|
||||
const { container } = renderTimeSeries({
|
||||
metricUnits: ['', 'count'],
|
||||
metricNames: ['metric1', 'metric2'],
|
||||
metrics: [mockMetric, mockMetric],
|
||||
yAxisUnit: 'seconds',
|
||||
});
|
||||
|
||||
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
|
||||
user.hover(alertIcon);
|
||||
|
||||
const metricDetailsLink = await screen.findByText('metric details');
|
||||
user.click(metricDetailsLink);
|
||||
|
||||
waitFor(() =>
|
||||
expect(mockSetIsMetricDetailsOpen).toHaveBeenCalledWith('metric1'),
|
||||
);
|
||||
});
|
||||
|
||||
// TODO: Unskip this test once the save unit button is implemented
|
||||
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
|
||||
it.skip('shows Save unit button when metric had no unit but one is selected', () => {
|
||||
const { findByText, getByRole } = renderTimeSeries({
|
||||
metricUnits: [undefined],
|
||||
metricNames: ['metric1'],
|
||||
metrics: [mockMetric],
|
||||
yAxisUnit: 'seconds',
|
||||
});
|
||||
|
||||
expect(
|
||||
findByText('Save the selected unit for this metric?'),
|
||||
).toBeInTheDocument();
|
||||
|
||||
const yesButton = getByRole('button', { name: 'Yes' });
|
||||
expect(yesButton).toBeInTheDocument();
|
||||
expect(yesButton).toBeEnabled();
|
||||
});
|
||||
|
||||
// TODO: Unskip this test once the save unit button is implemented
|
||||
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
|
||||
it.skip('clicking on save unit button shoould upated metric metadata', () => {
|
||||
const user = userEvent.setup();
|
||||
const { getByRole } = renderTimeSeries({
|
||||
metricUnits: [''],
|
||||
metricNames: ['metric1'],
|
||||
metrics: [mockMetric],
|
||||
yAxisUnit: 'seconds',
|
||||
});
|
||||
|
||||
const yesButton = getByRole('button', { name: /Yes/i });
|
||||
user.click(yesButton);
|
||||
|
||||
expect(mockUpdateMetricMetadata).toHaveBeenCalledWith(
|
||||
{
|
||||
metricName: 'metric1',
|
||||
payload: expect.objectContaining({ unit: 'seconds' }),
|
||||
},
|
||||
expect.objectContaining({
|
||||
onSuccess: expect.any(Function),
|
||||
onError: expect.any(Function),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,161 @@
|
||||
import { renderHook } from '@testing-library/react';
|
||||
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import * as useGetMultipleMetricsHook from 'hooks/metricsExplorer/useGetMultipleMetrics';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import {
|
||||
MetricMetadata,
|
||||
MetricMetadataResponse,
|
||||
} from 'types/api/metricsExplorer/v2/getMetricMetadata';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
Query,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import {
|
||||
getMetricUnits,
|
||||
splitQueryIntoOneChartPerQuery,
|
||||
useGetMetrics,
|
||||
} from '../utils';
|
||||
|
||||
const MOCK_QUERY_DATA_1: IBuilderQuery = {
|
||||
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
|
||||
aggregateAttribute: {
|
||||
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
|
||||
.aggregateAttribute as BaseAutocompleteData),
|
||||
key: 'metric1',
|
||||
},
|
||||
};
|
||||
|
||||
const MOCK_QUERY_DATA_2: IBuilderQuery = {
|
||||
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
|
||||
aggregateAttribute: {
|
||||
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
|
||||
.aggregateAttribute as BaseAutocompleteData),
|
||||
key: 'metric2',
|
||||
},
|
||||
};
|
||||
const MOCK_FORMULA_DATA: IBuilderFormula = {
|
||||
expression: '1 + 1',
|
||||
disabled: false,
|
||||
queryName: 'Mock Formula',
|
||||
legend: 'Mock Legend',
|
||||
};
|
||||
|
||||
const MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA: Query = {
|
||||
...initialQueriesMap[DataSource.METRICS],
|
||||
builder: {
|
||||
...initialQueriesMap[DataSource.METRICS].builder,
|
||||
queryData: [MOCK_QUERY_DATA_1, MOCK_QUERY_DATA_2],
|
||||
queryFormulas: [MOCK_FORMULA_DATA, MOCK_FORMULA_DATA],
|
||||
},
|
||||
};
|
||||
|
||||
describe('splitQueryIntoOneChartPerQuery', () => {
|
||||
it('should split a query with multiple queryData to multiple distinct queries, each with a single queryData', () => {
|
||||
const result = splitQueryIntoOneChartPerQuery(
|
||||
MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA,
|
||||
['metric1', 'metric2'],
|
||||
[undefined, 'unit2'],
|
||||
);
|
||||
expect(result).toHaveLength(4);
|
||||
// Verify query 1 has the correct data
|
||||
expect(result[0].builder.queryData).toHaveLength(1);
|
||||
expect(result[0].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_1);
|
||||
expect(result[0].builder.queryFormulas).toHaveLength(0);
|
||||
expect(result[0].unit).toBeUndefined();
|
||||
// Verify query 2 has the correct data
|
||||
expect(result[1].builder.queryData).toHaveLength(1);
|
||||
expect(result[1].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_2);
|
||||
expect(result[1].builder.queryFormulas).toHaveLength(0);
|
||||
expect(result[1].unit).toBe('unit2');
|
||||
// Verify query 3 has the correct data
|
||||
expect(result[2].builder.queryFormulas).toHaveLength(1);
|
||||
expect(result[2].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
|
||||
expect(result[2].builder.queryData).toHaveLength(2); // 2 disabled queries
|
||||
expect(result[2].builder.queryData[0].disabled).toBe(true);
|
||||
expect(result[2].builder.queryData[1].disabled).toBe(true);
|
||||
expect(result[2].unit).toBeUndefined();
|
||||
// Verify query 4 has the correct data
|
||||
expect(result[3].builder.queryFormulas).toHaveLength(1);
|
||||
expect(result[3].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
|
||||
expect(result[3].builder.queryData).toHaveLength(2); // 2 disabled queries
|
||||
expect(result[3].builder.queryData[0].disabled).toBe(true);
|
||||
expect(result[3].builder.queryData[1].disabled).toBe(true);
|
||||
expect(result[3].unit).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
const MOCK_METRIC_METADATA: MetricMetadata = {
|
||||
description: 'Metric 1 description',
|
||||
unit: 'unit1',
|
||||
type: MetricType.GAUGE,
|
||||
temporality: Temporality.DELTA,
|
||||
isMonotonic: true,
|
||||
};
|
||||
|
||||
describe('useGetMetrics', () => {
|
||||
beforeEach(() => {
|
||||
jest
|
||||
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
|
||||
.mockReturnValue([
|
||||
({
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
data: {
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: MOCK_METRIC_METADATA,
|
||||
},
|
||||
},
|
||||
} as Partial<
|
||||
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
|
||||
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
|
||||
]);
|
||||
});
|
||||
|
||||
it('should return the correct metrics data', () => {
|
||||
const { result } = renderHook(() => useGetMetrics(['metric1']));
|
||||
expect(result.current.metrics).toHaveLength(1);
|
||||
expect(result.current.metrics[0]).toBeDefined();
|
||||
expect(result.current.metrics[0]).toEqual(MOCK_METRIC_METADATA);
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.isError).toBe(false);
|
||||
});
|
||||
|
||||
it('should return array of undefined values of correct length when metrics data is not yet loaded', () => {
|
||||
jest
|
||||
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
|
||||
.mockReturnValue([
|
||||
({
|
||||
isLoading: true,
|
||||
isError: false,
|
||||
} as Partial<
|
||||
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
|
||||
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
|
||||
]);
|
||||
const { result } = renderHook(() => useGetMetrics(['metric1']));
|
||||
expect(result.current.metrics).toHaveLength(1);
|
||||
expect(result.current.metrics[0]).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMetricUnits', () => {
|
||||
it('should return the same unit for units that are not known to the universal unit mapper', () => {
|
||||
const result = getMetricUnits([MOCK_METRIC_METADATA]);
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0]).toEqual(MOCK_METRIC_METADATA.unit);
|
||||
});
|
||||
|
||||
it('should return universal unit for units that are known to the universal unit mapper', () => {
|
||||
const result = getMetricUnits([{ ...MOCK_METRIC_METADATA, unit: 'seconds' }]);
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0]).toBe('s');
|
||||
});
|
||||
});
|
||||
@@ -3,6 +3,7 @@ import { Dispatch, SetStateAction } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { SuccessResponse, Warning } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
|
||||
|
||||
export enum ExplorerTabs {
|
||||
TIME_SERIES = 'time-series',
|
||||
@@ -12,6 +13,16 @@ export enum ExplorerTabs {
|
||||
export interface TimeSeriesProps {
|
||||
showOneChartPerQuery: boolean;
|
||||
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
|
||||
areAllMetricUnitsSame: boolean;
|
||||
isMetricUnitsLoading: boolean;
|
||||
isMetricUnitsError: boolean;
|
||||
metricUnits: (string | undefined)[];
|
||||
metricNames: string[];
|
||||
metrics: (MetricMetadata | undefined)[];
|
||||
handleOpenMetricDetails: (metricName: string) => void;
|
||||
yAxisUnit: string | undefined;
|
||||
setYAxisUnit: (unit: string) => void;
|
||||
showYAxisUnitSelector: boolean;
|
||||
}
|
||||
|
||||
export interface RelatedMetricsProps {
|
||||
|
||||
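The new TimeSeriesProps fields above thread per-metric units into the chart; the areAllMetricUnitsSame flag can be derived from metricUnits. A minimal illustrative TypeScript sketch (not the actual SigNoz container logic, which may differ), assuming the flag only cares about the units that are actually defined:

function areAllUnitsSame(units: (string | undefined)[]): boolean {
  // Ignore series with no unit; the flag is meaningful only when at least
  // one unit is known and every known unit agrees.
  const defined = units.filter((u): u is string => Boolean(u));
  return defined.length > 0 && defined.every((u) => u === defined[0]);
}

console.log(areAllUnitsSame(['s', 's', undefined])); // true
console.log(areAllUnitsSame(['s', 'By'])); // false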
@@ -1,20 +1,40 @@
|
||||
import { mapMetricUnitToUniversalUnit } from 'components/YAxisUnitSelector/utils';
|
||||
import { useGetMultipleMetrics } from 'hooks/metricsExplorer/useGetMultipleMetrics';
|
||||
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
export const splitQueryIntoOneChartPerQuery = (query: Query): Query[] => {
|
||||
/**
|
||||
* Split a query with multiple queryData to multiple distinct queries, each with a single queryData.
|
||||
* @param query - The query to split
|
||||
* @param units - The units of the metrics, can be undefined if the metric has no unit
|
||||
* @returns The split queries
|
||||
*/
|
||||
export const splitQueryIntoOneChartPerQuery = (
|
||||
query: Query,
|
||||
metricNames: string[],
|
||||
units: (string | undefined)[],
|
||||
): Query[] => {
|
||||
const queries: Query[] = [];
|
||||
|
||||
query.builder.queryData.forEach((currentQuery) => {
|
||||
const newQuery = {
|
||||
...query,
|
||||
id: uuid(),
|
||||
builder: {
|
||||
...query.builder,
|
||||
queryData: [currentQuery],
|
||||
queryFormulas: [],
|
||||
},
|
||||
};
|
||||
queries.push(newQuery);
|
||||
if (currentQuery.aggregateAttribute?.key) {
|
||||
const metricIndex = metricNames.indexOf(
|
||||
currentQuery.aggregateAttribute?.key,
|
||||
);
|
||||
const unit = metricIndex >= 0 ? units[metricIndex] : undefined;
|
||||
const newQuery = {
|
||||
...query,
|
||||
id: uuid(),
|
||||
builder: {
|
||||
...query.builder,
|
||||
queryData: [currentQuery],
|
||||
queryFormulas: [],
|
||||
},
|
||||
unit,
|
||||
};
|
||||
queries.push(newQuery);
|
||||
}
|
||||
});
|
||||
|
||||
query.builder.queryFormulas.forEach((currentFormula) => {
|
||||
@@ -35,3 +55,43 @@ export const splitQueryIntoOneChartPerQuery = (query: Query): Query[] => {
|
||||
|
||||
return queries;
|
||||
};
|
||||
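The split above tags each per-metric chart with the unit of its aggregate attribute by looking the metric up in metricNames and taking the unit at the same index. A trimmed-down sketch of that lookup, using a hypothetical MiniQueryData shape instead of the full IBuilderQuery type:

interface MiniQueryData {
  aggregateAttribute?: { key: string };
}

function resolveUnit(
  queryData: MiniQueryData,
  metricNames: string[],
  units: (string | undefined)[],
): string | undefined {
  const key = queryData.aggregateAttribute?.key;
  if (!key) return undefined;
  const metricIndex = metricNames.indexOf(key);
  return metricIndex >= 0 ? units[metricIndex] : undefined;
}

// Only the first metric has a known unit, so only its chart gets one.
console.log(resolveUnit({ aggregateAttribute: { key: 'cpu_usage' } }, ['cpu_usage', 'mem_usage'], ['s', undefined])); // 's'
console.log(resolveUnit({}, ['cpu_usage'], ['s'])); // undefined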
|
||||
/**
|
||||
* Hook to get data for multiple metrics with a synchronous loading and error state
|
||||
* @param metricNames - The names of the metrics to get
|
||||
* @param isEnabled - Whether the hook is enabled
|
||||
* @returns The loading state, the metrics data, and the error state
|
||||
*/
|
||||
export function useGetMetrics(
|
||||
metricNames: string[],
|
||||
isEnabled = true,
|
||||
): {
|
||||
isLoading: boolean;
|
||||
isError: boolean;
|
||||
metrics: (MetricMetadata | undefined)[];
|
||||
} {
|
||||
const metricsData = useGetMultipleMetrics(metricNames, {
|
||||
enabled: metricNames.length > 0 && isEnabled,
|
||||
});
|
||||
return {
|
||||
isLoading: metricsData.some((metric) => metric.isLoading),
|
||||
metrics: metricsData
|
||||
.map((metric) => metric.data?.data)
|
||||
.map((data) => data?.data),
|
||||
isError: metricsData.some((metric) => metric.isError),
|
||||
};
|
||||
}
|
||||
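useGetMetrics collapses N per-metric query results into one synchronous loading/error/data triple. The shape of that reduction, sketched with a hypothetical QueryLike type standing in for react-query's UseQueryResult:

interface QueryLike<T> {
  isLoading: boolean;
  isError: boolean;
  data?: T;
}

function collapse<T>(
  results: QueryLike<T>[],
): { isLoading: boolean; isError: boolean; data: (T | undefined)[] } {
  return {
    // One in-flight request keeps the combined state loading; one failure marks it errored.
    isLoading: results.some((r) => r.isLoading),
    isError: results.some((r) => r.isError),
    // Data stays positional, so callers can index it in metric-name order.
    data: results.map((r) => r.data),
  };
}

console.log(collapse<number>([{ isLoading: false, isError: false, data: 1 }, { isLoading: true, isError: false }]));
// { isLoading: true, isError: false, data: [ 1, undefined ] }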
|
||||
/**
|
||||
* To get the units of the metrics in the universal unit standard.
|
||||
* If the unit is not known to the universal unit mapper, it will return the unit as is.
|
||||
* @param metrics - The metrics to get the units for
|
||||
* @returns The units of the metrics, can be undefined if the metric has no unit
|
||||
*/
|
||||
export function getMetricUnits(
|
||||
metrics: (MetricMetadata | undefined)[],
|
||||
): (string | undefined)[] {
|
||||
return metrics
|
||||
.map((metric) => metric?.unit)
|
||||
.map((unit) => mapMetricUnitToUniversalUnit(unit) || undefined);
|
||||
}
|
||||
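getMetricUnits normalizes units through the universal unit mapper and passes unknown units through unchanged. A sketch of that behaviour with an assumed two-entry table standing in for mapMetricUnitToUniversalUnit:

const UNIVERSAL_UNITS: Record<string, string> = { seconds: 's', bytes: 'By' }; // assumed sample entries

function toUniversalUnit(unit?: string): string | undefined {
  if (!unit) return undefined;
  // Known units map to the universal symbol; unknown units are returned as-is.
  return UNIVERSAL_UNITS[unit] ?? unit;
}

console.log(toUniversalUnit('seconds')); // 's'
console.log(toUniversalUnit('unit1')); // 'unit1'
console.log(toUniversalUnit(undefined)); // undefined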
|
||||
@@ -131,8 +131,8 @@ function MetricDetails({
|
||||
>
|
||||
Open in Explorer
|
||||
</Button>
|
||||
{/* Show the based on the feature flag. Will remove before releasing the feature */}
|
||||
{showInspectFeature && (
|
||||
{/* Show the inspect button if the metric type is GAUGE */}
|
||||
{showInspectFeature && openInspectModal && (
|
||||
<Button
|
||||
className="inspect-metrics-button"
|
||||
aria-label="Inspect Metric"
|
||||
|
||||
@@ -11,7 +11,7 @@ export interface MetricDetailsProps {
|
||||
isOpen: boolean;
|
||||
metricName: string | null;
|
||||
isModalTimeSelection: boolean;
|
||||
openInspectModal: (metricName: string) => void;
|
||||
openInspectModal?: (metricName: string) => void;
|
||||
}
|
||||
|
||||
export interface DashboardsAndAlertsPopoverProps {
|
||||
|
||||
@@ -370,10 +370,6 @@ function NewWidget({
|
||||
// this has been moved here from the left container
|
||||
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
|
||||
const updatedQuery = cloneDeep(stagedQuery || initialQueriesMap.metrics);
|
||||
if (updatedQuery?.builder?.queryData?.[0]) {
|
||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||
}
|
||||
|
||||
if (selectedWidget) {
|
||||
if (selectedGraph === PANEL_TYPES.LIST) {
|
||||
return {
|
||||
@@ -419,16 +415,12 @@ function NewWidget({
|
||||
useEffect(() => {
|
||||
if (stagedQuery) {
|
||||
setIsLoadingPanelData(false);
|
||||
const updatedStagedQuery = cloneDeep(stagedQuery);
|
||||
if (updatedStagedQuery?.builder?.queryData?.[0]) {
|
||||
updatedStagedQuery.builder.queryData[0].pageSize = 10;
|
||||
}
|
||||
setRequestData((prev) => ({
|
||||
...prev,
|
||||
selectedTime: selectedTime.enum || prev.selectedTime,
|
||||
globalSelectedInterval: customGlobalSelectedInterval,
|
||||
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
||||
query: updatedStagedQuery,
|
||||
query: stagedQuery,
|
||||
fillGaps: selectedWidget.fillSpans || false,
|
||||
isLogScale: selectedWidget.isLogScale || false,
|
||||
formatForWeb:
|
||||
|
||||
@@ -206,6 +206,10 @@
|
||||
.ant-select-selector {
|
||||
border-color: var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-300);
|
||||
|
||||
.ant-select-selection-item {
|
||||
color: var(--text-ink-400);
|
||||
}
|
||||
}
|
||||
|
||||
.ant-input-number {
|
||||
|
||||
@@ -242,7 +242,6 @@ export function Formula({
|
||||
</div>
|
||||
<InputWithLabel
|
||||
label="Limit"
|
||||
type="number"
|
||||
onChange={(value): void => handleChangeLimit(Number(value))}
|
||||
initialValue={formula?.limit ?? undefined}
|
||||
placeholder="Enter limit"
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import { Select } from 'antd';
|
||||
import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { MetricAggregateOperator } from 'types/common/queryBuilder';
|
||||
|
||||
interface SpaceAggregationOptionsProps {
|
||||
panelType: PANEL_TYPES | null;
|
||||
@@ -22,39 +20,13 @@ export default function SpaceAggregationOptions({
|
||||
operators,
|
||||
qbVersion,
|
||||
}: SpaceAggregationOptionsProps): JSX.Element {
|
||||
const placeHolderText =
|
||||
panelType === PANEL_TYPES.VALUE || qbVersion === 'v3' ? 'Sum' : 'Sum By';
|
||||
const [defaultValue, setDefaultValue] = useState(
|
||||
selectedValue || placeHolderText,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (!selectedValue) {
|
||||
if (
|
||||
aggregatorAttributeType === ATTRIBUTE_TYPES.HISTOGRAM ||
|
||||
aggregatorAttributeType === ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
|
||||
) {
|
||||
setDefaultValue(MetricAggregateOperator.P90);
|
||||
onSelect(MetricAggregateOperator.P90);
|
||||
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.SUM) {
|
||||
setDefaultValue(MetricAggregateOperator.SUM);
|
||||
onSelect(MetricAggregateOperator.SUM);
|
||||
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.GAUGE) {
|
||||
setDefaultValue(MetricAggregateOperator.AVG);
|
||||
onSelect(MetricAggregateOperator.AVG);
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [aggregatorAttributeType]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className="spaceAggregationOptionsContainer"
|
||||
key={aggregatorAttributeType}
|
||||
>
|
||||
<Select
|
||||
defaultValue={defaultValue}
|
||||
defaultValue={selectedValue}
|
||||
style={{ minWidth: '5.625rem' }}
|
||||
disabled={disabled}
|
||||
onChange={onSelect}
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
.selectOptionContainer {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
overflow-x: auto;
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 0.2rem;
|
||||
height: 0.2rem;
|
||||
}
|
||||
}
|
||||
|
||||
.option-renderer-tooltip {
|
||||
pointer-events: none;
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import './QueryBuilderSearch.styles.scss';
|
||||
import './OptionRenderer.styles.scss';
|
||||
|
||||
import { Tooltip } from 'antd';
|
||||
|
||||
@@ -13,7 +13,11 @@ function OptionRenderer({
|
||||
return (
|
||||
<span className="option">
|
||||
{type ? (
|
||||
<Tooltip title={`${value}`} placement="topLeft">
|
||||
<Tooltip
|
||||
title={`${value}`}
|
||||
placement="topLeft"
|
||||
rootClassName="option-renderer-tooltip"
|
||||
>
|
||||
<div className="selectOptionContainer">
|
||||
<div className="option-value">{value}</div>
|
||||
<div className="option-meta-data-container">
|
||||
@@ -29,7 +33,11 @@ function OptionRenderer({
|
||||
</div>
|
||||
</Tooltip>
|
||||
) : (
|
||||
<Tooltip title={label} placement="topLeft">
|
||||
<Tooltip
|
||||
title={label}
|
||||
placement="topLeft"
|
||||
rootClassName="option-renderer-tooltip"
|
||||
>
|
||||
<span>{label}</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
@@ -5,19 +5,6 @@
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.selectOptionContainer {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
overflow-x: auto;
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 0.2rem;
|
||||
height: 0.2rem;
|
||||
}
|
||||
}
|
||||
|
||||
.logs-popup {
|
||||
&.hide-scroll {
|
||||
.rc-virtual-list-holder {
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { ReduceToFilter } from './ReduceToFilter';
|
||||
|
||||
const mockOnChange = jest.fn();
|
||||
|
||||
function baseQuery(overrides: Partial<IBuilderQuery> = {}): IBuilderQuery {
|
||||
return {
|
||||
dataSource: 'traces',
|
||||
aggregations: [],
|
||||
groupBy: [],
|
||||
orderBy: [],
|
||||
legend: '',
|
||||
limit: null,
|
||||
having: { expression: '' },
|
||||
...overrides,
|
||||
} as IBuilderQuery;
|
||||
}
|
||||
|
||||
describe('ReduceToFilter', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('initializes with default avg when no reduceTo is set', () => {
|
||||
render(<ReduceToFilter query={baseQuery()} onChange={mockOnChange} />);
|
||||
|
||||
expect(screen.getByTestId('reduce-to')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText('Average of values in timeframe'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('initializes from query.aggregations[0].reduceTo', () => {
|
||||
render(
|
||||
<ReduceToFilter
|
||||
query={baseQuery({
|
||||
aggregations: [{ reduceTo: 'sum' } as any],
|
||||
aggregateAttribute: { key: 'test', type: MetricType.SUM },
|
||||
})}
|
||||
onChange={mockOnChange}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('Sum of values in timeframe')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('initializes from query.reduceTo when aggregations[0].reduceTo is not set', () => {
|
||||
render(
|
||||
<ReduceToFilter
|
||||
query={baseQuery({
|
||||
reduceTo: 'max',
|
||||
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
|
||||
})}
|
||||
onChange={mockOnChange}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('Max of values in timeframe')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('updates to sum when aggregateAttribute.type is SUM', async () => {
|
||||
const { rerender } = render(
|
||||
<ReduceToFilter
|
||||
query={baseQuery({
|
||||
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
|
||||
})}
|
||||
onChange={mockOnChange}
|
||||
/>,
|
||||
);
|
||||
|
||||
rerender(
|
||||
<ReduceToFilter
|
||||
query={baseQuery({
|
||||
aggregateAttribute: { key: 'test2', type: MetricType.SUM },
|
||||
})}
|
||||
onChange={mockOnChange}
|
||||
/>,
|
||||
);
|
||||
|
||||
const reduceToFilterText = (await screen.findByText(
|
||||
'Sum of values in timeframe',
|
||||
)) as HTMLElement;
|
||||
expect(reduceToFilterText).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Select } from 'antd';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { REDUCE_TO_VALUES } from 'constants/queryBuilder';
|
||||
import { memo } from 'react';
|
||||
import { memo, useEffect, useRef, useState } from 'react';
|
||||
import { MetricAggregation } from 'types/api/v5/queryRange';
|
||||
// ** Types
|
||||
import { ReduceOperators } from 'types/common/queryBuilder';
|
||||
@@ -12,19 +13,46 @@ export const ReduceToFilter = memo(function ReduceToFilter({
|
||||
query,
|
||||
onChange,
|
||||
}: ReduceToFilterProps): JSX.Element {
|
||||
const reduceToValue =
|
||||
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
|
||||
|
||||
const currentValue =
|
||||
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
|
||||
REDUCE_TO_VALUES[0];
|
||||
const isMounted = useRef<boolean>(false);
|
||||
const [currentValue, setCurrentValue] = useState<
|
||||
SelectOption<ReduceOperators, string>
|
||||
>(REDUCE_TO_VALUES[2]); // default to avg
|
||||
|
||||
const handleChange = (
|
||||
newValue: SelectOption<ReduceOperators, string>,
|
||||
): void => {
|
||||
setCurrentValue(newValue);
|
||||
onChange(newValue.value);
|
||||
};
|
||||
|
||||
useEffect(
|
||||
() => {
|
||||
if (!isMounted.current) {
|
||||
const reduceToValue =
|
||||
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
|
||||
|
||||
setCurrentValue(
|
||||
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
|
||||
REDUCE_TO_VALUES[2],
|
||||
);
|
||||
isMounted.current = true;
|
||||
return;
|
||||
}
|
||||
|
||||
const aggregationAttributeType = query.aggregateAttribute?.type as
|
||||
| MetricType
|
||||
| undefined;
|
||||
|
||||
if (aggregationAttributeType === MetricType.SUM) {
|
||||
handleChange(REDUCE_TO_VALUES[1]);
|
||||
} else {
|
||||
handleChange(REDUCE_TO_VALUES[2]);
|
||||
}
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[query.aggregateAttribute?.key],
|
||||
);
|
||||
|
||||
return (
|
||||
<Select
|
||||
placeholder="Reduce to"
|
||||
|
||||
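The effect above picks a default reduce-to operator whenever the aggregate attribute changes: REDUCE_TO_VALUES[1] for Sum metrics and REDUCE_TO_VALUES[2] (avg) otherwise. A sketch of that decision, assuming the option order implied by the component's comments; operator and metric-type strings are illustrative:

type ReduceOp = 'last' | 'sum' | 'avg' | 'max' | 'min';
const REDUCE_ORDER: ReduceOp[] = ['last', 'sum', 'avg', 'max', 'min']; // assumed ordering of REDUCE_TO_VALUES

function defaultReduceTo(metricType?: 'Sum' | 'Gauge' | 'Histogram'): ReduceOp {
  // Monotonic sums read best when summed over the timeframe; everything else averages.
  return metricType === 'Sum' ? REDUCE_ORDER[1] : REDUCE_ORDER[2];
}

console.log(defaultReduceTo('Sum')); // 'sum'
console.log(defaultReduceTo('Gauge')); // 'avg'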
32  frontend/src/hooks/metricsExplorer/useGetMultipleMetrics.ts  Normal file
@@ -0,0 +1,32 @@
|
||||
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { useQueries, UseQueryOptions, UseQueryResult } from 'react-query';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
|
||||
|
||||
type QueryResult = UseQueryResult<
|
||||
SuccessResponseV2<MetricMetadataResponse>,
|
||||
Error
|
||||
>;
|
||||
|
||||
type UseGetMultipleMetrics = (
|
||||
metricNames: string[],
|
||||
options?: UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>,
|
||||
headers?: Record<string, string>,
|
||||
) => QueryResult[];
|
||||
|
||||
export const useGetMultipleMetrics: UseGetMultipleMetrics = (
|
||||
metricNames,
|
||||
options,
|
||||
headers,
|
||||
) =>
|
||||
useQueries(
|
||||
metricNames.map(
|
||||
(metricName) =>
|
||||
({
|
||||
queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
|
||||
queryFn: ({ signal }) => getMetricMetadata(metricName, signal, headers),
|
||||
...options,
|
||||
} as UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>),
|
||||
),
|
||||
);
|
||||
@@ -5,7 +5,7 @@ import updateMetricMetadata, {
|
||||
import { useMutation, UseMutationResult } from 'react-query';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
|
||||
interface UseUpdateMetricMetadataProps {
|
||||
export interface UseUpdateMetricMetadataProps {
|
||||
metricName: string;
|
||||
payload: UpdateMetricMetadataProps;
|
||||
}
|
||||
|
||||
@@ -188,7 +188,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
|
||||
timeAggregation: MetricAggregateOperator.RATE,
|
||||
metricName: 'new_sum_metric',
|
||||
temporality: '',
|
||||
spaceAggregation: '',
|
||||
spaceAggregation: MetricAggregateOperator.SUM,
|
||||
},
|
||||
],
|
||||
}),
|
||||
@@ -239,7 +239,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
|
||||
timeAggregation: MetricAggregateOperator.RATE,
|
||||
metricName: 'new_sum_metric',
|
||||
temporality: '',
|
||||
spaceAggregation: '',
|
||||
spaceAggregation: MetricAggregateOperator.SUM,
|
||||
},
|
||||
],
|
||||
}),
|
||||
@@ -315,7 +315,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
|
||||
timeAggregation: MetricAggregateOperator.AVG,
|
||||
metricName: 'new_gauge',
|
||||
temporality: '',
|
||||
spaceAggregation: '',
|
||||
spaceAggregation: MetricAggregateOperator.AVG,
|
||||
},
|
||||
],
|
||||
}),
|
||||
|
||||
@@ -317,7 +317,7 @@ export const useQueryOperations: UseQueryOperations = ({
|
||||
timeAggregation: MetricAggregateOperator.RATE,
|
||||
metricName: newQuery.aggregateAttribute?.key || '',
|
||||
temporality: '',
|
||||
spaceAggregation: '',
|
||||
spaceAggregation: MetricAggregateOperator.SUM,
|
||||
},
|
||||
];
|
||||
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
|
||||
@@ -326,7 +326,20 @@ export const useQueryOperations: UseQueryOperations = ({
|
||||
timeAggregation: MetricAggregateOperator.AVG,
|
||||
metricName: newQuery.aggregateAttribute?.key || '',
|
||||
temporality: '',
|
||||
spaceAggregation: '',
|
||||
spaceAggregation: MetricAggregateOperator.AVG,
|
||||
},
|
||||
];
|
||||
} else if (
|
||||
newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.HISTOGRAM ||
|
||||
newQuery.aggregateAttribute?.type ===
|
||||
ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
|
||||
) {
|
||||
newQuery.aggregations = [
|
||||
{
|
||||
timeAggregation: '',
|
||||
metricName: newQuery.aggregateAttribute?.key || '',
|
||||
temporality: '',
|
||||
spaceAggregation: MetricAggregateOperator.P90,
|
||||
},
|
||||
];
|
||||
} else {
|
||||
|
||||
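The hunks above replace the empty spaceAggregation with an explicit default per metric type: SUM for counters, AVG for gauges, P90 for (exponential) histograms. Condensed into one function as an illustrative sketch; the type and operator strings are assumed to mirror ATTRIBUTE_TYPES and MetricAggregateOperator, and the fallback branch is an assumption since the remaining cases are handled elsewhere in the hook:

type AttributeType = 'Sum' | 'Gauge' | 'Histogram' | 'ExponentialHistogram';

function defaultSpaceAggregation(type?: AttributeType): string {
  switch (type) {
    case 'Sum':
      return 'sum';
    case 'Gauge':
      return 'avg';
    case 'Histogram':
    case 'ExponentialHistogram':
      return 'p90'; // percentiles are the natural rollup for histogram buckets
    default:
      return 'avg'; // assumed fallback
  }
}

console.log(defaultSpaceAggregation('Sum')); // 'sum'
console.log(defaultSpaceAggregation('Histogram')); // 'p90'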
@@ -15,7 +15,7 @@ function NoData(): JSX.Element {
|
||||
<Typography.Text className="not-found-text-1">
|
||||
Uh-oh! We cannot show the selected trace.
|
||||
<span className="not-found-text-2">
|
||||
This can happen in either of the two scenraios -
|
||||
This can happen in either of the two scenarios -
|
||||
</span>
|
||||
</Typography.Text>
|
||||
</section>
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
|
||||
export interface MetricMetadata {
|
||||
description: string;
|
||||
type: MetricType;
|
||||
unit: string;
|
||||
temporality: Temporality;
|
||||
isMonotonic: boolean;
|
||||
}
|
||||
|
||||
export interface MetricMetadataResponse {
|
||||
status: string;
|
||||
data: MetricMetadata;
|
||||
}
|
||||
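For reference, a hypothetical payload matching the MetricMetadataResponse shape above; the field values, including the MetricType and Temporality strings, are illustrative assumptions rather than values copied from the API:

const sampleMetadataResponse = {
  status: 'success',
  data: {
    description: 'CPU time consumed by the process',
    type: 'Sum', // assumed MetricType value
    unit: 'seconds',
    temporality: 'Cumulative', // assumed Temporality value
    isMonotonic: true,
  },
};

console.log(sampleMetadataResponse.data.unit); // 'seconds'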
@@ -10,8 +10,8 @@ import (
|
||||
|
||||
func (provider *provider) addPromoteRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.EditAccess(provider.promoteHandler.HandlePromoteAndIndexPaths), handler.OpenAPIDef{
|
||||
ID: "PromotePaths",
|
||||
Tags: []string{"promoted_paths", "logs", "json_logs"},
|
||||
ID: "HandlePromoteAndIndexPaths",
|
||||
Tags: []string{"logs"},
|
||||
Summary: "Promote and index paths",
|
||||
Description: "This endpoint promotes and indexes paths",
|
||||
Request: new([]*promotetypes.PromotePath),
|
||||
@@ -25,8 +25,8 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.ViewAccess(provider.promoteHandler.ListPromotedAndIndexedPaths), handler.OpenAPIDef{
|
||||
ID: "PromotePaths",
|
||||
Tags: []string{"promoted_paths", "logs", "json_logs"},
|
||||
ID: "ListPromotedAndIndexedPaths",
|
||||
Tags: []string{"logs"},
|
||||
Summary: "Promote and index paths",
|
||||
Description: "This endpoint promotes and indexes paths",
|
||||
Request: nil,
|
||||
|
||||
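Both handlers above are registered on /api/v1/logs/promote_paths: POST promotes and indexes paths, GET lists what has been promoted. A hypothetical client-side call for the POST route; the request body shape loosely follows []*promotetypes.PromotePath and the `path` field name is an assumption:

async function promotePaths(paths: { path: string }[]): Promise<void> {
  const res = await fetch('/api/v1/logs/promote_paths', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(paths),
  });
  if (!res.ok) {
    throw new Error(`promote_paths failed with status ${res.status}`);
  }
}

// promotePaths([{ path: 'user.name' }]); // field name is hypothetical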
149  pkg/telemetrylogs/json_access_pb.go  Normal file
@@ -0,0 +1,149 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
var (
|
||||
CodePlanIndexOutOfBounds = errors.MustNewCode("plan_index_out_of_bounds")
|
||||
)
|
||||
|
||||
type JSONAccessPlanBuilder struct {
|
||||
key *telemetrytypes.TelemetryFieldKey
|
||||
value any
|
||||
op qbtypes.FilterOperator
|
||||
parts []string
|
||||
getTypes func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)
|
||||
isPromoted bool
|
||||
}
|
||||
|
||||
// buildPlan recursively builds the path plan tree
|
||||
func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, parent *telemetrytypes.JSONAccessNode, isDynArrChild bool) (*telemetrytypes.JSONAccessNode, error) {
|
||||
if index >= len(pb.parts) {
|
||||
return nil, errors.NewInvalidInputf(CodePlanIndexOutOfBounds, "index is out of bounds")
|
||||
}
|
||||
|
||||
part := pb.parts[index]
|
||||
pathSoFar := strings.Join(pb.parts[:index+1], ArraySep)
|
||||
isTerminal := index == len(pb.parts)-1
|
||||
|
||||
// Calculate progression parameters based on parent's values
|
||||
var maxTypes, maxPaths int
|
||||
if isDynArrChild {
|
||||
// Child of Dynamic array - reset progression to base values (16, 256)
|
||||
// This happens when we switch from Array(Dynamic) to Array(JSON)
|
||||
maxTypes = 16
|
||||
maxPaths = 256
|
||||
} else if parent != nil {
|
||||
// Child of JSON array - use parent's progression divided by 2 and 4
|
||||
maxTypes = parent.MaxDynamicTypes / 2
|
||||
maxPaths = parent.MaxDynamicPaths / 4
|
||||
if maxTypes < 0 {
|
||||
maxTypes = 0
|
||||
}
|
||||
if maxPaths < 0 {
|
||||
maxPaths = 0
|
||||
}
|
||||
}
|
||||
|
||||
types, err := pb.getTypes(ctx, pathSoFar)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Create node for this path segment
|
||||
node := &telemetrytypes.JSONAccessNode{
|
||||
Name: part,
|
||||
IsTerminal: isTerminal,
|
||||
AvailableTypes: types,
|
||||
Branches: make(map[telemetrytypes.JSONAccessBranchType]*telemetrytypes.JSONAccessNode),
|
||||
Parent: parent,
|
||||
MaxDynamicTypes: maxTypes,
|
||||
MaxDynamicPaths: maxPaths,
|
||||
}
|
||||
|
||||
hasJSON := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayJSON)
|
||||
hasDynamic := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayDynamic)
|
||||
|
||||
// Configure terminal if this is the last part
|
||||
if isTerminal {
|
||||
valueType, _ := inferDataType(pb.value, pb.op, pb.key)
|
||||
node.TerminalConfig = &telemetrytypes.TerminalConfig{
|
||||
Key: pb.key,
|
||||
ElemType: *pb.key.JSONDataType,
|
||||
ValueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType],
|
||||
}
|
||||
} else {
|
||||
if hasJSON {
|
||||
node.Branches[telemetrytypes.BranchJSON], err = pb.buildPlan(ctx, index+1, node, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if hasDynamic {
|
||||
node.Branches[telemetrytypes.BranchDynamic], err = pb.buildPlan(ctx, index+1, node, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return node, nil
|
||||
}
|
||||
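The budget progression above is the core of the planner: a child reached through an Array(Dynamic) branch resets to (16, 256), while a child reached through an Array(JSON) branch inherits its parent's budget divided by 2 and 4, floored at zero. The same arithmetic in a TypeScript sketch, purely for illustration:

function nextBudget(
  parent: { maxTypes: number; maxPaths: number },
  isDynArrChild: boolean,
): { maxTypes: number; maxPaths: number } {
  if (isDynArrChild) {
    // Switching from Array(Dynamic) to Array(JSON) restarts the progression.
    return { maxTypes: 16, maxPaths: 256 };
  }
  return {
    maxTypes: Math.max(0, Math.floor(parent.maxTypes / 2)),
    maxPaths: Math.max(0, Math.floor(parent.maxPaths / 4)),
  };
}

// Successive Array(JSON) levels starting from a (32, 1024) budget:
let budget = { maxTypes: 32, maxPaths: 1024 };
for (let depth = 1; depth <= 4; depth += 1) {
  budget = nextBudget(budget, false);
  console.log(depth, budget); // (16,256) -> (8,64) -> (4,16) -> (2,4)
}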
|
||||
// PlanJSON builds a tree structure representing the complete JSON path traversal
|
||||
// that precomputes all possible branches and their types
|
||||
func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbtypes.FilterOperator,
|
||||
value any,
|
||||
getTypes func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error),
|
||||
) (telemetrytypes.JSONAccessPlan, error) {
|
||||
// if path is empty, return nil
|
||||
if key.Name == "" {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "path is empty")
|
||||
}
|
||||
|
||||
// TODO: PlanJSON requires the Start and End of the Query to select correct column between promoted and body_json using
|
||||
// creation time in distributed_promoted_paths
|
||||
path := strings.ReplaceAll(key.Name, ArrayAnyIndex, ArraySep)
|
||||
parts := strings.Split(path, ArraySep)
|
||||
|
||||
pb := &JSONAccessPlanBuilder{
|
||||
key: key,
|
||||
op: op,
|
||||
value: value,
|
||||
parts: parts,
|
||||
getTypes: getTypes,
|
||||
isPromoted: key.Materialized,
|
||||
}
|
||||
plans := telemetrytypes.JSONAccessPlan{}
|
||||
|
||||
node, err := pb.buildPlan(ctx, 0,
|
||||
telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn,
|
||||
32, 0),
|
||||
false,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
plans = append(plans, node)
|
||||
|
||||
if pb.isPromoted {
|
||||
node, err := pb.buildPlan(ctx, 0,
|
||||
telemetrytypes.NewRootJSONAccessNode(LogsV2BodyPromotedColumn,
|
||||
32, 1024),
|
||||
true,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
plans = append(plans, node)
|
||||
}
|
||||
|
||||
return plans, nil
|
||||
}
|
||||
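PlanJSON always produces one plan rooted at the body_json column and, for promoted keys, a second plan rooted at body_json_promoted with a larger Dynamic-path budget. The fan-out, sketched in TypeScript for illustration (column names and budgets taken from the code above):

interface PlanRoot {
  column: 'body_json' | 'body_json_promoted';
  maxDynamicTypes: number;
  maxDynamicPaths: number;
}

function planRoots(isPromoted: boolean): PlanRoot[] {
  const roots: PlanRoot[] = [
    { column: 'body_json', maxDynamicTypes: 32, maxDynamicPaths: 0 },
  ];
  if (isPromoted) {
    roots.push({ column: 'body_json_promoted', maxDynamicTypes: 32, maxDynamicPaths: 1024 });
  }
  return roots;
}

console.log(planRoots(false).map((r) => r.column)); // ['body_json']
console.log(planRoots(true).map((r) => r.column)); // ['body_json', 'body_json_promoted']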
880  pkg/telemetrylogs/json_access_pb_test.go  Normal file
@@ -0,0 +1,880 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions for Test Data Creation
|
||||
// ============================================================================
|
||||
|
||||
// makeKey creates a TelemetryFieldKey for testing
|
||||
func makeKey(name string, dataType telemetrytypes.JSONDataType, materialized bool) *telemetrytypes.TelemetryFieldKey {
|
||||
return &telemetrytypes.TelemetryFieldKey{
|
||||
Name: name,
|
||||
JSONDataType: &dataType,
|
||||
Materialized: materialized,
|
||||
}
|
||||
}
|
||||
|
||||
// makeGetTypes creates a getTypes function from a map of path -> types
|
||||
func makeGetTypes(typesMap map[string][]telemetrytypes.JSONDataType) func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
|
||||
return func(_ context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
|
||||
return typesMap[path], nil
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions for Node Validation
|
||||
// ============================================================================
|
||||
|
||||
// jsonAccessTestNode is a test-only, YAML-friendly view of JSONAccessNode.
|
||||
// It intentionally omits Parent to avoid cycles and only keeps the fields
|
||||
// that are useful for understanding / asserting the plan structure.
|
||||
type jsonAccessTestNode struct {
|
||||
Name string `yaml:"name"`
|
||||
Column string `yaml:"column,omitempty"`
|
||||
IsTerminal bool `yaml:"isTerminal,omitempty"`
|
||||
MaxDynamicTypes int `yaml:"maxDynamicTypes,omitempty"`
|
||||
MaxDynamicPaths int `yaml:"maxDynamicPaths,omitempty"`
|
||||
ElemType string `yaml:"elemType,omitempty"`
|
||||
ValueType string `yaml:"valueType,omitempty"`
|
||||
AvailableTypes []string `yaml:"availableTypes,omitempty"`
|
||||
Branches map[string]*jsonAccessTestNode `yaml:"branches,omitempty"`
|
||||
}
|
||||
|
||||
// toTestNode converts a JSONAccessNode tree into jsonAccessTestNode so that
|
||||
// it can be serialized to YAML for easy visual comparison in tests.
|
||||
func toTestNode(n *telemetrytypes.JSONAccessNode) *jsonAccessTestNode {
|
||||
if n == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
out := &jsonAccessTestNode{
|
||||
Name: n.Name,
|
||||
IsTerminal: n.IsTerminal,
|
||||
MaxDynamicTypes: n.MaxDynamicTypes,
|
||||
MaxDynamicPaths: n.MaxDynamicPaths,
|
||||
}
|
||||
|
||||
// Column information for top-level plan nodes: their parent is the root,
|
||||
// whose parent is nil.
|
||||
if n.Parent != nil && n.Parent.Parent == nil {
|
||||
out.Column = n.Parent.Name
|
||||
}
|
||||
|
||||
// AvailableTypes as strings (using StringValue for stable representation)
|
||||
if len(n.AvailableTypes) > 0 {
|
||||
out.AvailableTypes = make([]string, 0, len(n.AvailableTypes))
|
||||
for _, t := range n.AvailableTypes {
|
||||
out.AvailableTypes = append(out.AvailableTypes, t.StringValue())
|
||||
}
|
||||
}
|
||||
|
||||
// Terminal config
|
||||
if n.TerminalConfig != nil {
|
||||
out.ElemType = n.TerminalConfig.ElemType.StringValue()
|
||||
out.ValueType = n.TerminalConfig.ValueType.StringValue()
|
||||
}
|
||||
|
||||
// Branches
|
||||
if len(n.Branches) > 0 {
|
||||
out.Branches = make(map[string]*jsonAccessTestNode, len(n.Branches))
|
||||
for bt, child := range n.Branches {
|
||||
out.Branches[bt.StringValue()] = toTestNode(child)
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// plansToYAML converts a slice of JSONAccessNode plans to a YAML string that
|
||||
// can be compared against a per-test expectedTree.
|
||||
func plansToYAML(t *testing.T, plans []*telemetrytypes.JSONAccessNode) string {
|
||||
t.Helper()
|
||||
|
||||
testNodes := make([]*jsonAccessTestNode, 0, len(plans))
|
||||
for _, p := range plans {
|
||||
testNodes = append(testNodes, toTestNode(p))
|
||||
}
|
||||
|
||||
got, err := yaml.Marshal(testNodes)
|
||||
require.NoError(t, err)
|
||||
return string(got)
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Test Cases for Node Methods
|
||||
// ============================================================================
|
||||
|
||||
func TestNode_Alias(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
node *telemetrytypes.JSONAccessNode
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Root node returns name as-is",
|
||||
node: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
expected: LogsV2BodyJSONColumn,
|
||||
},
|
||||
{
|
||||
name: "Node without parent returns backticked name",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "user",
|
||||
Parent: nil,
|
||||
},
|
||||
expected: "`user`",
|
||||
},
|
||||
{
|
||||
name: "Node with root parent uses dot separator",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "age",
|
||||
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
},
|
||||
expected: "`" + LogsV2BodyJSONColumn + ".age`",
|
||||
},
|
||||
{
|
||||
name: "Node with non-root parent uses array separator",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "name",
|
||||
Parent: &telemetrytypes.JSONAccessNode{
|
||||
Name: "education",
|
||||
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
},
|
||||
},
|
||||
expected: "`" + LogsV2BodyJSONColumn + ".education[].name`",
|
||||
},
|
||||
{
|
||||
name: "Nested array path with multiple levels",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "type",
|
||||
Parent: &telemetrytypes.JSONAccessNode{
|
||||
Name: "awards",
|
||||
Parent: &telemetrytypes.JSONAccessNode{
|
||||
Name: "education",
|
||||
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: "`" + LogsV2BodyJSONColumn + ".education[].awards[].type`",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := tt.node.Alias()
|
||||
require.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNode_FieldPath(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
node *telemetrytypes.JSONAccessNode
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Simple field path from root",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "user",
|
||||
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
},
|
||||
// FieldPath() always wraps the field name in backticks
|
||||
expected: LogsV2BodyJSONColumn + ".`user`",
|
||||
},
|
||||
{
|
||||
name: "Field path with backtick-required key",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "user-name", // requires backtick
|
||||
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
},
|
||||
expected: LogsV2BodyJSONColumn + ".`user-name`",
|
||||
},
|
||||
{
|
||||
name: "Nested field path",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "age",
|
||||
Parent: &telemetrytypes.JSONAccessNode{
|
||||
Name: "user",
|
||||
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
},
|
||||
},
|
||||
// FieldPath() always wraps the field name in backticks
|
||||
expected: "`" + LogsV2BodyJSONColumn + ".user`.`age`",
|
||||
},
|
||||
{
|
||||
name: "Array element field path",
|
||||
node: &telemetrytypes.JSONAccessNode{
|
||||
Name: "name",
|
||||
Parent: &telemetrytypes.JSONAccessNode{
|
||||
Name: "education",
|
||||
Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
|
||||
},
|
||||
},
|
||||
// FieldPath() always wraps the field name in backticks
|
||||
expected: "`" + LogsV2BodyJSONColumn + ".education`.`name`",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := tt.node.FieldPath()
|
||||
require.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Test Cases for PlanJSON
|
||||
// ============================================================================
|
||||
|
||||
func TestPlanJSON_BasicStructure(t *testing.T) {
|
||||
_, getTypes := testTypeSet()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
key *telemetrytypes.TelemetryFieldKey
|
||||
expectErr bool
|
||||
expectedYAML string
|
||||
}{
|
||||
{
|
||||
name: "Simple path not promoted",
|
||||
key: makeKey("user.name", telemetrytypes.String, false),
|
||||
expectedYAML: `
|
||||
- name: user.name
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 16
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "Simple path promoted",
|
||||
key: makeKey("user.name", telemetrytypes.String, true),
|
||||
expectedYAML: `
|
||||
- name: user.name
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 16
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
- name: user.name
|
||||
column: body_json_promoted
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "Empty path returns error",
|
||||
key: makeKey("", telemetrytypes.String, false),
|
||||
expectErr: true,
|
||||
expectedYAML: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
plans, err := PlanJSON(context.Background(), tt.key, qbtypes.FilterOperatorEqual, "John", getTypes)
|
||||
if tt.expectErr {
|
||||
require.Error(t, err)
|
||||
require.Nil(t, plans)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
got := plansToYAML(t, plans)
|
||||
require.YAMLEq(t, tt.expectedYAML, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPlanJSON_ArrayPaths(t *testing.T) {
|
||||
_, getTypes := testTypeSet()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
expectedYAML string
|
||||
}{
|
||||
{
|
||||
name: "Single array level - JSON branch only",
|
||||
path: "education[].name",
|
||||
expectedYAML: `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: name
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 8
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "Single array level - both JSON and Dynamic branches",
|
||||
path: "education[].awards[].type",
|
||||
expectedYAML: `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: awards
|
||||
availableTypes:
|
||||
- Array(Dynamic)
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
branches:
|
||||
json:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 4
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
dynamic:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "Deeply nested array path",
|
||||
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
|
||||
expectedYAML: `
|
||||
- name: interests
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: entities
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
branches:
|
||||
json:
|
||||
name: reviews
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 4
|
||||
branches:
|
||||
json:
|
||||
name: entries
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 2
|
||||
branches:
|
||||
json:
|
||||
name: metadata
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 1
|
||||
branches:
|
||||
json:
|
||||
name: positions
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
branches:
|
||||
json:
|
||||
name: name
|
||||
availableTypes:
|
||||
- String
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "ArrayAnyIndex replacement [*] to []",
|
||||
path: "education[*].name",
|
||||
expectedYAML: `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: name
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 8
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
key := makeKey(tt.path, telemetrytypes.String, false)
|
||||
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, plans)
|
||||
require.Len(t, plans, 1)
|
||||
got := plansToYAML(t, plans)
|
||||
require.YAMLEq(t, tt.expectedYAML, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
|
||||
_, getTypes := testTypeSet()
|
||||
path := "education[].awards[].type"
|
||||
value := "sports"
|
||||
|
||||
t.Run("Non-promoted plan", func(t *testing.T) {
|
||||
key := makeKey(path, telemetrytypes.String, false)
|
||||
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, plans, 1)
|
||||
|
||||
expectedYAML := `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: awards
|
||||
availableTypes:
|
||||
- Array(Dynamic)
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
branches:
|
||||
json:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 4
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
dynamic:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`
|
||||
got := plansToYAML(t, plans)
|
||||
require.YAMLEq(t, expectedYAML, got)
|
||||
})
|
||||
|
||||
t.Run("Promoted plan", func(t *testing.T) {
|
||||
key := makeKey(path, telemetrytypes.String, true)
|
||||
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, plans, 2)
|
||||
|
||||
expectedYAML := `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: awards
|
||||
availableTypes:
|
||||
- Array(Dynamic)
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
branches:
|
||||
json:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 4
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
dynamic:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
- name: education
|
||||
column: body_json_promoted
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
branches:
|
||||
json:
|
||||
name: awards
|
||||
availableTypes:
|
||||
- Array(Dynamic)
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
maxDynamicPaths: 64
|
||||
branches:
|
||||
json:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 4
|
||||
maxDynamicPaths: 16
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
dynamic:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`
|
||||
got := plansToYAML(t, plans)
|
||||
require.YAMLEq(t, expectedYAML, got)
|
||||
})
|
||||
}
|
||||
|
||||
func TestPlanJSON_EdgeCases(t *testing.T) {
|
||||
_, getTypes := testTypeSet()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
value any
|
||||
expectedYAML string
|
||||
}{
|
||||
{
|
||||
name: "Path with no available types",
|
||||
path: "unknown.path",
|
||||
value: "test",
|
||||
expectedYAML: `
|
||||
- name: unknown.path
|
||||
column: body_json
|
||||
maxDynamicTypes: 16
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "Very deep nesting - validates progression doesn't go negative",
|
||||
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
|
||||
value: "Engineer",
|
||||
expectedYAML: `
|
||||
- name: interests
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: entities
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
branches:
|
||||
json:
|
||||
name: reviews
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 4
|
||||
branches:
|
||||
json:
|
||||
name: entries
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 2
|
||||
branches:
|
||||
json:
|
||||
name: metadata
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 1
|
||||
branches:
|
||||
json:
|
||||
name: positions
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
branches:
|
||||
json:
|
||||
name: name
|
||||
availableTypes:
|
||||
- String
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "Path with mixed scalar and array types",
|
||||
path: "education[].type",
|
||||
value: "high_school",
|
||||
expectedYAML: `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: type
|
||||
availableTypes:
|
||||
- String
|
||||
- Int64
|
||||
maxDynamicTypes: 8
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "Exists with only array types available",
|
||||
path: "education",
|
||||
value: nil,
|
||||
expectedYAML: `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
isTerminal: true
|
||||
elemType: Array(JSON)
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Choose key type based on path; operator does not affect the tree shape asserted here.
|
||||
keyType := telemetrytypes.String
|
||||
switch tt.path {
|
||||
case "education":
|
||||
keyType = telemetrytypes.ArrayJSON
|
||||
case "education[].type":
|
||||
keyType = telemetrytypes.String
|
||||
}
|
||||
key := makeKey(tt.path, keyType, false)
|
||||
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, tt.value, getTypes)
|
||||
require.NoError(t, err)
|
||||
got := plansToYAML(t, plans)
|
||||
require.YAMLEq(t, tt.expectedYAML, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPlanJSON_TreeStructure(t *testing.T) {
|
||||
_, getTypes := testTypeSet()
|
||||
path := "education[].awards[].participated[].team[].branch"
|
||||
key := makeKey(path, telemetrytypes.String, false)
|
||||
plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, plans, 1)
|
||||
|
||||
expectedYAML := `
|
||||
- name: education
|
||||
column: body_json
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
branches:
|
||||
json:
|
||||
name: awards
|
||||
availableTypes:
|
||||
- Array(Dynamic)
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
branches:
|
||||
json:
|
||||
name: participated
|
||||
availableTypes:
|
||||
- Array(Dynamic)
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 4
|
||||
branches:
|
||||
json:
|
||||
name: team
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 2
|
||||
branches:
|
||||
json:
|
||||
name: branch
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 1
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
dynamic:
|
||||
name: team
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
branches:
|
||||
json:
|
||||
name: branch
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 8
|
||||
maxDynamicPaths: 64
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
dynamic:
|
||||
name: participated
|
||||
availableTypes:
|
||||
- Array(Dynamic)
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
branches:
|
||||
json:
|
||||
name: team
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 8
|
||||
maxDynamicPaths: 64
|
||||
branches:
|
||||
json:
|
||||
name: branch
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 4
|
||||
maxDynamicPaths: 16
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
dynamic:
|
||||
name: team
|
||||
availableTypes:
|
||||
- Array(JSON)
|
||||
maxDynamicTypes: 16
|
||||
maxDynamicPaths: 256
|
||||
branches:
|
||||
json:
|
||||
name: branch
|
||||
availableTypes:
|
||||
- String
|
||||
maxDynamicTypes: 8
|
||||
maxDynamicPaths: 64
|
||||
isTerminal: true
|
||||
elemType: String
|
||||
valueType: String
|
||||
`
|
||||
|
||||
got := plansToYAML(t, plans)
|
||||
require.YAMLEq(t, expectedYAML, got)
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Test Data Setup
|
||||
// ============================================================================
|
||||
|
||||
// testTypeSet returns a map of path->types and a getTypes function for testing
|
||||
// This represents the type information available in the test JSON structure
|
||||
//
|
||||
// TODO(Piyush): Remove this unparam nolint
|
||||
// nolint:unparam
|
||||
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
|
||||
types := map[string][]telemetrytypes.JSONDataType{
|
||||
"user.name": {telemetrytypes.String},
|
||||
"user.age": {telemetrytypes.Int64, telemetrytypes.String},
|
||||
"user.height": {telemetrytypes.Float64},
|
||||
"education": {telemetrytypes.ArrayJSON},
|
||||
"education[].name": {telemetrytypes.String},
|
||||
"education[].type": {telemetrytypes.String, telemetrytypes.Int64},
|
||||
"education[].internal_type": {telemetrytypes.String},
|
||||
"education[].metadata.location": {telemetrytypes.String},
|
||||
"education[].parameters": {telemetrytypes.ArrayFloat64, telemetrytypes.ArrayDynamic},
|
||||
"education[].duration": {telemetrytypes.String},
|
||||
"education[].mode": {telemetrytypes.String},
|
||||
"education[].year": {telemetrytypes.Int64},
|
||||
"education[].field": {telemetrytypes.String},
|
||||
"education[].awards": {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
|
||||
"education[].awards[].name": {telemetrytypes.String},
|
||||
"education[].awards[].rank": {telemetrytypes.Int64},
|
||||
"education[].awards[].medal": {telemetrytypes.String},
|
||||
"education[].awards[].type": {telemetrytypes.String},
|
||||
"education[].awards[].semester": {telemetrytypes.Int64},
|
||||
"education[].awards[].participated": {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
|
||||
"education[].awards[].participated[].type": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].field": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].project_type": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].project_name": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].race_type": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].team_based": {telemetrytypes.Bool},
|
||||
"education[].awards[].participated[].team_name": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].team": {telemetrytypes.ArrayJSON},
|
||||
"education[].awards[].participated[].team[].name": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].team[].branch": {telemetrytypes.String},
|
||||
"education[].awards[].participated[].team[].semester": {telemetrytypes.Int64},
|
||||
"interests": {telemetrytypes.ArrayJSON},
|
||||
"interests[].type": {telemetrytypes.String},
|
||||
"interests[].entities": {telemetrytypes.ArrayJSON},
|
||||
"interests[].entities.application_date": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews": {telemetrytypes.ArrayJSON},
|
||||
"interests[].entities[].reviews[].given_by": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].remarks": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].weight": {telemetrytypes.Float64},
|
||||
"interests[].entities[].reviews[].passed": {telemetrytypes.Bool},
|
||||
"interests[].entities[].reviews[].type": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].analysis_type": {telemetrytypes.Int64},
|
||||
"interests[].entities[].reviews[].entries": {telemetrytypes.ArrayJSON},
|
||||
"interests[].entities[].reviews[].entries[].subject": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].entries[].status": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].entries[].metadata": {telemetrytypes.ArrayJSON},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].company": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].experience": {telemetrytypes.Int64},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].unit": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].positions": {telemetrytypes.ArrayJSON},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].positions[].name": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].positions[].duration": {telemetrytypes.Int64, telemetrytypes.Float64},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].positions[].unit": {telemetrytypes.String},
|
||||
"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {telemetrytypes.ArrayInt64, telemetrytypes.ArrayString},
|
||||
"message": {telemetrytypes.String},
|
||||
}
|
||||
|
||||
return types, makeGetTypes(types)
|
||||
}
|
||||
@@ -245,7 +245,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
indexesMap := make(map[string][]schemamigrator.Index)
|
||||
indexes := make(map[string][]schemamigrator.Index)
|
||||
for rows.Next() {
|
||||
var name string
|
||||
var typeFull string
|
||||
@@ -254,7 +254,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
|
||||
if err := rows.Scan(&name, &typeFull, &expr, &granularity); err != nil {
|
||||
return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to scan string indexed column")
|
||||
}
|
||||
indexesMap[name] = append(indexesMap[name], schemamigrator.Index{
|
||||
indexes[name] = append(indexes[name], schemamigrator.Index{
|
||||
Name: name,
|
||||
Type: typeFull,
|
||||
Expression: expr,
|
||||
@@ -262,7 +262,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
|
||||
})
|
||||
}
|
||||
|
||||
return indexesMap, nil
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {
|
||||
|
||||
82  pkg/types/telemetrytypes/json_access_plan.go  Normal file
@@ -0,0 +1,82 @@
|
||||
package telemetrytypes
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type JSONAccessBranchType struct {
|
||||
valuer.String
|
||||
}
|
||||
|
||||
var (
|
||||
BranchJSON = JSONAccessBranchType{valuer.NewString("json")}
|
||||
BranchDynamic = JSONAccessBranchType{valuer.NewString("dynamic")}
|
||||
)
|
||||
|
||||
type JSONAccessPlan = []*JSONAccessNode
|
||||
|
||||
type TerminalConfig struct {
|
||||
Key *TelemetryFieldKey
|
||||
ElemType JSONDataType
|
||||
ValueType JSONDataType
|
||||
}
|
||||
|
||||
// JSONAccessNode is a tree structure representing the complete JSON path traversal
|
||||
// that precomputes all possible branches and their types
|
||||
type JSONAccessNode struct {
|
||||
// Node information
|
||||
Name string
|
||||
IsTerminal bool
|
||||
isRoot bool // marked true for only body_json and body_json_promoted
|
||||
|
||||
// Precomputed type information (single source of truth)
|
||||
AvailableTypes []JSONDataType
|
||||
|
||||
// Array type branches (Array(JSON) vs Array(Dynamic))
|
||||
Branches map[JSONAccessBranchType]*JSONAccessNode
|
||||
|
||||
// Terminal configuration
|
||||
TerminalConfig *TerminalConfig
|
||||
|
||||
// Parent reference for traversal
|
||||
Parent *JSONAccessNode
|
||||
|
||||
// JSON progression parameters (precomputed during planning)
|
||||
MaxDynamicTypes int
|
||||
MaxDynamicPaths int
|
||||
}
|
||||
|
||||
func NewRootJSONAccessNode(name string, maxDynamicTypes, maxDynamicPaths int) *JSONAccessNode {
|
||||
return &JSONAccessNode{
|
||||
Name: name,
|
||||
isRoot: true,
|
||||
MaxDynamicTypes: maxDynamicTypes,
|
||||
MaxDynamicPaths: maxDynamicPaths,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *JSONAccessNode) Alias() string {
|
||||
if n.isRoot {
|
||||
return n.Name
|
||||
} else if n.Parent == nil {
|
||||
return fmt.Sprintf("`%s`", n.Name)
|
||||
}
|
||||
|
||||
parentAlias := strings.TrimLeft(n.Parent.Alias(), "`")
|
||||
parentAlias = strings.TrimRight(parentAlias, "`")
|
||||
|
||||
sep := jsontypeexporter.ArraySeparator
|
||||
if n.Parent.isRoot {
|
||||
sep = "."
|
||||
}
|
||||
return fmt.Sprintf("`%s%s%s`", parentAlias, sep, n.Name)
|
||||
}
|
||||
|
||||
func (n *JSONAccessNode) FieldPath() string {
|
||||
key := "`" + n.Name + "`"
|
||||
return n.Parent.Alias() + "." + key
|
||||
}
|
||||
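Alias() renders the root column verbatim, joins a direct child of the root with a dot, and joins deeper children with the array separator, wrapping everything but the root in backticks. The same rules in a TypeScript sketch, assuming the collector's ArraySeparator renders as "[]." as the test expectations above imply:

interface AccessNode {
  name: string;
  parent?: AccessNode;
  isRoot?: boolean;
}

function alias(n: AccessNode): string {
  if (n.isRoot) return n.name;
  if (!n.parent) return `\`${n.name}\``;
  const parentAlias = alias(n.parent).replace(/^`|`$/g, '');
  const sep = n.parent.isRoot ? '.' : '[].';
  return `\`${parentAlias}${sep}${n.name}\``;
}

const root: AccessNode = { name: 'body_json', isRoot: true };
const education: AccessNode = { name: 'education', parent: root };
const leaf: AccessNode = { name: 'name', parent: education };
console.log(alias(leaf)); // `body_json.education[].name`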
Block a user