Compare commits


29 Commits

Author SHA1 Message Date
Karan Balani
103bd160c8 chore: address last set of PR comments 2025-12-29 20:58:28 +05:30
Karan Balani
c4dbe5822a chore: fix naming in example yaml for flagger as suggested by cursor bot 2025-12-29 18:59:59 +05:30
Karan Balani
0da94dbd4c chore: remove orgGetter dependency in flagger handler 2025-12-29 18:26:46 +05:30
Karan Balani
2eb38d3ad4 chore: go-fmt 2025-12-29 17:51:19 +05:30
Karan Balani
c570655333 chore: update openapi spec 2025-12-29 17:41:40 +05:30
Karan Balani
9bd37b9d01 chore: strong type config provider config 2025-12-29 17:28:33 +05:30
Karan Balani
d777a6032a chore: remove unwanted code 2025-12-29 16:52:00 +05:30
Karan Balani
75f37664af chore: address cursor bugbot comments 2025-12-29 16:41:20 +05:30
Karan Balani
a8aa5c0ded chore: address PR suggestions 2025-12-29 16:36:36 +05:30
Karan Balani
3351ddd8fe chore: update openapi spec 2025-12-29 16:29:10 +05:30
Karan Balani
c773382cb4 chore: make the registry empty for first merge 2025-12-29 16:29:10 +05:30
Karan Balani
5a6854a126 chore: address PR comments and improvements 2025-12-29 16:29:10 +05:30
Karan Balani
9a1319d9f8 chore: address cursor bugbot comments 2025-12-29 16:27:54 +05:30
Karan Balani
cd9e537794 chore: address PR comments and huddle discussions 2025-12-29 16:27:54 +05:30
Karan Balani
b6632f1e53 feat: add api for features and improve flagger config 2025-12-29 16:24:13 +05:30
Karan Balani
4245261299 feat: introduce flagger 2025-12-29 16:23:29 +05:30
Karan Balani
0340b87d87 chore: temp commit 2025-12-29 16:22:17 +05:30
Karan Balani
d11e60f5cc chore: rename flagr to flagger 2025-12-29 16:22:17 +05:30
Karan Balani
7a9d9f333c feat: introduce flagr for feature flags 2025-12-29 16:22:17 +05:30
Abhi kumar
f99821bc40 perf: optimize uplot chart data processing (#9881) 2025-12-29 14:40:51 +05:30
Niladri Adhikary
7c051601f2 fix: normalize context-prefixed field keys (#9089)
* feat: normalize context-prefixed field keys

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* test: added tests validation for context-prefixed field

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* refactor: moved logic to parse.go

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* fix: attribute key edge case

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* fix: corrupt field context

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* fix: corrupt field context

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* refactor: parse and signal

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* refactor: mismatch for unknown signal

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

---------

Signed-off-by: “niladrix719” <niladrix719@gmail.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-12-28 23:17:44 +05:30
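As a language-neutral sketch of the normalization technique this PR describes (illustrative only; the actual change lives in the backend query parser, and the prefix names here are assumptions):

// Strip a context prefix such as "resource." from a field key and record
// it as the field's context, instead of leaving it embedded in the key.
type FieldContext = 'resource' | 'attribute' | 'unspecified';

function normalizeFieldKey(raw: string): { key: string; context: FieldContext } {
  const prefixes = ['resource', 'attribute'] as const;
  for (const prefix of prefixes) {
    if (raw.startsWith(`${prefix}.`)) {
      return { key: raw.slice(prefix.length + 1), context: prefix };
    }
  }
  return { key: raw, context: 'unspecified' };
}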
Niladri Adhikary
b9f9c00da5 feat: implement case-insensitive query name handling in formula evaluation (#9302)
* feat: implement case-insensitive query name handling in formula evaluation

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* feat: optimized lookups

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* feat: updated naming

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* fix: normalize keys in canDefaultZero for case insensitivity

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* fix: lookup

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

* fix: canDefaultZero lookup

Signed-off-by: “niladrix719” <niladrix719@gmail.com>

---------

Signed-off-by: “niladrix719” <niladrix719@gmail.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-12-28 22:29:37 +05:30
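A minimal sketch of the case-insensitive lookup technique this PR describes (illustrative only; the real change is in the backend formula evaluator):

// Normalize query names to lower case once when building the lookup,
// so a formula referencing "a" and a query named "A" resolve to the
// same series.
function buildLookup(results: Record<string, number[]>): Map<string, number[]> {
  const lookup = new Map<string, number[]>();
  for (const [name, series] of Object.entries(results)) {
    lookup.set(name.toLowerCase(), series);
  }
  return lookup;
}

const lookup = buildLookup({ A: [1, 2, 3] });
lookup.get('a'.toLowerCase()); // [1, 2, 3]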
Asp-irin
49ff86e65a fix: correctly display OS type value for host detail
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-12-28 16:47:45 +05:30
Amlan Kumar Nandy
2dc6febb38 chore: warn users about incorrect usage with y axis unit (#9588) 2025-12-28 10:33:43 +05:30
lif
4ae268d867 fix: improve light mode text color for selected values in query builder (#9876)
In light mode, selected values in query builder Select components appeared
disabled due to inheriting light-colored text from dark mode styles.

This fix adds explicit text color (--text-ink-400) for .ant-select-selection-item
elements in light mode across QueryBuilder, QueryBuilderV2, and
MetricsAggregateSection styles.

Fixes #9801

Signed-off-by: majiayu000 <1835304752@qq.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-12-26 17:38:25 +00:00
Amlan Kumar Nandy
9d78d67461 chore: y axis management in metrics explorer (#9587) 2025-12-26 17:14:15 +00:00
Abhi kumar
055d0ba90d fix: added fix for limit still getting sent in payload even after removal (#9877)
* fix: added fix for limit still getting sent in payload even after removal

* chore: removed console log
2025-12-26 17:35:08 +05:30
Abhi kumar
09dc95cfe9 fix: added fix for metric selection tooltip scroll issue (#9869) 2025-12-26 13:40:19 +05:30
Abhi kumar
d218cd5733 fix: added fix for reduceTo selection based on metric type + code cleanup (#9732)
* fix: added fixes for reduce-to, auto open + metric based default value

* fix: fixed race condition

* chore: removed unnecessary useEffect from SpaceAggregation

* test: added fix for failing test in useQueryBuilderOperations

* fix: pr review comments

* fix: pr review changes
2025-12-25 22:54:25 +05:30
102 changed files with 3595 additions and 1843 deletions

View File

@@ -278,3 +278,13 @@ tokenizer:
token:
# The maximum number of tokens a user can have. This limits the number of concurrent sessions a user can have.
max_per_user: 5
##################### Flagger #####################
flagger:
# Config holds the overrides for the feature flags, which come directly from the config file.
config:
boolean:
string:
float:
integer:
object:
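A hedged illustration of how these overrides might be filled in (the flag names and values below are hypothetical; only the five type buckets come from the config above):

flagger:
  config:
    boolean:
      # Hypothetical boolean flag forced on via config.
      use_new_query_builder: true
    string:
      default_theme: "dark"
    float:
      sampling_rate: 0.25
    integer:
      max_concurrent_exports: 5
    object:
      rollout:
        stage: beta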

View File

@@ -1726,6 +1726,51 @@ paths:
summary: Update user preference
tags:
- preferences
/api/v2/features:
get:
deprecated: false
description: This endpoint returns the supported features and their details
operationId: GetFeatures
responses:
"200":
content:
application/json:
schema:
properties:
data:
items:
$ref: '#/components/schemas/FeaturetypesGettableFeature'
type: array
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get features
tags:
- features
/api/v2/orgs/me:
get:
deprecated: false
@@ -2173,6 +2218,24 @@ components:
message:
type: string
type: object
FeaturetypesGettableFeature:
properties:
defaultVariant:
type: string
description:
type: string
kind:
type: string
name:
type: string
resolvedValue: {}
stage:
type: string
variants:
additionalProperties: {}
nullable: true
type: object
type: object
PreferencetypesPreference:
properties:
allowedScopes:
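For reference, a client-side type mirroring the FeaturetypesGettableFeature schema above could look like this (a sketch; field optionality is an assumption, since the spec does not list required fields):

interface GettableFeature {
  name: string;
  kind: string;
  stage: string;
  description: string;
  defaultVariant: string;
  resolvedValue: unknown; // the schema leaves this untyped ({})
  variants: Record<string, unknown> | null;
}

interface GetFeaturesResponse {
  status: string;
  data: GettableFeature[];
}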

View File

@@ -0,0 +1,29 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
export const getMetricMetadata = async (
metricName: string,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
try {
const encodedMetricName = encodeURIComponent(metricName);
const response = await axios.get(
`/metrics/metadata?metricName=${encodedMetricName}`,
{
signal,
headers,
},
);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
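A hedged usage sketch for the getMetricMetadata helper above (the AbortController wiring and the success check are illustrative, not part of the diff):

const controller = new AbortController();

getMetricMetadata('http_server_duration', controller.signal).then((result) => {
  // SuccessResponseV2 carries httpStatusCode and data; anything else came
  // back through ErrorResponseHandlerV2.
  if ('httpStatusCode' in result) {
    console.log(result.httpStatusCode, result.data);
  }
});

// Cancel the in-flight request, e.g. on component unmount.
controller.abort();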

View File

@@ -560,6 +560,10 @@
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
}
}
@@ -569,6 +573,10 @@
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
.ant-select-arrow {

View File

@@ -169,6 +169,10 @@
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100) !important;
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
}
}

View File

@@ -32,6 +32,7 @@ const ADD_ONS_KEYS = {
ORDER_BY: 'order_by',
LIMIT: 'limit',
LEGEND_FORMAT: 'legend_format',
REDUCE_TO: 'reduce_to',
};
const ADD_ONS_KEYS_TO_QUERY_PATH = {
@@ -40,13 +41,14 @@ const ADD_ONS_KEYS_TO_QUERY_PATH = {
[ADD_ONS_KEYS.ORDER_BY]: 'orderBy',
[ADD_ONS_KEYS.LIMIT]: 'limit',
[ADD_ONS_KEYS.LEGEND_FORMAT]: 'legend',
[ADD_ONS_KEYS.REDUCE_TO]: 'reduceTo',
};
const ADD_ONS = [
{
icon: <BarChart2 size={14} />,
label: 'Group By',
key: 'group_by',
key: ADD_ONS_KEYS.GROUP_BY,
description:
'Break down data by attributes like service name, endpoint, status code, or region. Essential for spotting patterns and comparing performance across different segments.',
docLink: 'https://signoz.io/docs/userguide/query-builder-v5/#grouping',
@@ -54,7 +56,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Having',
key: 'having',
key: ADD_ONS_KEYS.HAVING,
description:
'Filter grouped results based on aggregate conditions. Show only groups meeting specific criteria, like error rates > 5% or p99 latency > 500',
docLink:
@@ -63,7 +65,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Order By',
key: 'order_by',
key: ADD_ONS_KEYS.ORDER_BY,
description:
'Sort results to surface what matters most. Quickly identify slowest operations, most frequent errors, or highest resource consumers.',
docLink:
@@ -72,7 +74,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Limit',
key: 'limit',
key: ADD_ONS_KEYS.LIMIT,
description:
'Show only the top/bottom N results. Perfect for focusing on outliers, reducing noise, and improving dashboard performance.',
docLink:
@@ -81,7 +83,7 @@ const ADD_ONS = [
{
icon: <ScrollText size={14} />,
label: 'Legend format',
key: 'legend_format',
key: ADD_ONS_KEYS.LEGEND_FORMAT,
description:
'Customize series labels using variables like {{service.name}}-{{endpoint}}. Makes charts readable at a glance during incident investigation.',
docLink:
@@ -92,7 +94,7 @@ const ADD_ONS = [
const REDUCE_TO = {
icon: <ScrollText size={14} />,
label: 'Reduce to',
key: 'reduce_to',
key: ADD_ONS_KEYS.REDUCE_TO,
description:
'Apply mathematical operations like sum, average, min, max, or percentiles to reduce multiple time series into a single value.',
docLink:
@@ -218,10 +220,9 @@ function QueryAddOns({
);
const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));
// Filter and set selected views: add-ons that are both active and available
setSelectedViews(
ADD_ONS.filter(
filteredAddOns.filter(
(addOn) =>
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
),

View File

@@ -1,6 +1,12 @@
/* eslint-disable */
import { fireEvent, render, screen } from '@testing-library/react';
import React from 'react';
import {
fireEvent,
render,
screen,
userEvent,
waitFor,
within,
} from 'tests/test-utils';
import QueryAddOns from '../QueryV2/QueryAddOns/QueryAddOns';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -55,16 +61,7 @@ jest.mock('../QueryV2/QueryAddOns/HavingFilter/HavingFilter', () => ({
),
}));
jest.mock(
'container/QueryBuilder/filters/ReduceToFilter/ReduceToFilter',
() => ({
ReduceToFilter: ({ onChange }: any) => (
<button data-testid="reduce-to" onClick={() => onChange('sum')}>
ReduceToFilter
</button>
),
}),
);
// ReduceToFilter is not mocked - we test the actual Ant Design Select component
function baseQuery(overrides: Partial<any> = {}): any {
return {
@@ -140,7 +137,7 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('order-by-content')).toBeInTheDocument();
});
it('limit input auto-opens when limit is set and changing it calls handler', () => {
it('limit input auto-opens when limit is set and changing it calls handler', async () => {
render(
<QueryAddOns
query={baseQuery({ limit: 5 })}
@@ -183,4 +180,88 @@ describe('QueryAddOns', () => {
expect(screen.getByTestId('limit-content')).toBeInTheDocument();
expect(limitInput.value).toBe('7');
});
it('shows reduce-to add-on when showReduceTo is true', () => {
render(
<QueryAddOns
query={baseQuery()}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('query-add-on-reduce_to')).toBeInTheDocument();
});
it('auto-opens reduce-to content when reduceTo is set', () => {
render(
<QueryAddOns
query={baseQuery({ reduceTo: 'sum' })}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
it('calls handleSetQueryData when reduce-to value changes', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const query = baseQuery({
reduceTo: 'avg',
aggregations: [{ id: 'a', operator: 'count', reduceTo: 'avg' }],
});
render(
<QueryAddOns
query={query}
version="v5"
isListViewPanel={false}
showReduceTo
panelType={PANEL_TYPES.TIME_SERIES}
index={0}
isForTraceOperator={false}
/>,
);
// Wait for the reduce-to content section to be visible (it auto-opens when reduceTo is set)
await waitFor(() => {
expect(screen.getByTestId('reduce-to-content')).toBeInTheDocument();
});
// Get the Select component by its role (combobox)
// The Select is within the reduce-to-content section
const reduceToContent = screen.getByTestId('reduce-to-content');
const selectCombobox = within(reduceToContent).getByRole('combobox');
// Open the dropdown by clicking on the combobox
await user.click(selectCombobox);
// Wait for the dropdown listbox to appear
await screen.findByRole('listbox');
// Find and click the "Sum" option
const sumOption = await screen.findByText('Sum of values in timeframe');
await user.click(sumOption);
// Verify the handler was called with the correct value
await waitFor(() => {
expect(mockHandleSetQueryData).toHaveBeenCalledWith(0, {
...query,
aggregations: [
{
...(query.aggregations?.[0] as any),
reduceTo: 'sum',
},
],
});
});
});
});

View File

@@ -1,11 +1,18 @@
import './styles.scss';
import { Select } from 'antd';
import { WarningFilled } from '@ant-design/icons';
import { Select, Tooltip } from 'antd';
import { DefaultOptionType } from 'antd/es/select';
import classNames from 'classnames';
import { useMemo } from 'react';
import { UniversalYAxisUnitMappings } from './constants';
import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
import { getYAxisCategories, mapMetricUnitToUniversalUnit } from './utils';
import {
getUniversalNameFromMetricUnit,
getYAxisCategories,
mapMetricUnitToUniversalUnit,
} from './utils';
function YAxisUnitSelector({
value,
@@ -14,9 +21,24 @@ function YAxisUnitSelector({
loading = false,
'data-testid': dataTestId,
source,
initialValue,
}: YAxisUnitSelectorProps): JSX.Element {
const universalUnit = mapMetricUnitToUniversalUnit(value);
const incompatibleUnitMessage = useMemo(() => {
if (!initialValue || !value || loading) return '';
const initialUniversalUnit = mapMetricUnitToUniversalUnit(initialValue);
const currentUniversalUnit = mapMetricUnitToUniversalUnit(value);
if (initialUniversalUnit !== currentUniversalUnit) {
const initialUniversalUnitName = getUniversalNameFromMetricUnit(
initialValue,
);
const currentUniversalUnitName = getUniversalNameFromMetricUnit(value);
return `Unit mismatch. Saved unit is ${initialUniversalUnitName}, but ${currentUniversalUnitName} is selected.`;
}
return '';
}, [initialValue, value, loading]);
const handleSearch = (
searchTerm: string,
currentOption: DefaultOptionType | undefined,
@@ -49,6 +71,16 @@ function YAxisUnitSelector({
placeholder={placeholder}
filterOption={(input, option): boolean => handleSearch(input, option)}
loading={loading}
suffixIcon={
incompatibleUnitMessage ? (
<Tooltip title={incompatibleUnitMessage}>
<WarningFilled />
</Tooltip>
) : undefined
}
className={classNames({
'warning-state': incompatibleUnitMessage,
})}
data-testid={dataTestId}
>
{categories.map((category) => (

View File

@@ -91,4 +91,36 @@ describe('YAxisUnitSelector', () => {
expect(screen.getByText('Bytes (B)')).toBeInTheDocument();
expect(screen.getByText('Seconds (s)')).toBeInTheDocument();
});
it('shows warning message when incompatible unit is selected', () => {
render(
<YAxisUnitSelector
source={YAxisSource.ALERTS}
value="By"
onChange={mockOnChange}
initialValue="s"
/>,
);
const warningIcon = screen.getByLabelText('warning');
expect(warningIcon).toBeInTheDocument();
fireEvent.mouseOver(warningIcon);
return screen
.findByText(
'Unit mismatch. Saved unit is Seconds (s), but Bytes (B) is selected.',
)
.then((el) => expect(el).toBeInTheDocument());
});
it('does not show warning message when compatible unit is selected', () => {
render(
<YAxisUnitSelector
source={YAxisSource.ALERTS}
value="s"
onChange={mockOnChange}
initialValue="s"
/>,
);
const warningIcon = screen.queryByLabelText('warning');
expect(warningIcon).not.toBeInTheDocument();
});
});

View File

@@ -3,3 +3,13 @@
width: 220px;
}
}
.warning-state {
.ant-select-selector {
border-color: var(--bg-amber-400) !important;
}
.anticon {
color: var(--bg-amber-400) !important;
}
}

View File

@@ -6,6 +6,7 @@ export interface YAxisUnitSelectorProps {
disabled?: boolean;
'data-testid'?: string;
source: YAxisSource;
initialValue?: string;
}
export enum UniversalYAxisUnit {

View File

@@ -55,6 +55,7 @@ export const REACT_QUERY_KEY = {
GET_METRIC_DETAILS: 'GET_METRIC_DETAILS',
GET_RELATED_METRICS: 'GET_RELATED_METRICS',
GET_INSPECT_METRICS_DETAILS: 'GET_INSPECT_METRICS_DETAILS',
GET_METRIC_METADATA: 'GET_METRIC_METADATA',
// Traces Funnels Query Keys
GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',

View File

@@ -5,9 +5,11 @@ import { useCreateAlertState } from 'container/CreateAlertV2/context';
import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview';
import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useState } from 'react';
import useGetYAxisUnit from 'hooks/useGetYAxisUnit';
import { useEffect, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { AlertDef } from 'types/api/alerts/def';
import { EQueryType } from 'types/common/dashboard';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -18,7 +20,13 @@ export interface ChartPreviewProps {
function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
const { currentQuery, panelType, stagedQuery } = useQueryBuilder();
const { thresholdState, alertState, setAlertState } = useCreateAlertState();
const {
alertType,
thresholdState,
alertState,
setAlertState,
isEditMode,
} = useCreateAlertState();
const { selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -27,6 +35,25 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
const yAxisUnit = alertState.yAxisUnit || '';
const fetchYAxisUnit =
!isEditMode && alertType === AlertTypes.METRICS_BASED_ALERT;
const selectedQueryName = thresholdState.selectedQuery;
const { yAxisUnit: initialYAxisUnit, isLoading } = useGetYAxisUnit(
selectedQueryName,
{
enabled: fetchYAxisUnit,
},
);
// Every time a new metric is selected, set the y-axis unit to its unit value if present
// Only for metrics-based alerts
useEffect(() => {
if (fetchYAxisUnit) {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: initialYAxisUnit });
}
}, [initialYAxisUnit, setAlertState, fetchYAxisUnit]);
const headline = (
<div className="chart-preview-headline">
<PlotTag
@@ -34,11 +61,13 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
panelType={panelType || PANEL_TYPES.TIME_SERIES}
/>
<YAxisUnitSelector
value={alertState.yAxisUnit}
value={yAxisUnit}
initialValue={initialYAxisUnit}
onChange={(value): void => {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
}}
source={YAxisSource.ALERTS}
loading={isLoading}
/>
</div>
);

View File

@@ -120,7 +120,6 @@ function FullView({
originalGraphType: selectedPanelType,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
return {
query: updatedQuery,
graphType: PANEL_TYPES.LIST,

View File

@@ -137,7 +137,6 @@ function GridCardGraph({
originalGraphType: widget.panelTypes,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
const initialDataSource = updatedQuery.builder.queryData[0].dataSource;
return {
query: updatedQuery,

View File

@@ -58,6 +58,27 @@
.explore-content {
padding: 0 8px;
.y-axis-unit-selector-container {
display: flex;
align-items: center;
gap: 10px;
padding-top: 10px;
margin-bottom: 10px;
.save-unit-container {
display: flex;
align-items: center;
gap: 10px;
.ant-btn {
border-radius: 2px;
.ant-typography {
font-size: 12px;
}
}
}
}
.ant-space {
margin-top: 10px;
margin-bottom: 20px;
@@ -75,6 +96,14 @@
.time-series-view {
min-width: 100%;
width: 100%;
position: relative;
.no-unit-warning {
position: absolute;
top: 30px;
right: 40px;
z-index: 1000;
}
}
.time-series-container {

View File

@@ -1,7 +1,7 @@
import './Explorer.styles.scss';
import * as Sentry from '@sentry/react';
import { Switch } from 'antd';
import { Switch, Tooltip } from 'antd';
import logEvent from 'api/common/logEvent';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import WarningPopover from 'components/WarningPopover/WarningPopover';
@@ -25,10 +25,14 @@ import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToD
import { v4 as uuid } from 'uuid';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
// import QuerySection from './QuerySection';
import MetricDetails from '../MetricDetails/MetricDetails';
import TimeSeries from './TimeSeries';
import { ExplorerTabs } from './types';
import { splitQueryIntoOneChartPerQuery } from './utils';
import {
getMetricUnits,
splitQueryIntoOneChartPerQuery,
useGetMetrics,
} from './utils';
const ONE_CHART_PER_QUERY_ENABLED_KEY = 'isOneChartPerQueryEnabled';
@@ -40,6 +44,34 @@ function Explorer(): JSX.Element {
currentQuery,
} = useQueryBuilder();
const { safeNavigate } = useSafeNavigate();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(false);
const metricNames = useMemo(() => {
const currentMetricNames: string[] = [];
stagedQuery?.builder.queryData.forEach((query) => {
if (query.aggregateAttribute?.key) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames;
}, [stagedQuery]);
const {
metrics,
isLoading: isMetricUnitsLoading,
isError: isMetricUnitsError,
} = useGetMetrics(metricNames);
const units = useMemo(() => getMetricUnits(metrics), [metrics]);
const areAllMetricUnitsSame = useMemo(
() =>
!isMetricUnitsLoading &&
!isMetricUnitsError &&
units.length > 0 &&
units.every((unit) => unit && unit === units[0]),
[units, isMetricUnitsLoading, isMetricUnitsError],
);
const [searchParams, setSearchParams] = useSearchParams();
const isOneChartPerQueryEnabled =
@@ -48,7 +80,66 @@ function Explorer(): JSX.Element {
const [showOneChartPerQuery, toggleShowOneChartPerQuery] = useState(
isOneChartPerQueryEnabled,
);
const [disableOneChartPerQuery, toggleDisableOneChartPerQuery] = useState(
false,
);
const [selectedTab] = useState<ExplorerTabs>(ExplorerTabs.TIME_SERIES);
const [yAxisUnit, setYAxisUnit] = useState<string | undefined>();
const unitsLength = useMemo(() => units.length, [units]);
const firstUnit = useMemo(() => units?.[0], [units]);
useEffect(() => {
// Set the y axis unit to the first metric unit if
// 1. There is one metric unit and it is not empty
// 2. All metric units are the same and not empty
// Else, set the y axis unit to empty if
// 1. There is more than one metric unit and they are not all the same
// 2. There are no metric units
// 3. There is exactly one metric unit but it is empty/undefined
if (unitsLength === 0) {
setYAxisUnit(undefined);
} else if (unitsLength === 1 && firstUnit) {
setYAxisUnit(firstUnit);
} else if (unitsLength === 1 && !firstUnit) {
setYAxisUnit(undefined);
} else if (areAllMetricUnitsSame) {
if (firstUnit) {
setYAxisUnit(firstUnit);
} else {
setYAxisUnit(undefined);
}
} else if (unitsLength > 1 && !areAllMetricUnitsSame) {
setYAxisUnit(undefined);
}
}, [unitsLength, firstUnit, areAllMetricUnitsSame]);
useEffect(() => {
// Don't apply logic during loading to avoid overwriting user preferences
if (isMetricUnitsLoading) {
return;
}
// Disable one chart per query if -
// 1. There is more than one metric
// 2. The metric units are not the same
if (units.length > 1 && !areAllMetricUnitsSame) {
toggleShowOneChartPerQuery(true);
toggleDisableOneChartPerQuery(true);
} else if (units.length <= 1) {
toggleShowOneChartPerQuery(false);
toggleDisableOneChartPerQuery(true);
} else {
// When units are the same and loading is complete, restore URL-based preference
toggleShowOneChartPerQuery(isOneChartPerQueryEnabled);
toggleDisableOneChartPerQuery(false);
}
}, [
units,
areAllMetricUnitsSame,
isMetricUnitsLoading,
isOneChartPerQueryEnabled,
]);
const handleToggleShowOneChartPerQuery = (): void => {
toggleShowOneChartPerQuery(!showOneChartPerQuery);
@@ -68,15 +159,20 @@ function Explorer(): JSX.Element {
[updateAllQueriesOperators],
);
const exportDefaultQuery = useMemo(
() =>
updateAllQueriesOperators(
currentQuery || initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
),
[currentQuery, updateAllQueriesOperators],
);
const exportDefaultQuery = useMemo(() => {
const query = updateAllQueriesOperators(
currentQuery || initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
);
if (yAxisUnit && !query.unit) {
return {
...query,
unit: yAxisUnit,
};
}
return query;
}, [currentQuery, updateAllQueriesOperators, yAxisUnit]);
useShareBuilderUrl({ defaultValue: defaultQuery });
@@ -90,8 +186,16 @@ function Explorer(): JSX.Element {
const widgetId = uuid();
let query = queryToExport || exportDefaultQuery;
if (yAxisUnit && !query.unit) {
query = {
...query,
unit: yAxisUnit,
};
}
const dashboardEditView = generateExportToDashboardLink({
query: queryToExport || exportDefaultQuery,
query,
panelType: PANEL_TYPES.TIME_SERIES,
dashboardId: dashboard.id,
widgetId,
@@ -99,17 +203,33 @@ function Explorer(): JSX.Element {
safeNavigate(dashboardEditView);
},
[exportDefaultQuery, safeNavigate],
[exportDefaultQuery, safeNavigate, yAxisUnit],
);
const splitedQueries = useMemo(
() =>
splitQueryIntoOneChartPerQuery(
stagedQuery || initialQueriesMap[DataSource.METRICS],
metricNames,
units,
),
[stagedQuery],
[stagedQuery, metricNames, units],
);
const [selectedMetricName, setSelectedMetricName] = useState<string | null>(
null,
);
const handleOpenMetricDetails = (metricName: string): void => {
setIsMetricDetailsOpen(true);
setSelectedMetricName(metricName);
};
const handleCloseMetricDetails = (): void => {
setIsMetricDetailsOpen(false);
setSelectedMetricName(null);
};
useEffect(() => {
logEvent(MetricsExplorerEvents.TabChanged, {
[MetricsExplorerEventKeys.Tab]: 'explorer',
@@ -123,17 +243,44 @@ function Explorer(): JSX.Element {
const [warning, setWarning] = useState<Warning | undefined>(undefined);
const oneChartPerQueryDisabledTooltip = useMemo(() => {
if (splitedQueries.length <= 1) {
return 'One chart per query cannot be toggled for a single query.';
}
if (units.length <= 1) {
return 'One chart per query cannot be toggled when there is only one metric.';
}
if (disableOneChartPerQuery) {
return 'One chart per query cannot be disabled for multiple queries with different units.';
}
return undefined;
}, [disableOneChartPerQuery, splitedQueries.length, units.length]);
// Show the y axis unit selector if -
// 1. There is only one metric
// 2. The metric has no saved unit
const showYAxisUnitSelector = useMemo(
() => !isMetricUnitsLoading && units.length === 1 && !units[0],
[units, isMetricUnitsLoading],
);
return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-explore-container">
<div className="explore-header">
<div className="explore-header-left-actions">
<span>1 chart/query</span>
<Switch
checked={showOneChartPerQuery}
onChange={handleToggleShowOneChartPerQuery}
size="small"
/>
<Tooltip
open={disableOneChartPerQuery ? undefined : false}
title={oneChartPerQueryDisabledTooltip}
>
<Switch
checked={showOneChartPerQuery}
onChange={handleToggleShowOneChartPerQuery}
disabled={disableOneChartPerQuery || splitedQueries.length <= 1}
size="small"
/>
</Tooltip>
</div>
<div className="explore-header-right-actions">
{!isEmpty(warning) && <WarningPopover warningData={warning} />}
@@ -174,6 +321,16 @@ function Explorer(): JSX.Element {
<TimeSeries
showOneChartPerQuery={showOneChartPerQuery}
setWarning={setWarning}
areAllMetricUnitsSame={areAllMetricUnitsSame}
isMetricUnitsLoading={isMetricUnitsLoading}
isMetricUnitsError={isMetricUnitsError}
metricUnits={units}
metricNames={metricNames}
metrics={metrics}
handleOpenMetricDetails={handleOpenMetricDetails}
yAxisUnit={yAxisUnit}
setYAxisUnit={setYAxisUnit}
showYAxisUnitSelector={showYAxisUnitSelector}
/>
)}
{/* TODO: Enable once we have resolved all related metrics issues */}
@@ -187,9 +344,17 @@ function Explorer(): JSX.Element {
query={exportDefaultQuery}
sourcepage={DataSource.METRICS}
onExport={handleExport}
isOneChartPerQuery={false}
isOneChartPerQuery={showOneChartPerQuery}
splitedQueries={splitedQueries}
/>
{isMetricDetailsOpen && (
<MetricDetails
metricName={selectedMetricName}
isOpen={isMetricDetailsOpen}
onClose={handleCloseMetricDetails}
isModalTimeSelection={false}
/>
)}
</Sentry.ErrorBoundary>
);
}

View File

@@ -1,14 +1,18 @@
import { Color } from '@signozhq/design-tokens';
import { Tooltip, Typography } from 'antd';
import { isAxiosError } from 'axios';
import classNames from 'classnames';
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters/BuilderUnitsFilter/BuilderUnits';
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
import { convertDataValueToMs } from 'container/TimeSeriesView/utils';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { useMemo, useState } from 'react';
import { AlertTriangle } from 'lucide-react';
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -24,6 +28,13 @@ import { splitQueryIntoOneChartPerQuery } from './utils';
function TimeSeries({
showOneChartPerQuery,
setWarning,
isMetricUnitsLoading,
metricUnits,
metricNames,
handleOpenMetricDetails,
yAxisUnit,
setYAxisUnit,
showYAxisUnitSelector,
}: TimeSeriesProps): JSX.Element {
const { stagedQuery, currentQuery } = useQueryBuilder();
@@ -56,13 +67,14 @@ function TimeSeries({
showOneChartPerQuery
? splitQueryIntoOneChartPerQuery(
stagedQuery || initialQueriesMap[DataSource.METRICS],
metricNames,
metricUnits,
)
: [stagedQuery || initialQueriesMap[DataSource.METRICS]],
[showOneChartPerQuery, stagedQuery],
// eslint-disable-next-line react-hooks/exhaustive-deps
[showOneChartPerQuery, stagedQuery, JSON.stringify(metricUnits)],
);
const [yAxisUnit, setYAxisUnit] = useState<string>('');
const queries = useQueries(
queryPayloads.map((payload, index) => ({
queryKey: [
@@ -126,32 +138,148 @@ function TimeSeries({
setYAxisUnit(value);
};
// TODO: Enable once we have resolved all related metrics v2 api issues
// Show the save unit button if
// 1. There is only one metric
// 2. The metric has no saved unit
// 3. The user has selected a unit
// const showSaveUnitButton = useMemo(
// () =>
// metricUnits.length === 1 &&
// Boolean(metrics?.[0]) &&
// !metricUnits[0] &&
// yAxisUnit,
// [metricUnits, metrics, yAxisUnit],
// );
// const {
// mutate: updateMetricMetadata,
// isLoading: isUpdatingMetricMetadata,
// } = useUpdateMetricMetadata();
// const handleSaveUnit = (): void => {
// updateMetricMetadata(
// {
// metricName: metricNames[0],
// payload: {
// unit: yAxisUnit,
// description: metrics[0]?.description ?? '',
// metricType: metrics[0]?.type as MetricType,
// temporality: metrics[0]?.temporality,
// },
// },
// {
// onSuccess: () => {
// notifications.success({
// message: 'Unit saved successfully',
// });
// queryClient.invalidateQueries([
// REACT_QUERY_KEY.GET_METRIC_DETAILS,
// metricNames[0],
// ]);
// },
// onError: () => {
// notifications.error({
// message: 'Failed to save unit',
// });
// },
// },
// );
// };
return (
<>
<BuilderUnitsFilter onChange={onUnitChangeHandler} yAxisUnit={yAxisUnit} />
<div className="y-axis-unit-selector-container">
{showYAxisUnitSelector && (
<>
<YAxisUnitSelector
onChange={onUnitChangeHandler}
value={yAxisUnit}
source={YAxisSource.EXPLORER}
data-testid="y-axis-unit-selector"
/>
{/* TODO: Enable once we have resolved all related metrics v2 api issues */}
{/* {showSaveUnitButton && (
<div className="save-unit-container">
<Typography.Text>
Save the selected unit for this metric?
</Typography.Text>
<Button
type="primary"
size="small"
disabled={isUpdatingMetricMetadata}
onClick={handleSaveUnit}
>
<Typography.Paragraph>Yes</Typography.Paragraph>
</Button>
</div>
)} */}
</>
)}
</div>
<div
className={classNames({
'time-series-container': changeLayoutForOneChartPerQuery,
})}
>
{responseData.map((datapoint, index) => (
<div
className="time-series-view"
// eslint-disable-next-line react/no-array-index-key
key={index}
>
<TimeSeriesView
isFilterApplied={false}
isError={queries[index].isError}
isLoading={queries[index].isLoading}
data={datapoint}
yAxisUnit={yAxisUnit}
dataSource={DataSource.METRICS}
error={queries[index].error as APIError}
setWarning={setWarning}
/>
</div>
))}
{responseData.map((datapoint, index) => {
const isQueryDataItem = index < metricNames.length;
const metricName = isQueryDataItem ? metricNames[index] : undefined;
const metricUnit = isQueryDataItem ? metricUnits[index] : undefined;
// Show the no-unit warning if -
// 1. The metric query is not loading
// 2. The metric units are not loading
// 3. There is more than one metric
// 4. The current metric unit is empty
// 5. The entry corresponds to a queryData item
const isMetricUnitEmpty =
isQueryDataItem &&
!queries[index].isLoading &&
!isMetricUnitsLoading &&
metricUnits.length > 1 &&
!metricUnit &&
metricName;
const currentYAxisUnit = yAxisUnit || metricUnit;
return (
<div
className="time-series-view"
// eslint-disable-next-line react/no-array-index-key
key={index}
>
{isMetricUnitEmpty && metricName && (
<Tooltip
className="no-unit-warning"
title={
<Typography.Text>
This metric does not have a unit. Please set one for it in the{' '}
<Typography.Link
onClick={(): void => handleOpenMetricDetails(metricName)}
>
metric details
</Typography.Link>{' '}
page.
</Typography.Text>
}
>
<AlertTriangle size={16} color={Color.BG_AMBER_400} />
</Tooltip>
)}
<TimeSeriesView
isFilterApplied={false}
isError={queries[index].isError}
isLoading={queries[index].isLoading || isMetricUnitsLoading}
data={datapoint}
yAxisUnit={currentYAxisUnit}
dataSource={DataSource.METRICS}
error={queries[index].error as APIError}
setWarning={setWarning}
/>
</div>
);
})}
</div>
</>
);

View File

@@ -1,4 +1,6 @@
import { render, screen } from '@testing-library/react';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import * as useOptionsMenuHooks from 'container/OptionsMenu';
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
@@ -12,13 +14,18 @@ import { MemoryRouter } from 'react-router-dom';
import { useSearchParams } from 'react-router-dom-v5-compat';
import store from 'store';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import { DataSource } from 'types/common/queryBuilder';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import Explorer from '../Explorer';
import * as useGetMetricsHooks from '../utils';
const mockSetSearchParams = jest.fn();
const queryClient = new QueryClient();
const mockUpdateAllQueriesOperators = jest.fn();
const mockUpdateAllQueriesOperators = jest
.fn()
.mockReturnValue(initialQueriesMap[DataSource.METRICS]);
const mockUseQueryBuilderData = {
handleRunQuery: jest.fn(),
stagedQuery: initialQueriesMap[DataSource.METRICS],
@@ -126,6 +133,30 @@ jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
} as any);
const Y_AXIS_UNIT_SELECTOR_TEST_ID = 'y-axis-unit-selector';
const mockMetric: MetricMetadata = {
type: MetricType.SUM,
description: 'metric1 description',
unit: 'metric1 unit',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
};
function renderExplorer(): void {
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
}
describe('Explorer', () => {
beforeEach(() => {
jest.clearAllMocks();
@@ -142,17 +173,7 @@ describe('Explorer', () => {
mockSetSearchParams,
]);
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
renderExplorer();
expect(mockUpdateAllQueriesOperators).toHaveBeenCalledWith(
initialQueriesMap[DataSource.METRICS],
@@ -166,18 +187,13 @@ describe('Explorer', () => {
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
renderExplorer();
const toggle = screen.getByRole('switch');
expect(toggle).toBeChecked();
@@ -188,20 +204,132 @@ describe('Explorer', () => {
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
renderExplorer();
const toggle = screen.getByRole('switch');
expect(toggle).not.toBeChecked();
});
it('should not render y axis unit selector for single metric which has a unit', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
});
it('should not render y axis unit selector for multiple metrics with same unit', () => {
(useSearchParams as jest.Mock).mockReturnValueOnce([
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
});
it('should hide y axis unit selector for multiple metrics with different units', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).not.toBeInTheDocument();
// One chart per query toggle should be disabled
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).toBeDisabled();
});
it('should render empty y axis unit selector for a single metric with no unit', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [
{
type: MetricType.SUM,
description: 'metric1 description',
unit: '',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
},
],
});
renderExplorer();
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
expect(yAxisUnitSelector).toBeInTheDocument();
expect(yAxisUnitSelector).toHaveTextContent('Please select a unit');
});
it('one chart per query should be off and disabled when there is only one query', () => {
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric],
});
renderExplorer();
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).not.toBeChecked();
expect(oneChartPerQueryToggle).toBeDisabled();
});
it('one chart per query should be enabled by default when there are multiple metrics with the same unit', () => {
const mockQueryData = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric1',
},
};
const mockStagedQueryWithMultipleQueries = {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [mockQueryData, mockQueryData],
},
};
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
...mockUseQueryBuilderData,
stagedQuery: mockStagedQueryWithMultipleQueries,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [mockMetric, mockMetric],
});
renderExplorer();
const oneChartPerQueryToggle = screen.getByRole('switch');
expect(oneChartPerQueryToggle).toBeEnabled();
});
});

View File

@@ -0,0 +1,180 @@
import { render, RenderResult, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { UpdateMetricMetadataResponse } from 'api/metricsExplorer/updateMetricMetadata';
import * as useUpdateMetricMetadataHooks from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { UseUpdateMetricMetadataProps } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { UseMutationResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import TimeSeries from '../TimeSeries';
import { TimeSeriesProps } from '../types';
type MockUpdateMetricMetadata = UseMutationResult<
SuccessResponse<UpdateMetricMetadataResponse> | ErrorResponse,
Error,
UseUpdateMetricMetadataProps
>;
const mockUpdateMetricMetadata = jest.fn();
jest
.spyOn(useUpdateMetricMetadataHooks, 'useUpdateMetricMetadata')
.mockReturnValue(({
mutate: mockUpdateMetricMetadata,
isLoading: false,
} as Partial<MockUpdateMetricMetadata>) as MockUpdateMetricMetadata);
jest.mock('container/TimeSeriesView/TimeSeriesView', () => ({
__esModule: true,
default: jest.fn().mockReturnValue(
<div role="img" aria-label="warning">
TimeSeriesView
</div>,
),
}));
jest.mock('react-query', () => ({
...jest.requireActual('react-query'),
useQueryClient: jest.fn().mockReturnValue({
invalidateQueries: jest.fn(),
}),
useQueries: jest.fn().mockImplementation((queries: any[]) =>
queries.map(() => ({
data: undefined,
isLoading: false,
isError: false,
error: undefined,
})),
),
}));
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: jest.fn().mockReturnValue({
globalTime: {
selectedTime: '5min',
maxTime: 1713738000000,
minTime: 1713734400000,
},
}),
}));
const mockMetric: MetricMetadata = {
type: MetricType.SUM,
description: 'metric1 description',
unit: 'metric1 unit',
temporality: Temporality.CUMULATIVE,
isMonotonic: true,
};
const mockSetWarning = jest.fn();
const mockSetIsMetricDetailsOpen = jest.fn();
const mockSetYAxisUnit = jest.fn();
function renderTimeSeries(
overrides: Partial<TimeSeriesProps> = {},
): RenderResult {
return render(
<TimeSeries
showOneChartPerQuery={false}
setWarning={mockSetWarning}
areAllMetricUnitsSame={false}
isMetricUnitsLoading={false}
metricUnits={[]}
metricNames={[]}
metrics={[]}
isMetricUnitsError={false}
handleOpenMetricDetails={mockSetIsMetricDetailsOpen}
yAxisUnit="count"
setYAxisUnit={mockSetYAxisUnit}
showYAxisUnitSelector={false}
// eslint-disable-next-line react/jsx-props-no-spreading
{...overrides}
/>,
);
}
describe('TimeSeries', () => {
it('should render a warning icon when a metric has no unit among multiple metrics', () => {
const user = userEvent.setup();
const { container } = renderTimeSeries({
metricUnits: ['', 'count'],
metricNames: ['metric1', 'metric2'],
metrics: [undefined, undefined],
});
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
user.hover(alertIcon);
waitFor(() =>
expect(
screen.findByText('This metric does not have a unit'),
).toBeInTheDocument(),
);
});
it('clicking on warning icon tooltip should open metric details modal', async () => {
const user = userEvent.setup();
const { container } = renderTimeSeries({
metricUnits: ['', 'count'],
metricNames: ['metric1', 'metric2'],
metrics: [mockMetric, mockMetric],
yAxisUnit: 'seconds',
});
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
user.hover(alertIcon);
const metricDetailsLink = await screen.findByText('metric details');
user.click(metricDetailsLink);
waitFor(() =>
expect(mockSetIsMetricDetailsOpen).toHaveBeenCalledWith('metric1'),
);
});
// TODO: Unskip this test once the save unit button is implemented
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
it.skip('shows Save unit button when metric had no unit but one is selected', () => {
const { findByText, getByRole } = renderTimeSeries({
metricUnits: [undefined],
metricNames: ['metric1'],
metrics: [mockMetric],
yAxisUnit: 'seconds',
});
expect(
findByText('Save the selected unit for this metric?'),
).toBeInTheDocument();
const yesButton = getByRole('button', { name: 'Yes' });
expect(yesButton).toBeInTheDocument();
expect(yesButton).toBeEnabled();
});
// TODO: Unskip this test once the save unit button is implemented
// Tracking at - https://github.com/SigNoz/engineering-pod/issues/3495
it.skip('clicking on save unit button should update metric metadata', () => {
const user = userEvent.setup();
const { getByRole } = renderTimeSeries({
metricUnits: [''],
metricNames: ['metric1'],
metrics: [mockMetric],
yAxisUnit: 'seconds',
});
const yesButton = getByRole('button', { name: /Yes/i });
user.click(yesButton);
expect(mockUpdateMetricMetadata).toHaveBeenCalledWith(
{
metricName: 'metric1',
payload: expect.objectContaining({ unit: 'seconds' }),
},
expect.objectContaining({
onSuccess: expect.any(Function),
onError: expect.any(Function),
}),
);
});
});

View File

@@ -0,0 +1,161 @@
import { renderHook } from '@testing-library/react';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { initialQueriesMap } from 'constants/queryBuilder';
import * as useGetMultipleMetricsHook from 'hooks/metricsExplorer/useGetMultipleMetrics';
import { UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
MetricMetadata,
MetricMetadataResponse,
} from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
IBuilderFormula,
IBuilderQuery,
Query,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import {
getMetricUnits,
splitQueryIntoOneChartPerQuery,
useGetMetrics,
} from '../utils';
const MOCK_QUERY_DATA_1: IBuilderQuery = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric1',
},
};
const MOCK_QUERY_DATA_2: IBuilderQuery = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'metric2',
},
};
const MOCK_FORMULA_DATA: IBuilderFormula = {
expression: '1 + 1',
disabled: false,
queryName: 'Mock Formula',
legend: 'Mock Legend',
};
const MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA: Query = {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [MOCK_QUERY_DATA_1, MOCK_QUERY_DATA_2],
queryFormulas: [MOCK_FORMULA_DATA, MOCK_FORMULA_DATA],
},
};
describe('splitQueryIntoOneChartPerQuery', () => {
it('should split a query with multiple queryData to multiple distinct queries, each with a single queryData', () => {
const result = splitQueryIntoOneChartPerQuery(
MOCK_QUERY_WITH_MULTIPLE_QUERY_DATA,
['metric1', 'metric2'],
[undefined, 'unit2'],
);
expect(result).toHaveLength(4);
// Verify query 1 has the correct data
expect(result[0].builder.queryData).toHaveLength(1);
expect(result[0].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_1);
expect(result[0].builder.queryFormulas).toHaveLength(0);
expect(result[0].unit).toBeUndefined();
// Verify query 2 has the correct data
expect(result[1].builder.queryData).toHaveLength(1);
expect(result[1].builder.queryData[0]).toEqual(MOCK_QUERY_DATA_2);
expect(result[1].builder.queryFormulas).toHaveLength(0);
expect(result[1].unit).toBe('unit2');
// Verify query 3 has the correct data
expect(result[2].builder.queryFormulas).toHaveLength(1);
expect(result[2].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
expect(result[2].builder.queryData).toHaveLength(2); // 2 disabled queries
expect(result[2].builder.queryData[0].disabled).toBe(true);
expect(result[2].builder.queryData[1].disabled).toBe(true);
expect(result[2].unit).toBeUndefined();
// Verify query 4 has the correct data
expect(result[3].builder.queryFormulas).toHaveLength(1);
expect(result[3].builder.queryFormulas[0]).toEqual(MOCK_FORMULA_DATA);
expect(result[3].builder.queryData).toHaveLength(2); // 2 disabled queries
expect(result[3].builder.queryData[0].disabled).toBe(true);
expect(result[3].builder.queryData[1].disabled).toBe(true);
expect(result[3].unit).toBeUndefined();
});
});
const MOCK_METRIC_METADATA: MetricMetadata = {
description: 'Metric 1 description',
unit: 'unit1',
type: MetricType.GAUGE,
temporality: Temporality.DELTA,
isMonotonic: true,
};
describe('useGetMetrics', () => {
beforeEach(() => {
jest
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
.mockReturnValue([
({
isLoading: false,
isError: false,
data: {
httpStatusCode: 200,
data: {
status: 'success',
data: MOCK_METRIC_METADATA,
},
},
} as Partial<
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
]);
});
it('should return the correct metrics data', () => {
const { result } = renderHook(() => useGetMetrics(['metric1']));
expect(result.current.metrics).toHaveLength(1);
expect(result.current.metrics[0]).toBeDefined();
expect(result.current.metrics[0]).toEqual(MOCK_METRIC_METADATA);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return array of undefined values of correct length when metrics data is not yet loaded', () => {
jest
.spyOn(useGetMultipleMetricsHook, 'useGetMultipleMetrics')
.mockReturnValue([
({
isLoading: true,
isError: false,
} as Partial<
UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>
>) as UseQueryResult<SuccessResponseV2<MetricMetadataResponse>, Error>,
]);
const { result } = renderHook(() => useGetMetrics(['metric1']));
expect(result.current.metrics).toHaveLength(1);
expect(result.current.metrics[0]).toBeUndefined();
});
});
describe('getMetricUnits', () => {
it('should return the same unit for units that are not known to the universal unit mapper', () => {
const result = getMetricUnits([MOCK_METRIC_METADATA]);
expect(result).toHaveLength(1);
expect(result[0]).toEqual(MOCK_METRIC_METADATA.unit);
});
it('should return universal unit for units that are known to the universal unit mapper', () => {
const result = getMetricUnits([{ ...MOCK_METRIC_METADATA, unit: 'seconds' }]);
expect(result).toHaveLength(1);
expect(result[0]).toBe('s');
});
});

View File

@@ -3,6 +3,7 @@ import { Dispatch, SetStateAction } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
export enum ExplorerTabs {
TIME_SERIES = 'time-series',
@@ -12,6 +13,16 @@ export enum ExplorerTabs {
export interface TimeSeriesProps {
showOneChartPerQuery: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
areAllMetricUnitsSame: boolean;
isMetricUnitsLoading: boolean;
isMetricUnitsError: boolean;
metricUnits: (string | undefined)[];
metricNames: string[];
metrics: (MetricMetadata | undefined)[];
handleOpenMetricDetails: (metricName: string) => void;
yAxisUnit: string | undefined;
setYAxisUnit: (unit: string) => void;
showYAxisUnitSelector: boolean;
}
export interface RelatedMetricsProps {

View File

@@ -1,20 +1,40 @@
import { mapMetricUnitToUniversalUnit } from 'components/YAxisUnitSelector/utils';
import { useGetMultipleMetrics } from 'hooks/metricsExplorer/useGetMultipleMetrics';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { v4 as uuid } from 'uuid';
export const splitQueryIntoOneChartPerQuery = (query: Query): Query[] => {
/**
* Split a query with multiple queryData into multiple distinct queries, each with a single queryData.
* @param query - The query to split
* @param metricNames - The metric names corresponding to each queryData entry
* @param units - The units of the metrics, can be undefined if the metric has no unit
* @returns The split queries
*/
export const splitQueryIntoOneChartPerQuery = (
query: Query,
metricNames: string[],
units: (string | undefined)[],
): Query[] => {
const queries: Query[] = [];
query.builder.queryData.forEach((currentQuery) => {
const newQuery = {
...query,
id: uuid(),
builder: {
...query.builder,
queryData: [currentQuery],
queryFormulas: [],
},
};
queries.push(newQuery);
if (currentQuery.aggregateAttribute?.key) {
const metricIndex = metricNames.indexOf(
currentQuery.aggregateAttribute?.key,
);
const unit = metricIndex >= 0 ? units[metricIndex] : undefined;
const newQuery = {
...query,
id: uuid(),
builder: {
...query.builder,
queryData: [currentQuery],
queryFormulas: [],
},
unit,
};
queries.push(newQuery);
}
});
query.builder.queryFormulas.forEach((currentFormula) => {
@@ -35,3 +55,43 @@ export const splitQueryIntoOneChartPerQuery = (query: Query): Query[] => {
return queries;
};
/**
* Hook to get data for multiple metrics with a combined loading and error state
* @param metricNames - The names of the metrics to get
* @param isEnabled - Whether the hook is enabled
* @returns The loading state, the metrics data, and the error state
*/
export function useGetMetrics(
metricNames: string[],
isEnabled = true,
): {
isLoading: boolean;
isError: boolean;
metrics: (MetricMetadata | undefined)[];
} {
const metricsData = useGetMultipleMetrics(metricNames, {
enabled: metricNames.length > 0 && isEnabled,
});
return {
isLoading: metricsData.some((metric) => metric.isLoading),
metrics: metricsData
.map((metric) => metric.data?.data)
.map((data) => data?.data),
isError: metricsData.some((metric) => metric.isError),
};
}
/**
 * Get the units of the metrics, mapped to the universal unit standard.
* If the unit is not known to the universal unit mapper, it will return the unit as is.
* @param metrics - The metrics to get the units for
* @returns The units of the metrics, can be undefined if the metric has no unit
*/
export function getMetricUnits(
metrics: (MetricMetadata | undefined)[],
): (string | undefined)[] {
return metrics
.map((metric) => metric?.unit)
.map((unit) => mapMetricUnitToUniversalUnit(unit) || undefined);
}
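As a usage sketch, a minimal composition of the two helpers above (the metric names and metadata values are hypothetical; 'seconds' mapping to 's' follows the unit-mapper tests earlier in this diff):

import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

declare const stagedQuery: Query; // assumed: a builder query with two queryData entries

// 'seconds' is mapped to the universal unit 's'; an unresolved metric stays
// undefined, so index alignment with metricNames is preserved.
const units = getMetricUnits([
  { unit: 'seconds' } as MetricMetadata,
  undefined,
]); // => ['s', undefined]

// Each split query looks up its own unit by metric name.
const charts = splitQueryIntoOneChartPerQuery(
  stagedQuery,
  ['metric_a', 'metric_b'], // hypothetical metric names
  units,
);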

View File

@@ -131,8 +131,8 @@ function MetricDetails({
>
Open in Explorer
</Button>
{/* Show the based on the feature flag. Will remove before releasing the feature */}
{showInspectFeature && (
{/* Show the inspect button if the metric type is GAUGE and an inspect handler is provided */}
{showInspectFeature && openInspectModal && (
<Button
className="inspect-metrics-button"
aria-label="Inspect Metric"

View File

@@ -11,7 +11,7 @@ export interface MetricDetailsProps {
isOpen: boolean;
metricName: string | null;
isModalTimeSelection: boolean;
openInspectModal: (metricName: string) => void;
openInspectModal?: (metricName: string) => void;
}
export interface DashboardsAndAlertsPopoverProps {

View File

@@ -370,10 +370,6 @@ function NewWidget({
// this has been moved here from the left container
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
const updatedQuery = cloneDeep(stagedQuery || initialQueriesMap.metrics);
if (updatedQuery?.builder?.queryData?.[0]) {
updatedQuery.builder.queryData[0].pageSize = 10;
}
if (selectedWidget) {
if (selectedGraph === PANEL_TYPES.LIST) {
return {
@@ -419,16 +415,12 @@ function NewWidget({
useEffect(() => {
if (stagedQuery) {
setIsLoadingPanelData(false);
const updatedStagedQuery = cloneDeep(stagedQuery);
if (updatedStagedQuery?.builder?.queryData?.[0]) {
updatedStagedQuery.builder.queryData[0].pageSize = 10;
}
setRequestData((prev) => ({
...prev,
selectedTime: selectedTime.enum || prev.selectedTime,
globalSelectedInterval: customGlobalSelectedInterval,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: updatedStagedQuery,
query: stagedQuery,
fillGaps: selectedWidget.fillSpans || false,
isLogScale: selectedWidget.isLogScale || false,
formatForWeb:

View File

@@ -132,11 +132,20 @@ function UplotPanelWrapper({
[selectedGraph, widget?.panelTypes, widget?.stackedBarChart],
);
const chartData = getUPlotChartData(
queryResponse?.data?.payload,
widget.fillSpans,
stackedBarChart,
hiddenGraph,
const chartData = useMemo(
() =>
getUPlotChartData(
queryResponse?.data?.payload,
widget.fillSpans,
stackedBarChart,
hiddenGraph,
),
[
queryResponse?.data?.payload,
widget.fillSpans,
stackedBarChart,
hiddenGraph,
],
);
useEffect(() => {
@@ -293,7 +302,7 @@ function UplotPanelWrapper({
)}
{isFullViewMode && setGraphVisibility && !stackedBarChart && (
<GraphManager
data={getUPlotChartData(queryResponse?.data?.payload, widget.fillSpans)}
data={chartData}
name={widget.id}
options={options}
yAxisUnit={widget.yAxisUnit}

View File

@@ -206,6 +206,10 @@
.ant-select-selector {
border-color: var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
.ant-select-selection-item {
color: var(--text-ink-400);
}
}
.ant-input-number {

View File

@@ -1,7 +1,5 @@
import { Select } from 'antd';
import { ATTRIBUTE_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { useEffect, useState } from 'react';
import { MetricAggregateOperator } from 'types/common/queryBuilder';
interface SpaceAggregationOptionsProps {
panelType: PANEL_TYPES | null;
@@ -22,39 +20,13 @@ export default function SpaceAggregationOptions({
operators,
qbVersion,
}: SpaceAggregationOptionsProps): JSX.Element {
const placeHolderText =
panelType === PANEL_TYPES.VALUE || qbVersion === 'v3' ? 'Sum' : 'Sum By';
const [defaultValue, setDefaultValue] = useState(
selectedValue || placeHolderText,
);
useEffect(() => {
if (!selectedValue) {
if (
aggregatorAttributeType === ATTRIBUTE_TYPES.HISTOGRAM ||
aggregatorAttributeType === ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
setDefaultValue(MetricAggregateOperator.P90);
onSelect(MetricAggregateOperator.P90);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.SUM) {
setDefaultValue(MetricAggregateOperator.SUM);
onSelect(MetricAggregateOperator.SUM);
} else if (aggregatorAttributeType === ATTRIBUTE_TYPES.GAUGE) {
setDefaultValue(MetricAggregateOperator.AVG);
onSelect(MetricAggregateOperator.AVG);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [aggregatorAttributeType]);
return (
<div
className="spaceAggregationOptionsContainer"
key={aggregatorAttributeType}
>
<Select
defaultValue={defaultValue}
defaultValue={selectedValue}
style={{ minWidth: '5.625rem' }}
disabled={disabled}
onChange={onSelect}

View File

@@ -0,0 +1,16 @@
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.option-renderer-tooltip {
pointer-events: none;
}

View File

@@ -1,4 +1,4 @@
import './QueryBuilderSearch.styles.scss';
import './OptionRenderer.styles.scss';
import { Tooltip } from 'antd';
@@ -13,7 +13,11 @@ function OptionRenderer({
return (
<span className="option">
{type ? (
<Tooltip title={`${value}`} placement="topLeft">
<Tooltip
title={`${value}`}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<div className="selectOptionContainer">
<div className="option-value">{value}</div>
<div className="option-meta-data-container">
@@ -29,7 +33,11 @@ function OptionRenderer({
</div>
</Tooltip>
) : (
<Tooltip title={label} placement="topLeft">
<Tooltip
title={label}
placement="topLeft"
rootClassName="option-renderer-tooltip"
>
<span>{label}</span>
</Tooltip>
)}

View File

@@ -5,19 +5,6 @@
gap: 12px;
}
.selectOptionContainer {
display: flex;
gap: 8px;
justify-content: space-between;
align-items: center;
overflow-x: auto;
&::-webkit-scrollbar {
width: 0.2rem;
height: 0.2rem;
}
}
.logs-popup {
&.hide-scroll {
.rc-virtual-list-holder {

View File

@@ -0,0 +1,88 @@
import { render, screen } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { ReduceToFilter } from './ReduceToFilter';
const mockOnChange = jest.fn();
function baseQuery(overrides: Partial<IBuilderQuery> = {}): IBuilderQuery {
return {
dataSource: 'traces',
aggregations: [],
groupBy: [],
orderBy: [],
legend: '',
limit: null,
having: { expression: '' },
...overrides,
} as IBuilderQuery;
}
describe('ReduceToFilter', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('initializes with default avg when no reduceTo is set', () => {
render(<ReduceToFilter query={baseQuery()} onChange={mockOnChange} />);
expect(screen.getByTestId('reduce-to')).toBeInTheDocument();
expect(
screen.getByText('Average of values in timeframe'),
).toBeInTheDocument();
});
it('initializes from query.aggregations[0].reduceTo', () => {
render(
<ReduceToFilter
query={baseQuery({
aggregations: [{ reduceTo: 'sum' } as any],
aggregateAttribute: { key: 'test', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Sum of values in timeframe')).toBeInTheDocument();
});
it('initializes from query.reduceTo when aggregations[0].reduceTo is not set', () => {
render(
<ReduceToFilter
query={baseQuery({
reduceTo: 'max',
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
expect(screen.getByText('Max of values in timeframe')).toBeInTheDocument();
});
it('updates to sum when aggregateAttribute.type is SUM', async () => {
const { rerender } = render(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test', type: MetricType.GAUGE },
})}
onChange={mockOnChange}
/>,
);
rerender(
<ReduceToFilter
query={baseQuery({
aggregateAttribute: { key: 'test2', type: MetricType.SUM },
})}
onChange={mockOnChange}
/>,
);
const reduceToFilterText = (await screen.findByText(
'Sum of values in timeframe',
)) as HTMLElement;
expect(reduceToFilterText).toBeInTheDocument();
});
});

View File

@@ -1,6 +1,7 @@
import { Select } from 'antd';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { REDUCE_TO_VALUES } from 'constants/queryBuilder';
import { memo } from 'react';
import { memo, useEffect, useRef, useState } from 'react';
import { MetricAggregation } from 'types/api/v5/queryRange';
// ** Types
import { ReduceOperators } from 'types/common/queryBuilder';
@@ -12,19 +13,46 @@ export const ReduceToFilter = memo(function ReduceToFilter({
query,
onChange,
}: ReduceToFilterProps): JSX.Element {
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
const currentValue =
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[0];
const isMounted = useRef<boolean>(false);
const [currentValue, setCurrentValue] = useState<
SelectOption<ReduceOperators, string>
>(REDUCE_TO_VALUES[2]); // default to avg
const handleChange = (
newValue: SelectOption<ReduceOperators, string>,
): void => {
setCurrentValue(newValue);
onChange(newValue.value);
};
useEffect(
() => {
if (!isMounted.current) {
const reduceToValue =
(query.aggregations?.[0] as MetricAggregation)?.reduceTo || query.reduceTo;
setCurrentValue(
REDUCE_TO_VALUES.find((option) => option.value === reduceToValue) ||
REDUCE_TO_VALUES[2],
);
isMounted.current = true;
return;
}
const aggregationAttributeType = query.aggregateAttribute?.type as
| MetricType
| undefined;
if (aggregationAttributeType === MetricType.SUM) {
handleChange(REDUCE_TO_VALUES[1]);
} else {
handleChange(REDUCE_TO_VALUES[2]);
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[query.aggregateAttribute?.key],
);
return (
<Select
placeholder="Reduce to"

View File

@@ -0,0 +1,32 @@
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQueries, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';
type QueryResult = UseQueryResult<
SuccessResponseV2<MetricMetadataResponse>,
Error
>;
type UseGetMultipleMetrics = (
metricNames: string[],
options?: UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>,
headers?: Record<string, string>,
) => QueryResult[];
export const useGetMultipleMetrics: UseGetMultipleMetrics = (
metricNames,
options,
headers,
) =>
useQueries(
metricNames.map(
(metricName) =>
({
queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
queryFn: ({ signal }) => getMetricMetadata(metricName, signal, headers),
...options,
} as UseQueryOptions<SuccessResponseV2<MetricMetadataResponse>, Error>),
),
);
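For reference, a hedged usage sketch of the hook above (component and metric names are illustrative; note the payload is wrapped twice, SuccessResponseV2 around MetricMetadataResponse):

import { useGetMultipleMetrics } from 'hooks/metricsExplorer/useGetMultipleMetrics';

function MetricUnitsSummary(): JSX.Element {
  // One react-query result per metric name, index-aligned with the input array.
  const results = useGetMultipleMetrics([
    'http_server_duration',
    'system_memory_usage',
  ]);
  const isLoading = results.some((r) => r.isLoading);
  // Unwrap SuccessResponseV2 -> MetricMetadataResponse -> MetricMetadata.
  const units = results.map((r) => r.data?.data?.data?.unit);
  return <span>{isLoading ? 'Loading…' : units.join(', ')}</span>;
}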

View File

@@ -5,7 +5,7 @@ import updateMetricMetadata, {
import { useMutation, UseMutationResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
interface UseUpdateMetricMetadataProps {
export interface UseUpdateMetricMetadataProps {
metricName: string;
payload: UpdateMetricMetadataProps;
}

View File

@@ -188,7 +188,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
],
}),
@@ -239,7 +239,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.RATE,
metricName: 'new_sum_metric',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
],
}),
@@ -315,7 +315,7 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
timeAggregation: MetricAggregateOperator.AVG,
metricName: 'new_gauge',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
],
}),

View File

@@ -317,7 +317,7 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
@@ -326,7 +326,20 @@ export const useQueryOperations: UseQueryOperations = ({
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
];
} else if (
newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.HISTOGRAM ||
newQuery.aggregateAttribute?.type ===
ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM
) {
newQuery.aggregations = [
{
timeAggregation: '',
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.P90,
},
];
} else {

View File

@@ -0,0 +1,238 @@
import { renderHook } from '@testing-library/react';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { useGetMetrics } from 'container/MetricsExplorer/Explorer/utils';
import { MetricMetadata } from 'types/api/metricsExplorer/v2/getMetricMetadata';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import { useQueryBuilder } from './queryBuilder/useQueryBuilder';
import useGetYAxisUnit from './useGetYAxisUnit';
jest.mock('./queryBuilder/useQueryBuilder');
jest.mock('container/MetricsExplorer/Explorer/utils', () => ({
...jest.requireActual('container/MetricsExplorer/Explorer/utils'),
useGetMetrics: jest.fn(),
}));
const mockUseQueryBuilder = useQueryBuilder as jest.MockedFunction<
typeof useQueryBuilder
>;
const mockUseGetMetrics = useGetMetrics as jest.MockedFunction<
typeof useGetMetrics
>;
const MOCK_METRIC_1 = {
unit: UniversalYAxisUnit.BYTES,
} as MetricMetadata;
const MOCK_METRIC_2 = {
unit: UniversalYAxisUnit.SECONDS,
} as MetricMetadata;
const MOCK_METRIC_3 = {
unit: '',
} as MetricMetadata;
function createMockCurrentQuery(
queryType: EQueryType,
queryData: Query['builder']['queryData'] = [],
): Query {
return {
queryType,
promql: [],
builder: {
queryData,
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: [],
id: 'test-id',
};
}
describe('useGetYAxisUnit', () => {
beforeEach(() => {
jest.clearAllMocks();
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [],
});
mockUseQueryBuilder.mockReturnValue(({
stagedQuery: undefined,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
});
it('should return undefined yAxisUnit and disable useGetMetrics when stagedQuery is undefined', async () => {
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when queryType is PROM', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.PROM);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when queryType is CLICKHOUSE', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.CLICKHOUSE);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when dataSource is TRACES', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.TRACES,
aggregateAttribute: { key: 'trace_metric' },
} as Query['builder']['queryData'][0],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should return undefined yAxisUnit when dataSource is LOGS', async () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.LOGS,
aggregateAttribute: { key: 'log_metric' },
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
expect(mockUseGetMetrics).toHaveBeenCalledWith([], false);
});
it('should extract all metric names from queryData when no selected query name is provided', () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric1' },
queryName: 'query1',
} as Query['builder']['queryData'][number],
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric2' },
queryName: 'query2',
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderHook(() => useGetYAxisUnit());
expect(mockUseGetMetrics).toHaveBeenCalledWith(['metric1', 'metric2'], true);
});
it('should extract metric name for the selected query only when one is provided', () => {
const mockCurrentQuery = createMockCurrentQuery(EQueryType.QUERY_BUILDER, [
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric1' },
queryName: 'query1',
} as Query['builder']['queryData'][number],
{
dataSource: DataSource.METRICS,
aggregateAttribute: { key: 'metric2' },
queryName: 'query2',
} as Query['builder']['queryData'][number],
]);
mockUseQueryBuilder.mockReturnValueOnce(({
stagedQuery: mockCurrentQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderHook(() => useGetYAxisUnit('query2'));
expect(mockUseGetMetrics).toHaveBeenCalledWith(['metric2'], true);
});
it('should return the unit when there is a single metric with a non-empty unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBe(UniversalYAxisUnit.BYTES);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return undefined when there is a single metric with no unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_3],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return the unit when all metrics have the same non-empty unit', async () => {
mockUseGetMetrics.mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1, MOCK_METRIC_1],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBe(UniversalYAxisUnit.BYTES);
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
it('should return undefined when metrics have different units', async () => {
mockUseGetMetrics.mockReturnValueOnce({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_1, MOCK_METRIC_2],
});
const { result } = renderHook(() => useGetYAxisUnit());
expect(result.current.yAxisUnit).toBeUndefined();
expect(result.current.isLoading).toBe(false);
expect(result.current.isError).toBe(false);
});
});

View File

@@ -0,0 +1,108 @@
import {
getMetricUnits,
useGetMetrics,
} from 'container/MetricsExplorer/Explorer/utils';
import { useEffect, useMemo, useState } from 'react';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { useQueryBuilder } from './queryBuilder/useQueryBuilder';
interface UseGetYAxisUnitResult {
yAxisUnit: string | undefined;
isLoading: boolean;
isError: boolean;
}
/**
* Hook to get the y-axis unit for a given metrics-based query.
* @param selectedQueryName - The name of the query to get the y-axis unit for.
 * @param params.enabled - Whether the hook should run (defaults to true).
* @returns `{ yAxisUnit, isLoading, isError }` The y-axis unit, loading state, and error state
*/
function useGetYAxisUnit(
selectedQueryName?: string,
params: {
enabled?: boolean;
} = {
enabled: true,
},
): UseGetYAxisUnitResult {
const { stagedQuery } = useQueryBuilder();
const [yAxisUnit, setYAxisUnit] = useState<string | undefined>();
const metricNames: string[] | null = useMemo(() => {
// If the query type is not QUERY_BUILDER, return null
if (stagedQuery?.queryType !== EQueryType.QUERY_BUILDER) {
return null;
}
// If the data source is not METRICS, return null
const dataSource = stagedQuery?.builder?.queryData?.[0]?.dataSource;
if (dataSource !== DataSource.METRICS) {
return null;
}
const currentMetricNames: string[] = [];
// If a selected query name is provided, return the metric name for that query only
if (selectedQueryName) {
stagedQuery?.builder?.queryData?.forEach((query) => {
if (
query.queryName === selectedQueryName &&
query.aggregateAttribute?.key
) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames.length ? currentMetricNames : null;
}
// Else, return all metric names
stagedQuery?.builder?.queryData?.forEach((query) => {
if (query.aggregateAttribute?.key) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames.length ? currentMetricNames : null;
}, [
selectedQueryName,
stagedQuery?.builder?.queryData,
stagedQuery?.queryType,
]);
const { metrics, isLoading, isError } = useGetMetrics(
metricNames ?? [],
!!metricNames && params?.enabled,
);
const units = useMemo(() => getMetricUnits(metrics), [metrics]);
const areAllMetricUnitsSame = useMemo(
() => units.every((unit) => unit === units[0]),
[units],
);
useEffect(() => {
// If there are no metrics, set the y-axis unit to undefined
if (units.length === 0) {
setYAxisUnit(undefined);
// If there is one metric and it has a non-empty unit, set the y-axis unit to it
} else if (units.length === 1 && units[0] !== '') {
setYAxisUnit(units[0]);
// If all metrics have the same non-empty unit, set the y-axis unit to it
} else if (areAllMetricUnitsSame) {
if (units[0] !== '') {
setYAxisUnit(units[0]);
} else {
setYAxisUnit(undefined);
}
// If there is more than one metric and they have different units, set the y-axis unit to undefined
} else if (units.length > 1 && !areAllMetricUnitsSame) {
setYAxisUnit(undefined);
// If there is one metric and it has an empty unit, set the y-axis unit to undefined
} else if (units.length === 1 && units[0] === '') {
setYAxisUnit(undefined);
}
}, [units, areAllMetricUnitsSame]);
return { yAxisUnit, isLoading, isError };
}
export default useGetYAxisUnit;
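A short usage sketch (the import path is assumed from the hook's location alongside the other hooks):

import useGetYAxisUnit from 'hooks/useGetYAxisUnit';

function YAxisUnitLabel(): JSX.Element {
  // Resolves a unit only when every metric in the staged query agrees on one.
  const { yAxisUnit, isLoading, isError } = useGetYAxisUnit();
  if (isLoading) return <span>Resolving unit…</span>;
  if (isError || !yAxisUnit) return <span>No common unit</span>;
  return <span>{yAxisUnit}</span>;
}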

View File

@@ -1,6 +1,6 @@
import { themeColors } from 'constants/theme';
import getLabelName from 'lib/getLabelName';
import { cloneDeep, isUndefined } from 'lodash-es';
import { isUndefined } from 'lodash-es';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { QueryData } from 'types/api/widgets/getQuery';
@@ -8,7 +8,7 @@ import { normalizePlotValue } from './dataUtils';
import { generateColor } from './generateColor';
function getXAxisTimestamps(seriesList: QueryData[]): number[] {
const timestamps = new Set();
const timestamps = new Set<number>();
seriesList.forEach((series: { values?: [number, string][] }) => {
if (series?.values) {
@@ -18,54 +18,71 @@ function getXAxisTimestamps(seriesList: QueryData[]): number[] {
}
});
const timestampsArr: number[] | unknown[] = Array.from(timestamps) || [];
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
return timestampsArr.sort((a, b) => a - b);
const timestampsArr = Array.from(timestamps);
timestampsArr.sort((a, b) => a - b);
return timestampsArr;
}
function fillMissingXAxisTimestamps(timestampArr: number[], data: any[]): any {
// eslint-disable-next-line sonarjs/cognitive-complexity
function fillMissingXAxisTimestamps(
timestampArr: number[],
data: Array<{ values?: [number, string][] }>,
): (number | null)[][] {
// Generate a set of all timestamps in the range
const allTimestampsSet = new Set(timestampArr);
const processedData = cloneDeep(data);
const result: (number | null)[][] = [];
// Fill missing timestamps with null values
processedData.forEach((entry: { values: (number | null)[][] }) => {
const existingTimestamps = new Set(
(entry?.values ?? []).map((value) => value[0]),
);
// Process each series entry
for (let i = 0; i < data.length; i++) {
const entry = data[i];
if (!entry?.values) {
result.push([]);
} else {
// Build Set of existing timestamps directly (avoid intermediate array)
const existingTimestamps = new Set<number>();
const valuesMap = new Map<number, number | null>();
const missingTimestamps = Array.from(allTimestampsSet).filter(
(timestamp) => !existingTimestamps.has(timestamp),
);
for (let j = 0; j < entry.values.length; j++) {
const [timestamp, value] = entry.values[j];
existingTimestamps.add(timestamp);
valuesMap.set(timestamp, normalizePlotValue(value));
}
missingTimestamps.forEach((timestamp) => {
const value = null;
// Collect the timestamps missing from this series in a single pass
const missingTimestamps: number[] = [];
const allTimestampsArray = Array.from(allTimestampsSet);
for (let k = 0; k < allTimestampsArray.length; k++) {
const timestamp = allTimestampsArray[k];
if (!existingTimestamps.has(timestamp)) {
missingTimestamps.push(timestamp);
}
}
entry?.values?.push([timestamp, value]);
});
// Add missing timestamps to map
for (let j = 0; j < missingTimestamps.length; j++) {
valuesMap.set(missingTimestamps[j], null);
}
entry?.values?.forEach((v) => {
// eslint-disable-next-line no-param-reassign
v[1] = normalizePlotValue(v[1]);
});
// Build sorted array of values
const sortedTimestamps = Array.from(valuesMap.keys()).sort((a, b) => a - b);
const yValues = sortedTimestamps.map((timestamp) => {
const value = valuesMap.get(timestamp);
return value !== undefined ? value : null;
});
result.push(yValues);
}
}
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
entry?.values?.sort((a, b) => a[0] - b[0]);
});
return processedData.map((entry: { values: [number, string][] }) =>
entry?.values?.map((value) => value[1]),
);
return result;
}
function getStackedSeries(val: any): any {
const series = cloneDeep(val) || [];
function getStackedSeries(val: (number | null)[][]): (number | null)[][] {
const series = val ? val.map((row: (number | null)[]) => [...row]) : [];
for (let i = series.length - 2; i >= 0; i--) {
for (let j = 0; j < series[i].length; j++) {
series[i][j] += series[i + 1][j];
series[i][j] = (series[i][j] || 0) + (series[i + 1][j] || 0);
}
}
@@ -110,6 +127,7 @@ const processAnomalyDetectionData = (
queryIndex < anomalyDetectionData.length;
queryIndex++
) {
const queryData = anomalyDetectionData[queryIndex];
const {
series,
predictedSeries,
@@ -117,7 +135,7 @@ const processAnomalyDetectionData = (
lowerBoundSeries,
queryName,
legend,
} = anomalyDetectionData[queryIndex];
} = queryData;
for (let index = 0; index < series?.length; index++) {
const label = getLabelName(
@@ -129,14 +147,30 @@ const processAnomalyDetectionData = (
const objKey =
anomalyDetectionData.length > 1 ? `${queryName}-${label}` : label;
// Single iteration instead of 5 separate map operations
const { values: seriesValues } = series[index];
const { values: predictedValues } = predictedSeries[index];
const { values: upperBoundValues } = upperBoundSeries[index];
const { values: lowerBoundValues } = lowerBoundSeries[index];
// eslint-disable-next-line prefer-destructuring
const length = seriesValues.length;
const timestamps: number[] = new Array(length);
const values: number[] = new Array(length);
const predicted: number[] = new Array(length);
const upperBound: number[] = new Array(length);
const lowerBound: number[] = new Array(length);
for (let i = 0; i < length; i++) {
timestamps[i] = seriesValues[i].timestamp / 1000;
values[i] = seriesValues[i].value;
predicted[i] = predictedValues[i].value;
upperBound[i] = upperBoundValues[i].value;
lowerBound[i] = lowerBoundValues[i].value;
}
processedData[objKey] = {
data: [
series[index].values.map((v: { timestamp: number }) => v.timestamp / 1000),
series[index].values.map((v: { value: number }) => v.value),
predictedSeries[index].values.map((v: { value: number }) => v.value),
upperBoundSeries[index].values.map((v: { value: number }) => v.value),
lowerBoundSeries[index].values.map((v: { value: number }) => v.value),
],
data: [timestamps, values, predicted, upperBound, lowerBound],
color: generateColor(
objKey,
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
@@ -152,14 +186,7 @@ const processAnomalyDetectionData = (
export const getUplotChartDataForAnomalyDetection = (
apiResponse: MetricRangePayloadProps,
isDarkMode: boolean,
): Record<
string,
{
[x: string]: any;
data: number[][];
color: string;
}
> => {
): Record<string, { [x: string]: any; data: number[][]; color: string }> => {
const anomalyDetectionData = apiResponse?.data?.newResult?.data?.result;
return processAnomalyDetectionData(anomalyDetectionData, isDarkMode);
};
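To make the getStackedSeries step above concrete, a small worked example (values are illustrative): each series accumulates everything below it in the stack, bottom-up, with nulls treated as 0.

const input: (number | null)[][] = [
  [1, 2, 3], // top series
  [4, null, 6], // bottom series
];
// getStackedSeries(input)
// => [[5, 2, 9], [4, null, 6]]
// series[0] becomes series[0] + series[1] element-wise; the bottom row is unchanged.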

View File

@@ -0,0 +1,15 @@
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
export interface MetricMetadata {
description: string;
type: MetricType;
unit: string;
temporality: Temporality;
isMonotonic: boolean;
}
export interface MetricMetadataResponse {
status: string;
data: MetricMetadata;
}

go.mod
View File

@@ -11,7 +11,6 @@ require (
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/bytedance/sonic v1.14.1
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.14.1
github.com/dgraph-io/ristretto/v2 v2.3.0
@@ -75,12 +74,12 @@ require (
go.opentelemetry.io/otel/trace v1.38.0
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.41.0
golang.org/x/crypto v0.46.0
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/net v0.43.0
golang.org/x/net v0.47.0
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.17.0
golang.org/x/text v0.28.0
golang.org/x/sync v0.19.0
golang.org/x/text v0.32.0
google.golang.org/protobuf v1.36.9
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
@@ -90,6 +89,7 @@ require (
require (
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.1 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
@@ -103,6 +103,7 @@ require (
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
go.yaml.in/yaml/v2 v2.4.2 // indirect
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
golang.org/x/tools/godoc v0.1.0-deprecated // indirect
modernc.org/libc v1.66.10 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
@@ -223,6 +224,7 @@ require (
github.com/oklog/run v1.1.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/oklog/ulid/v2 v2.1.1 // indirect
github.com/open-feature/go-sdk v1.17.0
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
@@ -336,10 +338,10 @@ require (
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/mock v0.6.0 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/mod v0.27.0 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/mod v0.30.0 // indirect
golang.org/x/sys v0.39.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.36.0 // indirect
golang.org/x/tools v0.39.0 // indirect
gonum.org/v1/gonum v0.16.0 // indirect
google.golang.org/api v0.236.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect

go.sum
View File

@@ -762,6 +762,8 @@ github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4=
github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog=
github.com/open-feature/go-sdk v1.17.0 h1:/OUBBw5d9D61JaNZZxb2Nnr5/EJrEpjtKCTY3rspJQk=
github.com/open-feature/go-sdk v1.17.0/go.mod h1:lPxPSu1UnZ4E3dCxZi5gV3et2ACi8O8P+zsTGVsDZUw=
github.com/open-telemetry/opamp-go v0.19.0 h1:8LvQKDwqi+BU3Yy159SU31e2XB0vgnk+PN45pnKilPs=
github.com/open-telemetry/opamp-go v0.19.0/go.mod h1:9/1G6T5dnJz4cJtoYSr6AX18kHdOxnxxETJPZSHyEUg=
github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage v0.128.0 h1:T5IE0l1qcIg6dkHui4hHe+qj3VzuMwpnhrUyubyCwO0=
@@ -1282,8 +1284,8 @@ golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm
golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -1321,8 +1323,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ=
golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc=
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -1371,8 +1373,8 @@ golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su
golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -1407,8 +1409,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -1495,12 +1497,12 @@ golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q=
golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -1511,8 +1513,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -1575,8 +1577,10 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
golang.org/x/tools/godoc v0.1.0-deprecated h1:o+aZ1BOj6Hsx/GBdJO/s815sqftjSnrZZwyYTHODvtk=
golang.org/x/tools/godoc v0.1.0-deprecated/go.mod h1:qM63CriJ961IHWmnWa9CjZnBndniPt4a3CK0PVB9bIg=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View File

@@ -80,6 +80,17 @@ func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, er
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
@@ -102,6 +113,16 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
@@ -121,3 +142,21 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}
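Restated outside Go for illustration (this TypeScript sketch mirrors the rule above and is not part of the change): resource, attribute, and scope contexts are valid for every signal; the remaining contexts are signal-specific; unknown signals accept any context.

type Signal = 'logs' | 'traces' | 'metrics' | string;
type FieldContext =
  | 'resource' | 'attribute' | 'scope'
  | 'log' | 'body' | 'span' | 'event' | 'trace' | 'metric';

function isContextValidForSignal(ctx: FieldContext, signal: Signal): boolean {
  // Cross-signal contexts are always valid.
  if (ctx === 'resource' || ctx === 'attribute' || ctx === 'scope') return true;
  switch (signal) {
    case 'logs':
      return ctx === 'log' || ctx === 'body';
    case 'traces':
      return ctx === 'span' || ctx === 'event' || ctx === 'trace';
    case 'metrics':
      return ctx === 'metric';
    default:
      // Unknown signal: accept any context, matching the Go fallback.
      return true;
  }
}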

View File

@@ -0,0 +1,31 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFlaggerRoutes(router *mux.Router) error {
if err := router.Handle("/api/v2/features", handler.New(provider.authZ.ViewAccess(provider.flaggerHandler.GetFeatures), handler.OpenAPIDef{
ID: "GetFeatures",
Tags: []string{"features"},
Summary: "Get features",
Description: "This endpoint returns the supported features and their details",
Request: nil,
RequestContentType: "",
Response: make([]*featuretypes.GettableFeature, 0),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}
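A hedged client-side sketch of calling the new endpoint (the JSON field casing is an assumption; the shape mirrors GettableFeature from this diff, and auth is omitted):

interface GettableFeature {
  name: string;
  kind: string;
  stage: string;
  description: string;
  defaultVariant: string;
  resolvedValue: unknown;
}

// Requires at least viewer access per the route's security scheme.
const res = await fetch('/api/v2/features');
const features: GettableFeature[] = await res.json();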

View File

@@ -6,6 +6,7 @@ import (
"github.com/SigNoz/signoz/pkg/apiserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
@@ -32,6 +33,7 @@ type provider struct {
preferenceHandler preference.Handler
globalHandler global.Handler
promoteHandler promote.Handler
flaggerHandler flagger.Handler
}
func NewFactory(
@@ -44,9 +46,10 @@ func NewFactory(
preferenceHandler preference.Handler,
globalHandler global.Handler,
promoteHandler promote.Handler,
flaggerHandler flagger.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler)
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler)
})
}
@@ -63,6 +66,7 @@ func newProvider(
preferenceHandler preference.Handler,
globalHandler global.Handler,
promoteHandler promote.Handler,
flaggerHandler flagger.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -78,6 +82,7 @@ func newProvider(
preferenceHandler: preferenceHandler,
globalHandler: globalHandler,
promoteHandler: promoteHandler,
flaggerHandler: flaggerHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -122,6 +127,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFlaggerRoutes(router); err != nil {
return err
}
return nil
}

pkg/flagger/config.go (new file)
View File

@@ -0,0 +1,32 @@
package flagger
import "github.com/SigNoz/signoz/pkg/factory"
type Config struct {
Config ConfigProvider `mapstructure:"config"`
}
type ConfigProvider struct {
Boolean map[string]bool `mapstructure:"boolean"`
String map[string]string `mapstructure:"string"`
Float map[string]float64 `mapstructure:"float"`
Integer map[string]int64 `mapstructure:"integer"`
Object map[string]any `mapstructure:"object"`
}
func NewConfigFactory() factory.ConfigFactory {
return factory.NewConfigFactory(
factory.MustNewName("flagger"), newConfig,
)
}
// newConfig creates a new config with the default values.
func newConfig() factory.Config {
return &Config{
Config: ConfigProvider{},
}
}
func (c Config) Validate() error {
return nil
}

View File

@@ -0,0 +1,320 @@
package configflagger
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/open-feature/go-sdk/openfeature"
)
type provider struct {
config flagger.Config
settings factory.ScopedProviderSettings
// This is the default registry containing all supported features along with all their possible variants
registry featuretypes.Registry
// These are the feature variants that are configured in the config file and will be used as overrides
featureVariants map[featuretypes.Name]*featuretypes.FeatureVariant
}
func NewFactory(registry featuretypes.Registry) factory.ProviderFactory[flagger.FlaggerProvider, flagger.Config] {
return factory.NewProviderFactory(factory.MustNewName("config"), func(ctx context.Context, ps factory.ProviderSettings, c flagger.Config) (flagger.FlaggerProvider, error) {
return New(ctx, ps, c, registry)
})
}
func New(ctx context.Context, ps factory.ProviderSettings, c flagger.Config, registry featuretypes.Registry) (flagger.FlaggerProvider, error) {
settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/pkg/flagger/configflagger")
featureVariants := make(map[featuretypes.Name]*featuretypes.FeatureVariant)
for name, value := range c.Config.Boolean {
feature, _, err := registry.GetByString(name)
if err != nil {
return nil, err
}
variant, err := featuretypes.VariantByValue(feature, value)
if err != nil {
return nil, err
}
featureVariants[feature.Name] = variant
}
for name, value := range c.Config.String {
feature, _, err := registry.GetByString(name)
if err != nil {
return nil, err
}
variant, err := featuretypes.VariantByValue(feature, value)
if err != nil {
return nil, err
}
featureVariants[feature.Name] = variant
}
for name, value := range c.Config.Float {
feature, _, err := registry.GetByString(name)
if err != nil {
return nil, err
}
variant, err := featuretypes.VariantByValue(feature, value)
if err != nil {
return nil, err
}
featureVariants[feature.Name] = variant
}
for name, value := range c.Config.Integer {
feature, _, err := registry.GetByString(name)
if err != nil {
return nil, err
}
variant, err := featuretypes.VariantByValue(feature, value)
if err != nil {
return nil, err
}
featureVariants[feature.Name] = variant
}
for name, value := range c.Config.Object {
feature, _, err := registry.GetByString(name)
if err != nil {
return nil, err
}
variant, err := featuretypes.VariantByValue(feature, value)
if err != nil {
return nil, err
}
featureVariants[feature.Name] = variant
}
return &provider{
config: c,
settings: settings,
registry: registry,
featureVariants: featureVariants,
}, nil
}
func (p *provider) Metadata() openfeature.Metadata {
return openfeature.Metadata{
Name: "config",
}
}
func (p *provider) BooleanEvaluation(ctx context.Context, flag string, defaultValue bool, evalCtx openfeature.FlattenedContext) openfeature.BoolResolutionDetail {
// check if the feature is present in the default registry
feature, detail, err := p.registry.GetByString(flag)
if err != nil {
return openfeature.BoolResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// get the default value from the feature from default registry
value, detail, err := featuretypes.VariantValue[bool](feature, feature.DefaultVariant)
if err != nil {
return openfeature.BoolResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// check if the feature is present in the featureVariants map
variant, ok := p.featureVariants[feature.Name]
if ok {
// return early as we have found the value in the featureVariants map
return openfeature.BoolResolutionDetail{
Value: variant.Value.(bool),
ProviderResolutionDetail: detail,
}
}
// return the value from the default registry we found earlier
return openfeature.BoolResolutionDetail{
Value: value,
ProviderResolutionDetail: detail,
}
}
func (p *provider) FloatEvaluation(ctx context.Context, flag string, defaultValue float64, evalCtx openfeature.FlattenedContext) openfeature.FloatResolutionDetail {
// check if the feature is present in the default registry
feature, detail, err := p.registry.GetByString(flag)
if err != nil {
return openfeature.FloatResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// get the default value from the feature from default registry
value, detail, err := featuretypes.VariantValue[float64](feature, feature.DefaultVariant)
if err != nil {
return openfeature.FloatResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// check if the feature is present in the featureVariants map
variant, ok := p.featureVariants[feature.Name]
if ok {
// return early as we have found the value in the featureVariants map
return openfeature.FloatResolutionDetail{
Value: variant.Value.(float64),
ProviderResolutionDetail: detail,
}
}
// return the value from the default registry we found earlier
return openfeature.FloatResolutionDetail{
Value: value,
ProviderResolutionDetail: detail,
}
}
func (p *provider) StringEvaluation(ctx context.Context, flag string, defaultValue string, evalCtx openfeature.FlattenedContext) openfeature.StringResolutionDetail {
// check if the feature is present in the default registry
feature, detail, err := p.registry.GetByString(flag)
if err != nil {
return openfeature.StringResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// get the default value from the feature from default registry
value, detail, err := featuretypes.VariantValue[string](feature, feature.DefaultVariant)
if err != nil {
return openfeature.StringResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// check if the feature is present in the featureVariants map
variant, ok := p.featureVariants[feature.Name]
if ok {
// return early as we have found the value in the featureVariants map
return openfeature.StringResolutionDetail{
Value: variant.Value.(string),
ProviderResolutionDetail: detail,
}
}
// return the value from the default registry we found earlier
return openfeature.StringResolutionDetail{
Value: value,
ProviderResolutionDetail: detail,
}
}
func (p *provider) IntEvaluation(ctx context.Context, flag string, defaultValue int64, evalCtx openfeature.FlattenedContext) openfeature.IntResolutionDetail {
// check if the feature is present in the default registry
feature, detail, err := p.registry.GetByString(flag)
if err != nil {
return openfeature.IntResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// get the default value from the feature from default registry
value, detail, err := featuretypes.VariantValue[int64](feature, feature.DefaultVariant)
if err != nil {
return openfeature.IntResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// check if the feature is present in the featureVariants map
variant, ok := p.featureVariants[feature.Name]
if ok {
// return early as we have found the value in the featureVariants map
return openfeature.IntResolutionDetail{
Value: variant.Value.(int64),
ProviderResolutionDetail: detail,
}
}
// return the value from the default registry we found earlier
return openfeature.IntResolutionDetail{
Value: value,
ProviderResolutionDetail: detail,
}
}
func (p *provider) ObjectEvaluation(ctx context.Context, flag string, defaultValue any, evalCtx openfeature.FlattenedContext) openfeature.InterfaceResolutionDetail {
// check if the feature is present in the default registry
feature, detail, err := p.registry.GetByString(flag)
if err != nil {
return openfeature.InterfaceResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// get the default value from the feature from default registry
value, detail, err := featuretypes.VariantValue[any](feature, feature.DefaultVariant)
if err != nil {
return openfeature.InterfaceResolutionDetail{
Value: defaultValue,
ProviderResolutionDetail: detail,
}
}
// check if the feature is present in the featureVariants map
variant, ok := p.featureVariants[feature.Name]
if ok {
// return early as we have found the value in the featureVariants map
return openfeature.InterfaceResolutionDetail{
Value: variant.Value,
ProviderResolutionDetail: detail,
}
}
// return the value from the default registry we found earlier
return openfeature.InterfaceResolutionDetail{
Value: value,
ProviderResolutionDetail: detail,
}
}
func (p *provider) Hooks() []openfeature.Hook {
return []openfeature.Hook{}
}
func (p *provider) List(ctx context.Context) ([]*featuretypes.GettableFeature, error) {
result := make([]*featuretypes.GettableFeature, 0, len(p.featureVariants))
for featureName, variant := range p.featureVariants {
feature, _, err := p.registry.Get(featureName)
if err != nil {
return nil, err
}
result = append(result, &featuretypes.GettableFeature{
Name: feature.Name.String(),
Kind: feature.Kind.StringValue(),
Stage: feature.Stage.StringValue(),
Description: feature.Description,
DefaultVariant: feature.DefaultVariant.String(),
Variants: nil,
ResolvedValue: variant.Value,
})
}
return result, nil
}
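
The evaluation methods above share one resolution order: look the flag up in the default registry, compute the default-variant value, then let an entry in featureVariants override it. A minimal standalone sketch of that precedence, with hypothetical defaults/overlay maps standing in for the registry and the provider config:

package main

import "fmt"

// resolve applies the provider's precedence: an overlay entry of the right
// type wins; otherwise the registry default stands. Hypothetical sketch; the
// real methods also carry openfeature resolution details.
func resolve[T any](defaults map[string]T, overlay map[string]any, flag string) (T, bool) {
	def, ok := defaults[flag]
	if !ok {
		var zero T
		return zero, false // unknown flag: the caller keeps its own default
	}
	if raw, ok := overlay[flag]; ok {
		if v, ok := raw.(T); ok {
			return v, true
		}
	}
	return def, true
}

func main() {
	defaults := map[string]bool{"use-new-ui": false}
	overlay := map[string]any{"use-new-ui": true}
	v, _ := resolve(defaults, overlay, "use-new-ui")
	fmt.Println(v) // true: the overlay wins over the registry default
}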

pkg/flagger/flagger.go (new file, 282 lines added)

@@ -0,0 +1,282 @@
package flagger
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/open-feature/go-sdk/openfeature"
)
// FlaggerProvider is the interface that any feature flag provider has to implement.
type FlaggerProvider interface {
openfeature.FeatureProvider
// List returns all the feature flags
List(ctx context.Context) ([]*featuretypes.GettableFeature, error)
}
// Flagger is the consumer-facing interface for the Flagger service.
type Flagger interface {
Boolean(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (bool, error)
String(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (string, error)
Float(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (float64, error)
Int(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (int64, error)
Object(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (any, error)
List(ctx context.Context, evalCtx featuretypes.FlaggerEvaluationContext) ([]*featuretypes.GettableFeature, error)
}
// flagger is the concrete implementation of the Flagger interface.
type flagger struct {
registry featuretypes.Registry
settings factory.ScopedProviderSettings
providers map[string]FlaggerProvider
clients map[string]*openfeature.Client
}
func New(ctx context.Context, ps factory.ProviderSettings, config Config, registry featuretypes.Registry, factories ...factory.ProviderFactory[FlaggerProvider, Config]) (Flagger, error) {
settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/pkg/flagger")
providers := make(map[string]FlaggerProvider)
clients := make(map[string]*openfeature.Client)
for _, factory := range factories {
provider, err := factory.New(ctx, ps, config)
if err != nil {
return nil, err
}
providers[provider.Metadata().Name] = provider
openfeatureClient := openfeature.NewClient(provider.Metadata().Name)
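// register the provider under its own name and wait for it to finish
// initializing, so the named client never evaluates against a provider
// that is still starting up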
if err := openfeature.SetNamedProviderAndWait(provider.Metadata().Name, provider); err != nil {
return nil, err
}
clients[provider.Metadata().Name] = openfeatureClient
}
return &flagger{
registry: registry,
settings: settings,
providers: providers,
clients: clients,
}, nil
}
func (f *flagger) Boolean(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (bool, error) {
// check if the feature is present in the default registry
feature, _, err := f.registry.GetByString(flag)
if err != nil {
f.settings.Logger().ErrorContext(ctx, "failed to get feature from default registry", "error", err, "flag", flag)
return false, err
}
// get the default value for the feature from the default registry
defaultValue, _, err := featuretypes.VariantValue[bool](feature, feature.DefaultVariant)
if err != nil {
// this should not happen: the default variant is registered alongside the feature
f.settings.Logger().ErrorContext(ctx, "failed to get default value from feature", "error", err, "flag", flag)
return false, err
}
// TODO: this lookup can be optimised with client priorities and short-circuiting;
// for now ask every available client for the value
for _, client := range f.clients {
value, err := client.BooleanValue(ctx, flag, defaultValue, evalCtx.Ctx())
if err != nil {
f.settings.Logger().WarnContext(ctx, "failed to get value from client", "error", err, "flag", flag, "client", client.Metadata().Name)
continue
}
if value != defaultValue {
return value, nil
}
}
return defaultValue, nil
}
func (f *flagger) String(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (string, error) {
// check if the feature is present in the default registry
feature, _, err := f.registry.GetByString(flag)
if err != nil {
f.settings.Logger().ErrorContext(ctx, "failed to get feature from default registry", "error", err, "flag", flag)
return "", err
}
// get the default value for the feature from the default registry
defaultValue, _, err := featuretypes.VariantValue[string](feature, feature.DefaultVariant)
if err != nil {
// this should not happen: the default variant is registered alongside the feature
f.settings.Logger().ErrorContext(ctx, "failed to get default value from feature", "error", err, "flag", flag)
return "", err
}
// TODO: this lookup can be optimised with client priorities and short-circuiting;
// for now ask every available client for the value
for _, client := range f.clients {
value, err := client.StringValue(ctx, flag, defaultValue, evalCtx.Ctx())
if err != nil {
f.settings.Logger().WarnContext(ctx, "failed to get value from client", "error", err, "flag", flag, "client", client.Metadata().Name)
continue
}
if value != defaultValue {
return value, nil
}
}
return defaultValue, nil
}
func (f *flagger) Float(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (float64, error) {
// check if the feature is present in the default registry
feature, _, err := f.registry.GetByString(flag)
if err != nil {
f.settings.Logger().ErrorContext(ctx, "failed to get feature from default registry", "error", err, "flag", flag)
return 0, err
}
// get the default value for the feature from the default registry
defaultValue, _, err := featuretypes.VariantValue[float64](feature, feature.DefaultVariant)
if err != nil {
// this should not happen: the default variant is registered alongside the feature
f.settings.Logger().ErrorContext(ctx, "failed to get default value from feature", "error", err, "flag", flag)
return 0, err
}
// TODO: this lookup can be optimised with client priorities and short-circuiting;
// for now ask every available client for the value
for _, client := range f.clients {
value, err := client.FloatValue(ctx, flag, defaultValue, evalCtx.Ctx())
if err != nil {
f.settings.Logger().WarnContext(ctx, "failed to get value from client", "error", err, "flag", flag, "client", client.Metadata().Name)
continue
}
if value != defaultValue {
return value, nil
}
}
return defaultValue, nil
}
func (f *flagger) Int(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (int64, error) {
// check if the feature is present in the default registry
feature, _, err := f.registry.GetByString(flag)
if err != nil {
f.settings.Logger().ErrorContext(ctx, "failed to get feature from default registry", "error", err, "flag", flag)
return 0, err
}
// get the default value for the feature from the default registry
defaultValue, _, err := featuretypes.VariantValue[int64](feature, feature.DefaultVariant)
if err != nil {
// this should not happen: the default variant is registered alongside the feature
f.settings.Logger().ErrorContext(ctx, "failed to get default value from feature", "error", err, "flag", flag)
return 0, err
}
// TODO: this lookup can be optimised with client priorities and short-circuiting;
// for now ask every available client for the value
for _, client := range f.clients {
value, err := client.IntValue(ctx, flag, defaultValue, evalCtx.Ctx())
if err != nil {
f.settings.Logger().WarnContext(ctx, "failed to get value from client", "error", err, "flag", flag, "client", client.Metadata().Name)
continue
}
if value != defaultValue {
return value, nil
}
}
return defaultValue, nil
}
func (f *flagger) Object(ctx context.Context, flag string, evalCtx featuretypes.FlaggerEvaluationContext) (any, error) {
// check if the feature is present in the default registry
feature, _, err := f.registry.GetByString(flag)
if err != nil {
f.settings.Logger().ErrorContext(ctx, "failed to get feature from default registry", "error", err, "flag", flag)
return nil, err
}
// get the default value for the feature from the default registry
defaultValue, _, err := featuretypes.VariantValue[any](feature, feature.DefaultVariant)
if err != nil {
// this should not happen: the default variant is registered alongside the feature
f.settings.Logger().ErrorContext(ctx, "failed to get default value from feature", "error", err, "flag", flag)
return nil, err
}
// TODO: this lookup can be optimised with client priorities and short-circuiting;
// for now ask every available client for the value
for _, client := range f.clients {
value, err := client.ObjectValue(ctx, flag, defaultValue, evalCtx.Ctx())
if err != nil {
f.settings.Logger().WarnContext(ctx, "failed to get value from client", "error", err, "flag", flag, "client", client.Metadata().Name)
continue
}
// ! for object values we do not compare with the default for now; this will be revisited in upcoming releases
// if value != defaultValue {
// return value, nil
// }
return value, nil
}
return defaultValue, nil
}
func (f *flagger) List(ctx context.Context, evalCtx featuretypes.FlaggerEvaluationContext) ([]*featuretypes.GettableFeature, error) {
// get all the features from the default registry
allFeatures := f.registry.List()
// build a map of feature name -> the gettable representation we want to return
featureMap := make(map[string]*featuretypes.GettableFeature, len(allFeatures))
for _, feature := range allFeatures {
variants := make(map[string]any, len(feature.Variants))
for name, value := range feature.Variants {
variants[name.String()] = value.Value
}
featureMap[feature.Name.String()] = &featuretypes.GettableFeature{
Name: feature.Name.String(),
Kind: feature.Kind.StringValue(),
Stage: feature.Stage.StringValue(),
Description: feature.Description,
DefaultVariant: feature.DefaultVariant.String(),
Variants: variants,
ResolvedValue: feature.Variants[feature.DefaultVariant].Value,
}
}
// now ask each provider and overwrite the resolved values in the feature map
for _, provider := range f.providers {
pFeatures, err := provider.List(ctx)
if err != nil {
f.settings.Logger().WarnContext(ctx, "failed to get features from provider", "error", err, "provider", provider.Metadata().Name)
continue
}
// merge: providers may override resolved values, but never add features absent from the registry
for _, pFeature := range pFeatures {
if existing, ok := featureMap[pFeature.Name]; ok {
existing.ResolvedValue = pFeature.ResolvedValue
}
}
}
result := make([]*featuretypes.GettableFeature, 0, len(allFeatures))
for _, f := range featureMap {
result = append(result, f)
}
return result, nil
}
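
Boolean, String, Float, and Int all aggregate their clients the same way: the first client whose answer differs from the registry default wins, and an error merely skips that client. A compact sketch of the rule, with hypothetical resolver functions standing in for the openfeature clients:

package main

import "fmt"

// firstNonDefault polls resolvers in order and returns the first value that
// deviates from the default; a failing resolver is skipped rather than
// aborting evaluation. Hypothetical sketch of flagger's aggregation rule.
func firstNonDefault[T comparable](def T, resolvers ...func() (T, error)) T {
	for _, r := range resolvers {
		v, err := r()
		if err != nil {
			continue
		}
		if v != def {
			return v
		}
	}
	return def
}

func main() {
	v := firstNonDefault(false,
		func() (bool, error) { return false, nil }, // agrees with the default
		func() (bool, error) { return true, nil },  // deviates, so it wins
	)
	fmt.Println(v) // true
}

Note that f.clients is a map, so the polling order is nondeterministic; the TODO about client priorities in the methods above is what would make it deterministic.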

pkg/flagger/handler.go (new file, 53 lines added)

@@ -0,0 +1,53 @@
package flagger
import (
"context"
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Handler interface {
GetFeatures(http.ResponseWriter, *http.Request)
}
type handler struct {
flagger Flagger
}
func NewHandler(flagger Flagger) Handler {
return &handler{
flagger: flagger,
}
}
func (handler *handler) GetFeatures(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
features, err := handler.flagger.List(ctx, evalCtx)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, features)
}
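
GetFeatures has the standard http.HandlerFunc shape, so exposing it is just a matter of routing. A minimal sketch, assuming a constructed Flagger, a standard-library mux, and the net/http and flagger imports; the route path here is hypothetical, not necessarily the one SigNoz registers:

func routes(f flagger.Flagger) *http.ServeMux {
	mux := http.NewServeMux()
	h := flagger.NewHandler(f)
	// hypothetical path; the real API server wires this through its own router
	mux.HandleFunc("/api/v1/features", h.GetFeatures)
	return mux
}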

pkg/flagger/registry.go (new file, 12 lines added)

@@ -0,0 +1,12 @@
package flagger
import "github.com/SigNoz/signoz/pkg/types/featuretypes"
func MustNewRegistry() featuretypes.Registry {
registry, err := featuretypes.NewRegistry()
if err != nil {
panic(err)
}
return registry
}
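
MustNewRegistry is the usual Go Must* wrapper: a construction error at process start becomes a panic instead of an error threaded through every caller. The pattern generalizes to any constructor returning (T, error); a sketch with a hypothetical generic helper:

// MustNew panics on err; appropriate only for wiring that runs at startup,
// where a bad configuration should stop the process outright.
func MustNew[T any](v T, err error) T {
	if err != nil {
		panic(err)
	}
	return v
}

// usage: registry := MustNew(featuretypes.NewRegistry())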


@@ -61,7 +61,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
response := []promotetypes.PromotePath{}
for _, path := range promotedPaths {
fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
path = telemetrytypes.BodyJSONStringSearchPrefix + path
path = telemetrylogs.BodyJSONStringSearchPrefix + path
item := promotetypes.PromotePath{
Path: path,
Promote: true,
@@ -77,7 +77,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
// add the paths that are not promoted but have indexes
for path, indexes := range aggr {
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path = telemetrytypes.BodyJSONStringSearchPrefix + path
path = telemetrylogs.BodyJSONStringSearchPrefix + path
response = append(response, promotetypes.PromotePath{
Path: path,
Indexes: indexes,


@@ -10,11 +10,9 @@ import (
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
type builderQuery[T any] struct {
@@ -250,40 +248,6 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
return nil, err
}
// merge body_json and promoted into body
if q.spec.Signal == telemetrytypes.SignalLogs {
switch typedPayload := payload.(type) {
case *qbtypes.RawData:
for _, rr := range typedPayload.Rows {
seeder := func() error {
body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
if !ok {
return nil
}
promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
if !ok {
return nil
}
seed(promoted, body)
str, err := sonic.MarshalString(body)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
}
rr.Data["body"] = str
return nil
}
err := seeder()
if err != nil {
return nil, err
}
delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
}
payload = typedPayload
}
}
return &qbtypes.Result{
Type: q.kind,
Value: payload,
@@ -411,18 +375,3 @@ func decodeCursor(cur string) (int64, error) {
}
return strconv.ParseInt(string(b), 10, 64)
}
func seed(promoted map[string]any, body map[string]any) {
for key, fromValue := range promoted {
if toValue, ok := body[key]; !ok {
body[key] = fromValue
} else {
if fromValue, ok := fromValue.(map[string]any); ok {
if toValue, ok := toValue.(map[string]any); ok {
seed(fromValue, toValue)
body[key] = toValue
}
}
}
}
}
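
The seed helper removed above deep-merged promoted keys into body: keys missing from body were copied over, keys present on both sides were merged recursively when both values were objects, and body's value won every scalar conflict. A standalone sketch of that semantics with a worked example:

package main

import "fmt"

// merge copies keys from src into dst without overwriting values already in
// dst; when both sides hold objects, it recurses. Mirrors the semantics of
// the removed seed helper.
func merge(src, dst map[string]any) {
	for k, sv := range src {
		dv, ok := dst[k]
		if !ok {
			dst[k] = sv
			continue
		}
		if sm, ok := sv.(map[string]any); ok {
			if dm, ok := dv.(map[string]any); ok {
				merge(sm, dm)
			}
		}
	}
}

func main() {
	promoted := map[string]any{"a": 1, "n": map[string]any{"x": 1}}
	body := map[string]any{"n": map[string]any{"y": 2}}
	merge(promoted, body)
	fmt.Println(body) // map[a:1 n:map[x:1 y:2]]
}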


@@ -14,7 +14,6 @@ import (
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
var (
@@ -52,6 +51,7 @@ func consume(rows driver.Rows, kind qbtypes.RequestType, queryWindow *qbtypes.Ti
}
func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbtypes.Step, queryName string) (*qbtypes.TimeSeriesData, error) {
colTypes := rows.ColumnTypes()
colNames := rows.Columns()
@@ -354,22 +354,10 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
colTypes := rows.ColumnTypes()
colCnt := len(colNames)
// Helper that decides scan target per column based on DB type
makeScanTarget := func(i int) any {
dbt := strings.ToUpper(colTypes[i].DatabaseTypeName())
if strings.HasPrefix(dbt, "JSON") {
// Since the driver fails to decode JSON/Dynamic into native Go values, we read it as raw bytes
// TODO: check in future if fixed in the driver
var v []byte
return &v
}
return reflect.New(colTypes[i].ScanType()).Interface()
}
// Build a template slice of correctly-typed pointers once
scanTpl := make([]any, colCnt)
for i := range colTypes {
scanTpl[i] = makeScanTarget(i)
for i, ct := range colTypes {
scanTpl[i] = reflect.New(ct.ScanType()).Interface()
}
var outRows []*qbtypes.RawRow
@@ -378,7 +366,7 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// fresh copy of the scan slice (otherwise the driver reuses pointers)
scan := make([]any, colCnt)
for i := range scanTpl {
scan[i] = makeScanTarget(i)
scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
}
if err := rows.Scan(scan...); err != nil {
@@ -395,28 +383,6 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()
// Post-process JSON columns: normalize into structured values
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
if len(x) > 0 {
var v any
if err := sonic.Unmarshal(x, &v); err == nil {
val = v
}
}
case string:
if x != "" {
var v any
if err := sonic.Unmarshal([]byte(x), &v); err == nil {
val = v
}
}
default:
// already a structured type (map[string]any, []any, etc.)
}
}
// special-case: timestamp column
if name == "timestamp" || name == "timestamp_datetime" {
switch t := val.(type) {


@@ -78,7 +78,7 @@ func newProvider(
telemetryMetadataStore,
)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
settings,
telemetryMetadataStore,
@@ -102,13 +102,14 @@ func newProvider(
// Create log statement builder
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, telemetryMetadataStore)
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
@@ -116,6 +117,7 @@ func newProvider(
telemetrylogs.DefaultFullTextColumn,
logFieldMapper,
logConditionBuilder,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder(
@@ -126,6 +128,7 @@ func newProvider(
logResourceFilterStmtBuilder,
logAggExprRewriter,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)


@@ -520,7 +520,7 @@ func (h *HostsRepo) GetHostList(ctx context.Context, orgID valuer.UUID, req mode
if _, ok := hostAttrs[record.HostName]; ok {
record.Meta = hostAttrs[record.HostName]
}
if osType, ok := record.Meta["os_type"]; ok {
if osType, ok := record.Meta[GetDotMetrics("os_type")]; ok {
record.OS = osType
}
record.Active = activeHosts[record.HostName]
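
The fix routes the os_type lookup through GetDotMetrics so the map key matches whichever naming convention the metrics pipeline emitted. A hedged sketch of what such a helper amounts to, assuming a flag that flips between underscore and dot attribute names and that strings is imported; the real helper lives in SigNoz's infra-metrics code and may differ:

// hypothetical stand-in for GetDotMetrics: with dot metrics enabled the
// attribute arrives as "os.type" rather than "os_type"
func getDotMetrics(key string, dotMetricsEnabled bool) string {
	if dotMetricsEnabled {
		return strings.ReplaceAll(key, "_", ".")
	}
	return key
}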


@@ -11,16 +11,13 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"go.uber.org/zap"
)
@@ -131,40 +128,6 @@ func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
return err
}
func (ic *LogParsingPipelineController) getDefaultPipelines() ([]pipelinetypes.GettablePipeline, error) {
defaultPipelines := []pipelinetypes.GettablePipeline{}
if querybuilder.BodyJSONQueryEnabled {
preprocessingPipeline := pipelinetypes.GettablePipeline{
StoreablePipeline: pipelinetypes.StoreablePipeline{
Name: "Default Pipeline - PreProcessing Body",
Alias: "NormalizeBodyDefault",
Enabled: true,
},
Filter: &v3.FilterSet{
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "body",
},
Operator: v3.FilterOperatorExists,
},
},
},
Config: []pipelinetypes.PipelineOperator{
{
ID: uuid.NewString(),
Type: "normalize",
Enabled: true,
If: "body != nil",
},
},
}
defaultPipelines = append(defaultPipelines, preprocessingPipeline)
}
return defaultPipelines, nil
}
// Returns effective list of pipelines including user created
// pipelines and pipelines for installed integrations
func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
@@ -295,13 +258,6 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
return nil, "", err
}
// recommend default pipelines along with user created pipelines
defaultPipelines, err := pc.getDefaultPipelines()
if err != nil {
return nil, "", model.InternalError(fmt.Errorf("failed to get default pipelines: %w", err))
}
pipelinesResp.Pipelines = append(pipelinesResp.Pipelines, defaultPipelines...)
updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
if err != nil {
return nil, "", err


@@ -132,7 +132,7 @@ func SignozLogsToPLogs(logs []model.SignozLog) []plog.Logs {
slRecord.SetSeverityText(log.SeverityText)
slRecord.SetSeverityNumber(plog.SeverityNumber(log.SeverityNumber))
slRecord.Body().FromRaw(log.Body)
slRecord.Body().SetStr(log.Body)
slAttribs := slRecord.Attributes()
for k, v := range log.Attributes_int64 {


@@ -20,6 +20,7 @@ type aggExprRewriter struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -30,6 +31,7 @@ func NewAggExprRewriter(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *aggExprRewriter {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
@@ -39,6 +41,7 @@ func NewAggExprRewriter(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -78,6 +81,7 @@ func (r *aggExprRewriter) Rewrite(
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
r.jsonBodyPrefix,
r.jsonKeyToKey,
)
// Rewrite the first select item (our expression)
@@ -125,6 +129,7 @@ type exprVisitor struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
Modified bool
chArgs []any
@@ -137,6 +142,7 @@ func newExprVisitor(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
@@ -145,6 +151,7 @@ func newExprVisitor(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -183,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -192,7 +199,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
ConditionBuilder: v.conditionBuilder,
FullTextColumn: v.fullTextColumn,
JsonKeyToKey: v.jsonKeyToKey,
}, 0, 0,
)
if err != nil {
return err
@@ -212,7 +219,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
@@ -230,7 +237,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i, arg := range args {
orig := arg.String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return err
}


@@ -24,6 +24,7 @@ func CollisionHandledFinalExpr(
cb qbtypes.ConditionBuilder,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
requiredDataType telemetrytypes.FieldDataType,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) (string, []any, error) {
@@ -44,7 +45,7 @@ func CollisionHandledFinalExpr(
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
if err != nil {
return err
}
@@ -57,8 +58,8 @@ func CollisionHandledFinalExpr(
return nil
}
colName, fieldForErr := fm.FieldFor(ctx, field)
if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
colName, err := fm.FieldFor(ctx, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
keysForField := keys[field.Name]
@@ -81,10 +82,10 @@ func CollisionHandledFinalExpr(
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
if found {
// we found a close match, in the error message send the suggestion
return "", nil, errors.WithAdditionalf(fieldForErr, "%s", correction)
return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", nil, errors.WithAdditionalf(fieldForErr, "field `%s` not found", field.Name)
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
}
} else {
for _, key := range keysForField {
@@ -103,11 +104,10 @@ func CollisionHandledFinalExpr(
return "", nil, err
}
// the first if condition covers the older tests and the second if condition covers the array conditions
if !BodyJSONQueryEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil {
if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
// TODO(nitya): enable group by on body column?
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
} else if strings.Contains(field.Name, telemetrytypes.ArraySep) || strings.Contains(field.Name, telemetrytypes.ArrayAnyIndex) {
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", field.Name)
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
} else {
colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
}
@@ -204,7 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// While we expect users not to send mixed data types, it inevitably happens,
// so we handle the data type collisions here
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
case telemetrytypes.FieldDataTypeString:
switch v := value.(type) {
case float64:
// try to convert the string value to a number
@@ -219,36 +219,8 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case telemetrytypes.FieldDataTypeFloat64,
telemetrytypes.FieldDataTypeArrayFloat64:
switch v := value.(type) {
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// check if it's a number inside a string
isNumber := false
if _, err := strconv.ParseFloat(v, 64); err == nil {
isNumber = true
}
if !operator.IsComparisonOperator() || !isNumber {
// try to convert the number attribute to string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
} else {
tblFieldName = castFloatHack(tblFieldName)
}
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
case telemetrytypes.FieldDataTypeInt64,
telemetrytypes.FieldDataTypeArrayInt64,
telemetrytypes.FieldDataTypeNumber,
telemetrytypes.FieldDataTypeArrayNumber:
case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
switch v := value.(type) {
// why? CH returns an error for a simple check
// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
@@ -286,8 +258,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
}
}
case telemetrytypes.FieldDataTypeBool,
telemetrytypes.FieldDataTypeArrayBool:
case telemetrytypes.FieldDataTypeBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)


@@ -43,6 +43,7 @@ type resourceFilterStatementBuilder[T any] struct {
signal telemetrytypes.Signal
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -75,6 +76,7 @@ func NewLogResourceFilterStatementBuilder(
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
@@ -85,6 +87,7 @@ func NewLogResourceFilterStatementBuilder(
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -97,18 +100,12 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
keySelectors = append(keySelectors, whereClauseSelectors...)
}
// exclude the body-related key selectors
filteredKeySelectors := []*telemetrytypes.FieldKeySelector{}
for idx := range keySelectors {
if keySelectors[idx].FieldContext == telemetrytypes.FieldContextBody {
continue
}
keySelectors[idx].Signal = b.signal
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
}
return filteredKeySelectors
return keySelectors
}
// Build builds a SQL query based on the given parameters
@@ -171,7 +168,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// there is no need for "key" not found error for resource filtering
IgnoreNotFoundKeys: true,
Variables: variables,
}, start, end)
if err != nil {
return err


@@ -17,6 +17,7 @@ import (
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/instrumentation"
@@ -105,6 +106,9 @@ type Config struct {
// MetricsExplorer config
MetricsExplorer metricsexplorer.Config `mapstructure:"metricsexplorer"`
// Flagger config
Flagger flagger.Config `mapstructure:"flagger"`
}
// DeprecatedFlags are the flags that are deprecated and scheduled for removal.
@@ -166,6 +170,7 @@ func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.R
gateway.NewConfigFactory(),
tokenizer.NewConfigFactory(),
metricsexplorer.NewConfigFactory(),
flagger.NewConfigFactory(),
}
conf, err := config.New(ctx, resolverConfig, configFactories)


@@ -2,6 +2,7 @@ package signoz
import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/global/signozglobal"
"github.com/SigNoz/signoz/pkg/licensing"
@@ -37,9 +38,10 @@ type Handlers struct {
Services services.Handler
MetricsExplorer metricsexplorer.Handler
Global global.Handler
FlaggerHandler flagger.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -51,5 +53,6 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
}
}


@@ -40,7 +40,7 @@ func TestNewHandlers(t *testing.T) {
require.NoError(t, err)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
handlers := NewHandlers(modules, providerSettings, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {


@@ -8,6 +8,7 @@ import (
"github.com/SigNoz/signoz/pkg/apiserver"
"github.com/SigNoz/signoz/pkg/apiserver/signozapiserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/instrumentation"
@@ -40,6 +41,7 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ preference.Handler }{},
struct{ global.Handler }{},
struct{ promote.Handler }{},
struct{ flagger.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err


@@ -18,6 +18,8 @@ import (
"github.com/SigNoz/signoz/pkg/emailing/noopemailing"
"github.com/SigNoz/signoz/pkg/emailing/smtpemailing"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/flagger/configflagger"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/global/signozglobal"
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
@@ -54,6 +56,7 @@ import (
"github.com/SigNoz/signoz/pkg/tokenizer/opaquetokenizer"
"github.com/SigNoz/signoz/pkg/tokenizer/tokenizerstore/sqltokenizerstore"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/web"
"github.com/SigNoz/signoz/pkg/web/noopweb"
@@ -236,6 +239,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
implpreference.NewHandler(modules.Preference),
signozglobal.NewHandler(global),
implpromote.NewHandler(modules.Promote),
handlers.FlaggerHandler,
),
)
}
@@ -253,3 +257,9 @@ func NewGlobalProviderFactories() factory.NamedMap[factory.ProviderFactory[globa
signozglobal.NewFactory(),
)
}
func NewFlaggerProviderFactories(registry featuretypes.Registry) factory.NamedMap[factory.ProviderFactory[flagger.FlaggerProvider, flagger.Config]] {
return factory.MustNewNamedMap(
configflagger.NewFactory(registry),
)
}


@@ -14,6 +14,7 @@ import (
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -66,6 +67,7 @@ type SigNoz struct {
Modules Modules
Handlers Handlers
QueryParser queryparser.QueryParser
Flagger flagger.Flagger
}
func New(
@@ -356,11 +358,25 @@ func New(
return nil, err
}
// Initialize flagger from the available flagger provider factories
flaggerRegistry := flagger.MustNewRegistry()
flaggerProviderFactories := NewFlaggerProviderFactories(flaggerRegistry)
flagger, err := flagger.New(
ctx,
providerSettings,
config.Flagger,
flaggerRegistry,
flaggerProviderFactories.GetInOrder()...,
)
if err != nil {
return nil, err
}
// Initialize all modules
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(
@@ -434,5 +450,6 @@ func New(
Modules: modules,
Handlers: handlers,
QueryParser: queryParser,
Flagger: flagger,
}, nil
}


@@ -16,12 +16,11 @@ import (
)
type conditionBuilder struct {
fm qbtypes.FieldMapper
metadataStore telemetrytypes.MetadataStore
fm qbtypes.FieldMapper
}
func NewConditionBuilder(fm qbtypes.FieldMapper, metadataStore telemetrytypes.MetadataStore) *conditionBuilder {
return &conditionBuilder{fm: fm, metadataStore: metadataStore}
func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
return &conditionBuilder{fm: fm}
}
func (c *conditionBuilder) conditionFor(
@@ -31,34 +30,22 @@ func (c *conditionBuilder) conditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
return "", err
}
// For JSON columns, preserve the original value type (numeric, bool, etc.)
// Only format to string for non-JSON columns that need string formatting
isJSONColumn := column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled && key.FieldContext == telemetrytypes.FieldContextBody
if !isJSONColumn {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
}
if isJSONColumn {
cond, err := c.buildJSONCondition(ctx, key, operator, value, sb)
if err != nil {
return "", err
}
return cond, nil
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
return "", err
@@ -176,7 +163,9 @@ func (c *conditionBuilder) conditionFor(
// in the UI based query builder, `exists` and `not exists` are used for
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled {
// Check if this is a body JSON search - by FieldContext
if key.FieldContext == telemetrytypes.FieldContextBody {
if operator == qbtypes.FilterOperatorExists {
return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
} else {
@@ -258,7 +247,7 @@ func (c *conditionBuilder) ConditionFor(
return "", err
}
if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
// with an exception for body json search
field, _ := c.fm.FieldFor(ctx, key)

View File

@@ -373,8 +373,7 @@ func TestConditionFor(t *testing.T) {
}
fm := NewFieldMapper()
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
@@ -427,8 +426,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
}
fm := NewFieldMapper()
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
@@ -687,8 +685,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
}
fm := NewFieldMapper()
mockMetadataStore := buildTestTelemetryMetadataStore()
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
conditionBuilder := NewConditionBuilder(fm)
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()


@@ -2,6 +2,7 @@ package telemetrylogs
import (
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -36,6 +37,8 @@ const (
BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
ArraySep = jsontypeexporter.ArraySeparator
ArrayAnyIndex = "[*]."
)
var (
@@ -45,7 +48,8 @@ var (
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
}
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
BodyJSONStringSearchPrefix = `body.`
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
"body": {
Name: "body",
Signal: telemetrytypes.SignalLogs,

View File

@@ -6,9 +6,7 @@ import (
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz-otel-collector/utils"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -30,11 +28,6 @@ var (
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
"body": {Name: "body", Type: schema.ColumnTypeString},
LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
MaxDynamicTypes: utils.ToPointer(uint(32)),
MaxDynamicPaths: utils.ToPointer(uint(0)),
}},
LogsV2BodyPromotedColumn: {Name: LogsV2BodyPromotedColumn, Type: schema.JSONColumnType{}},
"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeString,
@@ -90,23 +83,13 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
return logsV2Columns["attributes_bool"], nil
}
case telemetrytypes.FieldContextBody:
// Body context is for JSON body fields
// Use body_json if feature flag is enabled
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
}
// Fall back to legacy body column
// body context fields are stored in the body column
return logsV2Columns["body"], nil
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
col, ok := logsV2Columns[key.Name]
if !ok {
// check if the key has body JSON search
if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
// Use body_json if feature flag is enabled and we have a body condition builder
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
}
// Fall back to legacy body column
// check if the key has body JSON search (backward compatibility)
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
return logsV2Columns["body"], nil
}
return nil, qbtypes.ErrColumnNotFound
@@ -126,34 +109,21 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
switch column.Type.GetType() {
case schema.ColumnTypeEnumJSON:
// json is only supported for resource context as of now
switch key.FieldContext {
case telemetrytypes.FieldContextResource:
oldColumn := logsV2Columns["resources_string"]
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
// have to add ::string as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
// once the ClickHouse dependency is updated, we need to check if we can remove it
if key.Materialized {
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
}
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
case telemetrytypes.FieldContextBody:
if strings.Contains(key.Name, telemetrytypes.ArraySep) || strings.Contains(key.Name, telemetrytypes.ArrayAnyIndex) {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", key.Name)
}
fieldExpr := BodyJSONColumnPrefix + fmt.Sprintf("`%s`", key.Name)
expr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldExpr, key.JSONDataType.StringValue())
if key.Materialized {
promotedFieldExpr := BodyPromotedColumnPrefix + fmt.Sprintf("`%s`", key.Name)
expr = fmt.Sprintf("coalesce(%s, %s)", expr, fmt.Sprintf("dynamicElement(%s, '%s')", promotedFieldExpr, key.JSONDataType.StringValue()))
}
// returning qbtypes.ErrColumnNotFound is a hack that will trigger the fallback expr logic to include all the types for the key
return expr, qbtypes.ErrColumnNotFound
default:
if key.FieldContext != telemetrytypes.FieldContextResource {
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
}
oldColumn := logsV2Columns["resources_string"]
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
// have to add ::string as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
// once the ClickHouse dependency is updated, we need to check if we can remove it
if key.Materialized {
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
} else {
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
}
case schema.ColumnTypeEnumLowCardinality:
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
case schema.ColumnTypeEnumString:


@@ -11,7 +11,7 @@ import (
// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
@@ -33,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)
@@ -47,7 +47,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
// TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
@@ -69,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)


@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
@@ -15,7 +14,8 @@ import (
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, telemetrytypestest.NewMockMetadataStore())
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()


@@ -16,7 +16,7 @@ import (
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
@@ -2423,7 +2423,7 @@ func TestFilterExprLogs(t *testing.T) {
// TestFilterExprLogsConflictNegation tests a comprehensive set of query patterns for logs search
func TestFilterExprLogsConflictNegation(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
cb := NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()


@@ -84,6 +84,7 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
}
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
dataType, value := inferDataType(value, operator, key)
// for array types, we need to extract the value from the JSON_QUERY


@@ -30,7 +30,7 @@ func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, paren
}
part := pb.parts[index]
pathSoFar := strings.Join(pb.parts[:index+1], telemetrytypes.ArraySep)
pathSoFar := strings.Join(pb.parts[:index+1], ArraySep)
isTerminal := index == len(pb.parts)-1
// Calculate progression parameters based on parent's values
@@ -110,8 +110,8 @@ func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbt
// TODO: PlanJSON requires the Start and End of the Query to select correct column between promoted and body_json using
// creation time in distributed_promoted_paths
path := strings.ReplaceAll(key.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
parts := strings.Split(path, telemetrytypes.ArraySep)
path := strings.ReplaceAll(key.Name, ArrayAnyIndex, ArraySep)
parts := strings.Split(path, ArraySep)
pb := &JSONAccessPlanBuilder{
key: key,


@@ -814,6 +814,9 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
// testTypeSet returns a map of path->types and a getTypes function for testing
// This represents the type information available in the test JSON structure
//
// TODO(Piyush): Remove this unparam nolint
// nolint:unparam
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
types := map[string][]telemetrytypes.JSONDataType{
"user.name": {telemetrytypes.String},


@@ -1,455 +0,0 @@
package telemetrylogs
import (
"context"
"fmt"
"slices"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
)
var (
CodeCurrentNodeNil = errors.MustNewCode("current_node_nil")
CodeNextNodeNil = errors.MustNewCode("next_node_nil")
CodeNestedExpressionsEmpty = errors.MustNewCode("nested_expressions_empty")
CodeGroupByPlanEmpty = errors.MustNewCode("group_by_plan_empty")
CodeArrayMapExpressionsEmpty = errors.MustNewCode("array_map_expressions_empty")
CodePromotedPlanMissing = errors.MustNewCode("promoted_plan_missing")
CodeArrayNavigationFailed = errors.MustNewCode("array_navigation_failed")
)
func (c *conditionBuilder) getTypes(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
keys, _, err := c.metadataStore.GetKeys(ctx, &telemetrytypes.FieldKeySelector{
Name: path,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
Signal: telemetrytypes.SignalLogs,
Limit: 1,
})
if err != nil {
return nil, err
}
types := []telemetrytypes.JSONDataType{}
for _, key := range keys[path] {
if key.JSONDataType != nil {
types = append(types, *key.JSONDataType)
}
}
return types, nil
}
// BuildCondition builds the full WHERE condition for body_json JSON paths
func (c *conditionBuilder) buildJSONCondition(ctx context.Context, key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
plan, err := PlanJSON(ctx, key, operator, value, c.getTypes)
if err != nil {
return "", err
}
conditions := []string{}
for _, plan := range plan {
condition, err := c.emitPlannedCondition(plan, operator, value, sb)
if err != nil {
return "", err
}
conditions = append(conditions, condition)
}
return sb.Or(conditions...), nil
}
// emitPlannedCondition handles paths with array traversal
func (c *conditionBuilder) emitPlannedCondition(plan *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Build traversal + terminal recursively per-hop
compiled, err := c.recurseArrayHops(plan, operator, value, sb)
if err != nil {
return "", err
}
// sb.AddWhereClause(sqlbuilder.NewWhereClause().AddWhereExpr(sb.Args, compiled))
return compiled, nil
}
// buildTerminalCondition creates the innermost condition
func (c *conditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Use the parent's alias + current field name for the full path
fieldPath := node.FieldPath()
if node.TerminalConfig.ElemType.IsArray {
// switch operator for array membership checks
switch operator {
case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
operator = qbtypes.FilterOperatorEqual
case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
operator = qbtypes.FilterOperatorNotEqual
}
arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
if err != nil {
return "", err
}
return arrayCond, nil
}
conditions := []string{}
elemType := node.TerminalConfig.ElemType
fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
fieldExpr, value = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, fieldExpr, operator)
indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
return index.Type == elemType && index.ColumnExpression == fieldPath
})
if elemType.IndexSupported && indexed {
indexedExpr := assumeNotNull(fieldPath, elemType)
emptyValue := func() any {
switch elemType {
case telemetrytypes.String:
return ""
case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
return 0
default:
return nil
}
}()
// switch the operator and value for exists and not exists
switch operator {
case qbtypes.FilterOperatorExists:
operator = qbtypes.FilterOperatorNotEqual
value = emptyValue
case qbtypes.FilterOperatorNotExists:
operator = qbtypes.FilterOperatorEqual
value = emptyValue
default:
// do nothing
}
cond, err := c.applyOperator(sb, indexedExpr, operator, value)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
// Switch operator to EXISTS
operator = qbtypes.FilterOperatorExists
}
cond, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
if len(conditions) > 1 {
return sb.And(conditions...), nil
}
return cond, nil
}
// buildArrayMembershipCondition handles array membership checks
func (c *conditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
arrayPath := node.FieldPath()
// create typed array out of a dynamic array
filteredDynamicExpr := func() string {
baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
node.TerminalConfig.ValueType.StringValue(),
node.TerminalConfig.ValueType.StringValue(),
baseArrayDynamicExpr)
}
typedArrayExpr := func() string {
return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
}
var arrayExpr string
if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
arrayExpr = filteredDynamicExpr()
} else {
arrayExpr = typedArrayExpr()
}
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, "x", operator)
op, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}
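
Membership operators compile to an arrayExists over a typed view of the stored array: a typed dynamicElement when the array has a concrete element type, or an arrayFilter/arrayMap pipeline that narrows an Array(Dynamic) down to the requested value type. Roughly this shape, with a made-up path and type for illustration:

// Hypothetical membership check for body.tags containing "prod", assuming
// the path is stored as Array(String); Contains/In degrade to equality
// inside the lambda as in the operator switch above.
const exampleMembership = "arrayExists(x -> x = 'prod', " +
	"dynamicElement(body_json.tags, 'Array(String)'))"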
// recurseArrayHops recursively builds array traversal conditions
func (c *conditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
if current == nil {
return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
}
if current.IsTerminal {
terminalCond, err := c.buildTerminalCondition(current, operator, value, sb)
if err != nil {
return "", err
}
return terminalCond, nil
}
currAlias := current.Alias()
fieldPath := current.FieldPath()
// Determine availability of Array(JSON) and Array(Dynamic) at this hop
hasArrayJSON := current.Branches[telemetrytypes.BranchJSON] != nil
hasArrayDynamic := current.Branches[telemetrytypes.BranchDynamic] != nil
// Then, at this hop, compute child per branch and wrap
branches := make([]string, 0, 2)
if hasArrayJSON {
jsonArrayExpr := fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')", fieldPath, current.MaxDynamicTypes, current.MaxDynamicPaths)
childGroupJSON, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchJSON], operator, value, sb)
if err != nil {
return "", err
}
branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupJSON, jsonArrayExpr))
}
if hasArrayDynamic {
dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", fieldPath)
dynFilteredExpr := fmt.Sprintf("arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
// Create the Query for Dynamic array
childGroupDyn, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchDynamic], operator, value, sb)
if err != nil {
return "", err
}
branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupDyn, dynFilteredExpr))
}
if len(branches) == 1 {
return branches[0], nil
}
return fmt.Sprintf("(%s)", strings.Join(branches, " OR ")), nil
}
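
Each non-terminal hop can exist under two physical encodings, Array(JSON) or Array(Dynamic) filtered down to JSON, so recurseArrayHops wraps the child condition in one arrayExists per available branch and OR's the wrappers. A hedged sketch of the nesting for a filter like body.items[*].price > 10, assuming only the Array(JSON) branch is present; the alias and max_dynamic_* values are invented:

// Hypothetical single-branch output; real aliases and JSON type
// parameters come from the plan nodes.
const exampleHop = "arrayExists(i0 -> " +
	"dynamicElement(i0.price, 'Float64') > 10, " +
	"dynamicElement(body_json.items, " +
	"'Array(JSON(max_dynamic_types=16, max_dynamic_paths=64))'))"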
func (c *conditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, fieldExpr string, operator qbtypes.FilterOperator, value any) (string, error) {
switch operator {
case qbtypes.FilterOperatorEqual:
return sb.E(fieldExpr, value), nil
case qbtypes.FilterOperatorNotEqual:
return sb.NE(fieldExpr, value), nil
case qbtypes.FilterOperatorGreaterThan:
return sb.G(fieldExpr, value), nil
case qbtypes.FilterOperatorGreaterThanOrEq:
return sb.GE(fieldExpr, value), nil
case qbtypes.FilterOperatorLessThan:
return sb.LT(fieldExpr, value), nil
case qbtypes.FilterOperatorLessThanOrEq:
return sb.LE(fieldExpr, value), nil
case qbtypes.FilterOperatorLike:
return sb.Like(fieldExpr, value), nil
case qbtypes.FilterOperatorNotLike:
return sb.NotLike(fieldExpr, value), nil
case qbtypes.FilterOperatorILike:
return sb.ILike(fieldExpr, value), nil
case qbtypes.FilterOperatorNotILike:
return sb.NotILike(fieldExpr, value), nil
case qbtypes.FilterOperatorRegexp:
return fmt.Sprintf("match(%s, %s)", fieldExpr, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
return fmt.Sprintf("NOT match(%s, %s)", fieldExpr, sb.Var(value)), nil
case qbtypes.FilterOperatorContains:
return sb.ILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorNotContains:
return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
values, ok := value.([]any)
if !ok {
values = []any{value}
}
conds := []string{}
for _, v := range values {
if operator == qbtypes.FilterOperatorIn {
conds = append(conds, sb.E(fieldExpr, v))
} else {
conds = append(conds, sb.NE(fieldExpr, v))
}
}
if operator == qbtypes.FilterOperatorIn {
return sb.Or(conds...), nil
}
return sb.And(conds...), nil
case qbtypes.FilterOperatorExists:
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
case qbtypes.FilterOperatorNotExists:
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
default:
return "", qbtypes.ErrUnsupportedOperator
}
}
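
The IN/NOT IN arm deliberately expands to OR/AND over per-value equality instead of a SQL IN list, so every comparison keeps the same shape as the single-value operators and can hit the same indexes. A runnable sketch of that expansion using huandu/go-sqlbuilder, which the surrounding code already uses; the table and column names are made up:

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

// inAsOr mirrors the IN emulation above: one equality per value, OR'd.
func inAsOr(sb *sqlbuilder.SelectBuilder, field string, values []any) string {
	conds := make([]string, 0, len(values))
	for _, v := range values {
		conds = append(conds, sb.E(field, v))
	}
	return sb.Or(conds...)
}

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("count()").From("logs")
	sb.Where(inAsOr(sb, "severity_text", []any{"ERROR", "FATAL"}))
	sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(sql, args)
	// SELECT count() FROM logs WHERE (severity_text = ? OR severity_text = ?) [ERROR FATAL]
}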
// GroupByArrayJoinInfo contains information about array joins needed for GroupBy
type GroupByArrayJoinInfo struct {
ArrayJoinClauses []string // ARRAY JOIN clauses to add to FROM clause
TerminalExpr string // Terminal field expression for SELECT/GROUP BY
}
// BuildGroupBy builds GroupBy information for body JSON fields using arrayConcat pattern
func (c *conditionBuilder) BuildGroupBy(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*GroupByArrayJoinInfo, error) {
path := strings.TrimPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix)
plan, err := PlanJSON(ctx, key, qbtypes.FilterOperatorExists, nil, c.getTypes)
if err != nil {
return nil, err
}
if len(plan) == 0 {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
"Could not find any valid paths for: %s", path)
}
if plan[0].IsTerminal {
node := plan[0]
expr := fmt.Sprintf("dynamicElement(%s, '%s')", node.FieldPath(), node.TerminalConfig.ElemType.StringValue())
if key.Materialized {
if len(plan) < 2 {
return nil, errors.Newf(errors.TypeUnexpected, CodePromotedPlanMissing,
"plan length is less than 2 for promoted path: %s", path)
}
// promoted column first then body_json column
// TODO(Piyush): Change this in future for better performance
expr = fmt.Sprintf("coalesce(%s, %s)",
fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
expr,
)
}
return &GroupByArrayJoinInfo{
ArrayJoinClauses: []string{},
TerminalExpr: expr,
}, nil
}
// Build arrayConcat pattern directly from the tree structure
arrayConcatExpr, err := c.buildArrayConcat(plan)
if err != nil {
return nil, err
}
// Create single ARRAY JOIN clause with arrayFlatten
arrayJoinClause := fmt.Sprintf("ARRAY JOIN %s AS `%s`", arrayConcatExpr, key.Name)
return &GroupByArrayJoinInfo{
ArrayJoinClauses: []string{arrayJoinClause},
TerminalExpr: fmt.Sprintf("`%s`", key.Name),
}, nil
}
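
For terminal scalar paths BuildGroupBy returns a plain (possibly coalesced) dynamicElement expression, but for array paths it flattens every candidate array into a single ARRAY JOIN so each element becomes its own group-by row. A hypothetical clause for grouping by body.items[*].status; expression shape only, since the aliases and JSON parameters come from the plan:

const exampleArrayJoin = "ARRAY JOIN arrayFlatten(arrayConcat(" +
	"arrayMap(i0 -> dynamicElement(i0.status, 'String'), " +
	"dynamicElement(body_json.items, " +
	"'Array(JSON(max_dynamic_types=16, max_dynamic_paths=64))')))) " +
	"AS `body.items[*].status`"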
// buildArrayConcat builds the arrayConcat pattern directly from the tree structure
func (c *conditionBuilder) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (string, error) {
if len(plan) == 0 {
return "", errors.Newf(errors.TypeInternal, CodeGroupByPlanEmpty, "group by plan is empty while building arrayConcat")
}
// Build arrayMap expressions for ALL available branches at the root level
var arrayMapExpressions []string
for _, node := range plan {
hasJSON := node.Branches[telemetrytypes.BranchJSON] != nil
hasDynamic := node.Branches[telemetrytypes.BranchDynamic] != nil
if hasJSON {
jsonExpr, err := c.buildArrayMap(node, telemetrytypes.BranchJSON)
if err != nil {
return "", err
}
arrayMapExpressions = append(arrayMapExpressions, jsonExpr)
}
if hasDynamic {
dynamicExpr, err := c.buildArrayMap(node, telemetrytypes.BranchDynamic)
if err != nil {
return "", err
}
arrayMapExpressions = append(arrayMapExpressions, dynamicExpr)
}
}
if len(arrayMapExpressions) == 0 {
return "", errors.Newf(errors.TypeInternal, CodeArrayMapExpressionsEmpty, "array map expressions are empty while building arrayConcat")
}
// Build the arrayConcat expression
arrayConcatExpr := fmt.Sprintf("arrayConcat(%s)", strings.Join(arrayMapExpressions, ", "))
// Wrap with arrayFlatten
arrayFlattenExpr := fmt.Sprintf("arrayFlatten(%s)", arrayConcatExpr)
return arrayFlattenExpr, nil
}
// buildArrayMap builds the arrayMap expression for a specific branch, handling all sub-branches
func (c *conditionBuilder) buildArrayMap(currentNode *telemetrytypes.JSONAccessNode, branchType telemetrytypes.JSONAccessBranchType) (string, error) {
if currentNode == nil {
return "", errors.Newf(errors.TypeInternal, CodeCurrentNodeNil, "current node is nil while building arrayMap")
}
nextNode := currentNode.Branches[branchType]
if nextNode == nil {
return "", errors.Newf(errors.TypeInternal, CodeNextNodeNil, "next node is nil while building arrayMap")
}
// Build the array expression for this level
var arrayExpr string
if branchType == telemetrytypes.BranchJSON {
// Array(JSON) branch
arrayExpr = fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')",
currentNode.FieldPath(), currentNode.MaxDynamicTypes, currentNode.MaxDynamicPaths)
} else {
// Array(Dynamic) branch - filter for JSON objects
dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", currentNode.FieldPath())
arrayExpr = fmt.Sprintf("arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
}
// If this is the terminal level, return the simple arrayMap
if nextNode.IsTerminal {
dynamicElementExpr := fmt.Sprintf("dynamicElement(%s, '%s')", nextNode.FieldPath(),
nextNode.TerminalConfig.ElemType.StringValue(),
)
return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), dynamicElementExpr, arrayExpr), nil
}
// For non-terminal nodes, we need to handle ALL possible branches at the next level
var nestedExpressions []string
hasJSON := nextNode.Branches[telemetrytypes.BranchJSON] != nil
hasDynamic := nextNode.Branches[telemetrytypes.BranchDynamic] != nil
if hasJSON {
jsonNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchJSON)
if err != nil {
return "", err
}
nestedExpressions = append(nestedExpressions, jsonNested)
}
if hasDynamic {
dynamicNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchDynamic)
if err != nil {
return "", err
}
nestedExpressions = append(nestedExpressions, dynamicNested)
}
// If we have multiple nested expressions, we need to concat them
var nestedExpr string
if len(nestedExpressions) == 1 {
nestedExpr = nestedExpressions[0]
} else if len(nestedExpressions) > 1 {
// This shouldn't happen in our current tree structure, but handle it just in case
nestedExpr = fmt.Sprintf("arrayConcat(%s)", strings.Join(nestedExpressions, ", "))
} else {
return "", errors.Newf(errors.TypeInternal, CodeNestedExpressionsEmpty, "nested expressions are empty while building arrayMap")
}
return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), nestedExpr, arrayExpr), nil
}
func assumeNotNull(column string, elemType telemetrytypes.JSONDataType) string {
return fmt.Sprintf("assumeNotNull(dynamicElement(%s, '%s'))", column, elemType.StringValue())
}

File diff suppressed because one or more lines are too long

View File

@@ -23,6 +23,7 @@ type logQueryStatementBuilder struct {
aggExprRewriter qbtypes.AggExprRewriter
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
@@ -36,6 +37,7 @@ func NewLogQueryStatementBuilder(
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
aggExprRewriter qbtypes.AggExprRewriter,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *logQueryStatementBuilder {
logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
@@ -48,6 +50,7 @@ func NewLogQueryStatementBuilder(
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
aggExprRewriter: aggExprRewriter,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -168,25 +171,6 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
overallMatch = overallMatch || findMatch(IntrinsicFields)
}
if strings.Contains(k.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
k.Name = strings.TrimPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix)
fieldKeys, found := keys[k.Name]
if found && len(fieldKeys) > 0 {
k.FieldContext = fieldKeys[0].FieldContext
k.FieldDataType = fieldKeys[0].FieldDataType
k.Materialized = fieldKeys[0].Materialized
k.JSONDataType = fieldKeys[0].JSONDataType
k.Indexes = fieldKeys[0].Indexes
overallMatch = true // because we found a match
} else {
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
k.FieldContext = telemetrytypes.FieldContextBody
k.FieldDataType = telemetrytypes.FieldDataTypeString
k.JSONDataType = &telemetrytypes.String
}
}
if !overallMatch {
// check if all the key for the given field have been materialized, if so
// set the key to materialized
@@ -237,9 +221,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
cteArgs = append(cteArgs, args)
}
// Collect array join info for body JSON fields
var arrayJoinClauses []string
// Select timestamp and id by default
sb.Select(LogsV2TimestampColumn)
sb.SelectMore(LogsV2IDColumn)
@@ -253,10 +234,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
sb.SelectMore(LogsV2ScopeNameColumn)
sb.SelectMore(LogsV2ScopeVersionColumn)
sb.SelectMore(LogsV2BodyColumn)
if querybuilder.BodyJSONQueryEnabled {
sb.SelectMore(LogsV2BodyJSONColumn)
sb.SelectMore(LogsV2BodyPromotedColumn)
}
sb.SelectMore(LogsV2AttributesStringColumn)
sb.SelectMore(LogsV2AttributesNumberColumn)
sb.SelectMore(LogsV2AttributesBoolColumn)
@@ -269,7 +246,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn {
continue
}
// get column expression for the field - use array index directly to avoid pointer to loop variable
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
if err != nil {
@@ -279,12 +255,8 @@ func (b *logQueryStatementBuilder) buildListQuery(
}
}
// From table (inject ARRAY JOINs if collected)
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
// From table
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -358,17 +330,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
var allGroupByArgs []any
// Collect array join info for body JSON fields
var arrayJoinClauses []string
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
@@ -390,13 +358,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
}
// Add FROM clause
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
@@ -442,6 +404,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
// Stitch it all together: WITH … SELECT …
@@ -468,6 +431,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
// Stitch it all together: WITH … SELECT …
@@ -514,15 +478,11 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
// Collect array join info for body JSON fields
var arrayJoinClauses []string
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
@@ -548,12 +508,8 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
}
}
// From table (inject ARRAY JOINs if collected)
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
// From table
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -698,6 +654,7 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
start, end uint64,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
return b.resourceFilterStmtBuilder.Build(
ctx,
start,

View File

@@ -32,6 +32,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
cb,
mockMetadataStore,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
}
@@ -196,11 +197,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -212,6 +213,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -316,11 +318,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -332,6 +334,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -424,11 +427,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -440,11 +443,10 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
@@ -489,8 +491,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "status",
FieldContext: telemetrytypes.FieldContextBody,
Name: "body.status",
},
},
},
@@ -500,11 +501,11 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -516,6 +517,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
@@ -595,11 +597,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -611,6 +613,7 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)

View File

@@ -47,6 +47,9 @@ var (
//
// searchOperator: LIKE for pattern matching, EQUAL for exact match
// Returns: (paths, error)
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
@@ -132,7 +135,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
orClauses := []string{}
for _, fieldKeySelector := range fieldKeySelectors {
// replace [*] with []
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrylogs.ArrayAnyIndex, telemetrylogs.ArraySep)
// Extract search text for body JSON keys
keyName := CleanPathPrefixes(fieldKeySelector.Name)
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
@@ -159,11 +162,13 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
return query, args, limit, nil
}
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
filteredPaths := []string{}
for _, path := range paths {
if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
continue
}
filteredPaths = append(filteredPaths, path)
@@ -291,7 +296,7 @@ func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...str
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
path = CleanPathPrefixes(path)
if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
}
@@ -451,7 +456,7 @@ func derefValue(v any) any {
// IsPathPromoted checks if a specific path is promoted
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
split := strings.Split(path, telemetrytypes.ArraySep)
split := strings.Split(path, telemetrylogs.ArraySep)
query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
if err != nil {

View File

@@ -572,14 +572,6 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
}
if querybuilder.BodyJSONQueryEnabled {
bodyJSONPaths, finished, err := t.getBodyJSONPaths(ctx, fieldKeySelectors) // LIKE for pattern matching
if err != nil {
t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
}
keys = append(keys, bodyJSONPaths...)
complete = complete && finished
}
return keys, complete, nil
}
@@ -986,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {
@@ -1010,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
@@ -1172,10 +1164,6 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
limit = 50
}
if strings.HasPrefix(fieldValueSelector.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
return t.ListJSONValues(ctx, fieldValueSelector.Name, limit)
}
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)
if fieldValueSelector.Name != "" {

View File

@@ -32,8 +32,6 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
options.MaxIdleConns = config.Connection.MaxIdleConns
options.MaxOpenConns = config.Connection.MaxOpenConns
options.DialTimeout = config.Connection.DialTimeout
// This is to avoid the driver decoding issues with JSON columns
options.Settings["output_format_native_write_json_as_string"] = 1
chConn, err := clickhouse.Open(options)
if err != nil {

View File

@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
}, start, end)
if err != nil {
return nil, err

View File

@@ -357,7 +357,7 @@ func TestStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -525,7 +525,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -681,7 +681,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()

View File

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
ConditionBuilder: b.stmtBuilder.cb,
FieldKeys: keys,
SkipResourceFilter: true,
}, b.start, b.end,
)
if err != nil {
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -552,6 +552,7 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {
@@ -661,6 +662,7 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {
@@ -800,6 +802,7 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
b.stmtBuilder.cb,
keys,
telemetrytypes.FieldDataTypeString,
"",
nil,
)
if err != nil {

View File

@@ -390,7 +390,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(
@@ -506,7 +506,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(

View File

@@ -44,7 +44,7 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
mockMetadataStore,
)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),

View File

@@ -0,0 +1,23 @@
package featuretypes
import (
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/open-feature/go-sdk/openfeature"
)
// A concrete wrapper around the openfeature.EvaluationContext
type FlaggerEvaluationContext struct {
ctx openfeature.EvaluationContext
}
// NewFlaggerEvaluationContext creates an evaluation context scoped to the given org ID
func NewFlaggerEvaluationContext(orgID valuer.UUID) FlaggerEvaluationContext {
ctx := openfeature.NewTargetlessEvaluationContext(map[string]any{
"orgId": orgID.String(),
})
return FlaggerEvaluationContext{ctx: ctx}
}
func (ctx FlaggerEvaluationContext) Ctx() openfeature.EvaluationContext {
return ctx.ctx
}
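
A minimal usage sketch: build the org-scoped context and hand it to an OpenFeature client. The import path for featuretypes, the client name, and the flag name below are assumptions for illustration; only NewFlaggerEvaluationContext and the go-sdk calls come from this change.

package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/featuretypes" // assumed import path
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/open-feature/go-sdk/openfeature"
)

// isEnabled resolves a hypothetical boolean flag for one org.
func isEnabled(ctx context.Context, orgID valuer.UUID) bool {
	evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
	client := openfeature.NewClient("flagger") // client name is illustrative
	enabled, err := client.BooleanValue(ctx, "use_new_query_builder", false, evalCtx.Ctx())
	if err != nil {
		return false // fall back to the default on resolution errors
	}
	return enabled
}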

View File

@@ -0,0 +1,113 @@
package featuretypes
import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/open-feature/go-sdk/openfeature"
)
var (
ErrCodeFeatureVariantNotFound = errors.MustNewCode("feature_variant_not_found")
ErrCodeFeatureValueNotFound = errors.MustNewCode("feature_value_not_found")
ErrCodeFeatureVariantKindMismatch = errors.MustNewCode("feature_variant_kind_mismatch")
ErrCodeFeatureDefaultVariantNotFound = errors.MustNewCode("feature_default_variant_not_found")
ErrCodeFeatureNotFound = errors.MustNewCode("feature_not_found")
)
// A concrete type for a feature flag
type Feature struct {
// Name of the feature
Name Name `json:"name"`
// Kind of the feature
Kind Kind `json:"kind"`
// Stage of the feature
Stage Stage `json:"stage"`
// Description of the feature
Description string `json:"description"`
// DefaultVariant of the feature
DefaultVariant Name `json:"defaultVariant"`
// Variants of the feature
Variants map[Name]FeatureVariant `json:"variants"`
}
// A concrete type for a feature flag variant
type FeatureVariant struct {
// Name of the variant
Variant Name `json:"variant"`
// Value of the variant
Value any `json:"value"`
}
type GettableFeature struct {
Name string `json:"name"`
Kind string `json:"kind"`
Stage string `json:"stage"`
Description string `json:"description"`
DefaultVariant string `json:"defaultVariant"`
Variants map[string]any `json:"variants"`
ResolvedValue any `json:"resolvedValue"`
}
// VariantValue returns the typed value of the given variant of a feature, along with the resolution detail
func VariantValue[T any](feature *Feature, variant Name) (t T, detail openfeature.ProviderResolutionDetail, err error) {
value, ok := feature.Variants[variant]
if !ok {
err = errors.Newf(errors.TypeInvalidInput, ErrCodeFeatureVariantNotFound, "variant %s not found for feature %s in variants %v", variant.String(), feature.Name.String(), feature.Variants)
detail = openfeature.ProviderResolutionDetail{
ResolutionError: openfeature.NewGeneralResolutionError(err.Error()),
Reason: openfeature.ErrorReason,
Variant: feature.DefaultVariant.String(),
}
return
}
t, ok = value.Value.(T)
if !ok {
err = errors.Newf(errors.TypeInvalidInput, ErrCodeFeatureVariantKindMismatch, "variant %s for feature %s has type %T, expected %T", variant.String(), feature.Name.String(), value.Value, t)
detail = openfeature.ProviderResolutionDetail{
ResolutionError: openfeature.NewTypeMismatchResolutionError(err.Error()),
Reason: openfeature.ErrorReason,
Variant: variant.String(),
}
return
}
detail = openfeature.ProviderResolutionDetail{
Reason: openfeature.StaticReason,
Variant: variant.String(),
}
return
}
// VariantByValue returns the variant of the given feature whose value equals the given value
func VariantByValue[T comparable](feature *Feature, value T) (featureVariant *FeatureVariant, err error) {
// Object values are not comparable, so this method should not normally be
// called for object-kind features; fall back to the default variant.
// TODO: revisit this fallback behavior.
if feature.Kind == KindObject {
return &FeatureVariant{Variant: feature.DefaultVariant, Value: value}, nil
}
for _, variant := range feature.Variants {
if variant.Value == value {
return &variant, nil
}
}
return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeFeatureVariantNotFound, "no variant found for value %v for feature %s in variants %v", value, feature.Name.String(), feature.Variants)
}
func NewBooleanVariants() map[Name]FeatureVariant {
return map[Name]FeatureVariant{
MustNewName("disabled"): {
Variant: MustNewName("disabled"),
Value: false,
},
MustNewName("enabled"): {
Variant: MustNewName("enabled"),
Value: true,
},
}
}
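
Putting the types together, a hypothetical boolean feature definition; the name, stage, and description are illustrative, not flags that exist in this PR:

var exampleFeature = &featuretypes.Feature{
	Name:           featuretypes.MustNewName("use_new_query_builder"),
	Kind:           featuretypes.KindBoolean,
	Stage:          featuretypes.StageExperimental,
	Description:    "Route log queries through the new statement builder",
	DefaultVariant: featuretypes.MustNewName("disabled"),
	Variants:       featuretypes.NewBooleanVariants(),
}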

View File

@@ -0,0 +1,14 @@
package featuretypes
import "github.com/SigNoz/signoz/pkg/valuer"
// A concrete type for a feature flag kind
type Kind struct{ valuer.String }
var (
KindBoolean = Kind{valuer.NewString("boolean")}
KindString = Kind{valuer.NewString("string")}
KindFloat = Kind{valuer.NewString("float")}
KindInt = Kind{valuer.NewString("int")}
KindObject = Kind{valuer.NewString("object")}
)

View File

@@ -0,0 +1,37 @@
package featuretypes
import (
"regexp"
"github.com/SigNoz/signoz/pkg/errors"
)
var nameRegex = regexp.MustCompile(`^[a-z][a-z0-9_]+$`)
// Name is a concrete type for a feature name.
// The underlying string is unexported to prevent direct use of raw strings
// and to enforce a consistent way to create and validate feature names.
type Name struct {
s string
}
func NewName(s string) (Name, error) {
if !nameRegex.MatchString(s) {
return Name{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid feature name: %s", s)
}
return Name{s: s}, nil
}
func MustNewName(s string) Name {
name, err := NewName(s)
if err != nil {
panic(err)
}
return name
}
func (n Name) String() string {
return n.s
}
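
Note the regex requires a lowercase first character plus at least one more [a-z0-9_] character, so single-letter names are rejected along with uppercase or hyphenated ones. A small sketch of the validation behavior, continuing the assumed imports from the earlier sketch:

func nameExamples() {
	_, err := featuretypes.NewName("use_new_query_builder")
	fmt.Println(err == nil) // true: lowercase snake_case matches the regex
	_, err = featuretypes.NewName("Use-New")
	fmt.Println(err == nil) // false: uppercase and '-' are rejected
	_, err = featuretypes.NewName("x")
	fmt.Println(err == nil) // false: the pattern needs at least two characters
}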

View File

@@ -0,0 +1,129 @@
package featuretypes
import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/open-feature/go-sdk/openfeature"
)
// Consumer-facing interface for the feature registry
type Registry interface {
// Returns the feature and the resolution detail for the given name
Get(name Name) (*Feature, openfeature.ProviderResolutionDetail, error)
// Returns the feature and the resolution detail for the given string name
GetByString(name string) (*Feature, openfeature.ProviderResolutionDetail, error)
// Returns all the features in the registry
List() []*Feature
}
// Concrete implementation of the Registry interface
type registry struct {
features map[Name]*Feature
}
// Validates and builds a new registry from a list of features
func NewRegistry(features ...*Feature) (Registry, error) {
registry := &registry{features: make(map[Name]*Feature)}
for _, feature := range features {
// Check if the name is unique
if _, ok := registry.features[feature.Name]; ok {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "feature name %s already exists", feature.Name.String())
}
// Default variant should always be present
if _, ok := feature.Variants[feature.DefaultVariant]; !ok {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "default variant %s not found for feature %s in variants %v", feature.DefaultVariant.String(), feature.Name.String(), feature.Variants)
}
switch feature.Kind {
case KindBoolean:
err := validateFeature[bool](feature)
if err != nil {
return nil, err
}
case KindString:
err := validateFeature[string](feature)
if err != nil {
return nil, err
}
case KindFloat:
err := validateFeature[float64](feature)
if err != nil {
return nil, err
}
case KindInt:
err := validateFeature[int64](feature)
if err != nil {
return nil, err
}
case KindObject:
err := validateFeature[any](feature)
if err != nil {
return nil, err
}
}
registry.features[feature.Name] = feature
}
return registry, nil
}
func validateFeature[T any](feature *Feature) error {
_, _, err := VariantValue[T](feature, feature.DefaultVariant)
if err != nil {
return err
}
for variant := range feature.Variants {
_, _, err := VariantValue[T](feature, variant)
if err != nil {
return err
}
}
return nil
}
func (r *registry) Get(name Name) (f *Feature, detail openfeature.ProviderResolutionDetail, err error) {
feature, ok := r.features[name]
if !ok {
err = errors.Newf(errors.TypeNotFound, ErrCodeFeatureNotFound, "feature %s not found", name.String())
detail = openfeature.ProviderResolutionDetail{
ResolutionError: openfeature.NewGeneralResolutionError(err.Error()),
Reason: openfeature.ErrorReason,
}
return
}
return feature, openfeature.ProviderResolutionDetail{}, nil
}
func (r *registry) GetByString(name string) (f *Feature, detail openfeature.ProviderResolutionDetail, err error) {
featureName, err := NewName(name)
if err != nil {
detail = openfeature.ProviderResolutionDetail{
ResolutionError: openfeature.NewFlagNotFoundResolutionError(err.Error()),
Reason: openfeature.ErrorReason,
}
return
}
return r.Get(featureName)
}
func (r *registry) List() []*Feature {
features := make([]*Feature, 0, len(r.features))
for _, f := range r.features {
features = append(features, f)
}
return features
}
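
End to end, a hedged sketch that reuses exampleFeature from the earlier sketch: validate and register the feature, resolve it by name, and read the default variant's typed value.

func resolveDefault() (bool, error) {
	reg, err := featuretypes.NewRegistry(exampleFeature) // feature from the sketch above
	if err != nil {
		return false, err // e.g. duplicate name or invalid default variant
	}
	feature, _, err := reg.Get(featuretypes.MustNewName("use_new_query_builder"))
	if err != nil {
		return false, err
	}
	enabled, _, err := featuretypes.VariantValue[bool](feature, feature.DefaultVariant)
	return enabled, err
}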

View File

@@ -0,0 +1,20 @@
package featuretypes
import "github.com/SigNoz/signoz/pkg/valuer"
// A concrete type for a feature flag stage
type Stage struct{ valuer.String }
var (
// Used when the feature is experimental
StageExperimental = Stage{valuer.NewString("experimental")}
// Used when the feature works and is in the preview stage, but is not yet ready for production
StagePreview = Stage{valuer.NewString("preview")}
// Used when the feature is stable and ready for production
StageStable = Stage{valuer.NewString("stable")}
// Used when the feature is deprecated and will be removed in the future
StageDeprecated = Stage{valuer.NewString("deprecated")}
)

View File

@@ -6,6 +6,7 @@ import (
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -32,7 +33,7 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
}
if strings.Contains(i.Path, telemetrytypes.ArraySep) || strings.Contains(i.Path, telemetrytypes.ArrayAnyIndex) {
if strings.Contains(i.Path, telemetrylogs.ArraySep) || strings.Contains(i.Path, telemetrylogs.ArrayAnyIndex) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
}
@@ -40,12 +41,12 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
}
if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
if !strings.HasPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
}
// remove the "body." prefix from the path
i.Path = strings.TrimPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix)
i.Path = strings.TrimPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix)
isCardinal := keycheck.IsCardinal(i.Path)
if isCardinal {

View File

@@ -153,10 +153,28 @@ func NewFormulaEvaluator(expressionStr string, canDefaultZero map[string]bool) (
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse expression")
}
// Normalize canDefaultZero keys to match variable casing from expression
normalizedCanDefaultZero := make(map[string]bool)
vars := expression.Vars()
for _, variable := range vars {
// If exact match exists, use it
if val, ok := canDefaultZero[variable]; ok {
normalizedCanDefaultZero[variable] = val
continue
}
// Otherwise try case-insensitive lookup
for k, v := range canDefaultZero {
if strings.EqualFold(k, variable) {
normalizedCanDefaultZero[variable] = v
break
}
}
}
evaluator := &FormulaEvaluator{
expression: expression,
variables: expression.Vars(),
canDefaultZero: canDefaultZero,
variables: vars,
canDefaultZero: normalizedCanDefaultZero,
aggRefs: make(map[string]aggregationRef),
}
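
The normalization re-keys canDefaultZero by the exact variable spelling the parsed expression uses: an exact match wins, otherwise the first case-insensitive hit is taken. A standalone sketch of that lookup rule; note that if two keys differ only by case, Go's map iteration order makes the fallback nondeterministic, so callers presumably avoid such collisions:

func lookupFold(m map[string]bool, key string) (bool, bool) {
	if v, ok := m[key]; ok {
		return v, true // exact spelling wins
	}
	for k, v := range m {
		if strings.EqualFold(k, key) {
			return v, true // first case-insensitive hit
		}
	}
	return false, false
}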
@@ -281,6 +299,16 @@ func (fe *FormulaEvaluator) buildSeriesLookup(timeSeriesData map[string]*TimeSer
// We are only interested in the time series data for the queries that are
// involved in the formula expression.
data, exists := timeSeriesData[aggRef.QueryName]
if !exists {
// try case-insensitive lookup
for k, v := range timeSeriesData {
if strings.EqualFold(k, aggRef.QueryName) {
data = v
exists = true
break
}
}
}
if !exists {
continue
}

View File

@@ -864,6 +864,158 @@ func TestComplexExpression(t *testing.T) {
}
}
func TestCaseInsensitiveQueryNames(t *testing.T) {
tests := []struct {
name string
expression string
tsData map[string]*TimeSeriesData
expectedValues []float64
}{
{
name: "lowercase query names",
expression: "a / b",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{5.0},
},
{
name: "mixed case query names",
expression: "A / b",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{5.0},
},
{
name: "uppercase query names with lowercase data keys",
expression: "A / B",
tsData: map[string]*TimeSeriesData{
"a": createFormulaTestTimeSeriesData("a", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{5.0},
},
{
name: "all lowercase",
expression: "a/b",
tsData: map[string]*TimeSeriesData{
"a": createFormulaTestTimeSeriesData("a", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 100}),
},
}),
"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 10}),
},
}),
},
expectedValues: []float64{10.0},
},
{
name: "complex expression with mixed case",
expression: "a + B * c",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 5}),
},
}),
"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 3}),
},
}),
"C": createFormulaTestTimeSeriesData("C", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{1: 2}),
},
}),
},
expectedValues: []float64{11.0}, // 5 + 3 * 2 = 11
},
{
name: "lowercase variables with default zero missing point",
expression: "a + b",
tsData: map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{
1: 10,
2: 20,
}),
},
}),
"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
{
Labels: createLabels(map[string]string{}),
Values: createValues(map[int64]float64{
1: 5,
}),
},
}),
},
expectedValues: []float64{15.0, 20.0}, // t1: 10+5, t2: 20+0
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
evaluator, err := NewFormulaEvaluator(tt.expression, map[string]bool{"a": true, "A": true, "b": true, "B": true, "c": true, "C": true})
require.NoError(t, err)
result, err := evaluator.EvaluateFormula(tt.tsData)
require.NoError(t, err)
require.NotNil(t, result)
assert.Equal(t, 1, len(result), "should have exactly one result series")
assert.Equal(t, len(tt.expectedValues), len(result[0].Values), "should match expected number of values")
for i, v := range tt.expectedValues {
assert.InDelta(t, v, result[0].Values[i].Value, 0.0001, "value at index %d should match", i)
}
})
}
}
func TestAbsValueExpression(t *testing.T) {
tsData := map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{

View File

@@ -4,7 +4,6 @@ import (
"fmt"
"strings"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -18,13 +17,9 @@ var (
FieldSelectorMatchTypeFuzzy = FieldSelectorMatchType{valuer.NewString("fuzzy")}
)
const (
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
// e.g., "body.status" where "body." is the prefix
BodyJSONStringSearchPrefix = "body."
ArraySep = jsontypeexporter.ArraySeparator
ArrayAnyIndex = "[*]."
)
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
// e.g., "body.status" where "body." is the prefix
const BodyJSONStringSearchPrefix = `body.`
type TelemetryFieldKey struct {
Name string `json:"name"`

Some files were not shown because too many files have changed in this diff