Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-28 22:24:11 +00:00)

Compare commits
116 commits:

26207cf0b3, d93a8d1f63, f5cdce4f70, 08bce90f1d, 63c27e9af6, f6da9adb86, c82f54b548, 85f0193a1e, bde9d91867, 5147db6997, 0c5c2a00d9, 2669eda2f8, b4df9ba1b9, af7b4c6fa8, 5667793d7f, 92a79bbdce, 1887ddd49c, 57aac8b800, e4c1b2ce50, 3c564b6809, d149e53f70, 220c78e72b, 1ff971dac4, 1dc03eebd4, 9f71a6423f, 0a3e2e6215, 12476b719f, 193f35ba17, de28d6ba15, 7a3f9b963d, aedf61c8e0, f12f16f996, ad61e8f700, 286129c7a0, 78a3cc69ee, c2790258a3, 4c70b44230, a178c90a08, cc9c316bfe, 6ea517f530, c1789e7921, 4b67a1c52f, beb4dc060d, f6fd182558, 92e5abed6e, dcb0ca9265, 8ab44fd846, 3d3a566094, 6b86779623, f0c405f068, 9bea32c354, 1bb9386ea1, f3dfaf37ce, 0498c77634, 38146ae364, f911f98f83, 28b1656d4c, fe28290c76, 1b5738cdae, 0c61174506, b850b69e16, b0d52ee87a, 97ead5c5b7, aee5c4f8f7, 255b39f43c, 441d328976, 93ea44ff62, d31cce3a1f, b7fc9ee85a, 2cc3ad335c, 748b08827d, be17b71a93, 917345ddf6, 9d7260ad40, f240bbdfd6, b2d5560576, 8880d13b8b, 3ef9f8750f, b2bdfa25c7, a4c2234e05, 3abb9c824f, 2876312719, 7d56901562, 9619356c33, 7bce659e5a, 7ac05c901f, 8573ed4021, 09155f7ede, b2c0ff566a, 0e6b310865, 99ec44a793, bb4eae3fd9, 8af86a1e6a, 016c92ea7e, b7ffbca598, 23fee19048, c5622a5560, 8f1dc4c36f, 21f918a950, 471bcc9de8, 14e4564188, 8d007836da, 589f32d326, 74c31c97f9, 696f457521, 1b12ebd76a, 82a67f7604, 4c8fedc881, de2c95e2f2, 07052d9118, db88929016, 356e84fb96, 034ce8054c, fdf01ad707, d1a27fc3cd, 405b51c680
OpenAPI spec (the promoted paths endpoints get distinct operationIds):

```diff
@@ -853,7 +853,7 @@ paths:
     get:
       deprecated: false
       description: This endpoints promotes and indexes paths
-      operationId: PromotePaths
+      operationId: ListPromotedAndIndexedPaths
       responses:
         "200":
           content:
@@ -883,13 +883,11 @@ paths:
           description: Internal Server Error
       summary: Promote and index paths
       tags:
         - promoted_paths
-        - logs
-        - json_logs
     post:
       deprecated: false
       description: This endpoints promotes and indexes paths
-      operationId: PromotePaths
+      operationId: HandlePromoteAndIndexPaths
       requestBody:
         content:
           application/json:
@@ -915,9 +913,7 @@ paths:
           description: Internal Server Error
       summary: Promote and index paths
       tags:
         - promoted_paths
-        - logs
-        - json_logs
   /api/v1/org/preferences:
     get:
       deprecated: false
```
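Every operationId in an OpenAPI document must be unique; the old spec reused `PromotePaths` for both the GET and the POST operation, which is invalid and collides in generated clients, where each operationId typically becomes a method name. A small illustrative sketch of that mapping (a hypothetical generated client, not anything actually produced from SigNoz's spec):

```typescript
// Hypothetical sketch: OpenAPI code generators usually turn each
// operationId into a client method, so the duplicated "PromotePaths"
// above would have produced a name collision. With the renamed ids,
// a generated client could expose two distinct methods:
interface PromotedPathsClient {
	// GET -> operationId: ListPromotedAndIndexedPaths
	listPromotedAndIndexedPaths(): Promise<string[]>;
	// POST -> operationId: HandlePromoteAndIndexPaths
	handlePromoteAndIndexPaths(paths: string[]): Promise<void>;
}
```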
Deleted (api/metricsExplorer/v2 getMetricAlerts wrapper):

```diff
@@ -1,29 +0,0 @@
-import { ApiV2Instance as axios } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { GetMetricAlertsResponse } from 'types/api/metricsExplorer/v2';
-
-export const getMetricAlerts = async (
-	metricName: string,
-	signal?: AbortSignal,
-	headers?: Record<string, string>,
-): Promise<SuccessResponseV2<GetMetricAlertsResponse>> => {
-	try {
-		const encodedMetricName = encodeURIComponent(metricName);
-		const response = await axios.get('/metric/alerts', {
-			params: {
-				metricName: encodedMetricName,
-			},
-			signal,
-			headers,
-		});
-
-		return {
-			httpStatusCode: response.status,
-			data: response.data,
-		};
-	} catch (error) {
-		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-	}
-};
```
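One detail worth noting in this (and the sibling) deleted wrappers: they call `encodeURIComponent(metricName)` and then hand the result to axios's `params`, which axios serializes and URL-encodes itself, so special characters end up double-encoded (a `%` becomes `%25`). A minimal sketch of the single-encoding approach, under the assumption that the endpoint path is as shown above:

```typescript
// Sketch: axios URL-encodes `params` values when serializing the query
// string, so the raw metric name should be passed through untouched.
import axios from 'axios';

async function getMetricAlertsRaw(
	metricName: string,
	signal?: AbortSignal,
): Promise<unknown> {
	const response = await axios.get('/metric/alerts', {
		// Serialized once by axios to ?metricName=<encoded>
		params: { metricName },
		signal,
	});
	return response.data;
}
```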
Deleted (api/metricsExplorer/v2 getMetricAttributes wrapper):

```diff
@@ -1,37 +0,0 @@
-import { ApiV2Instance as axios } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import {
-	GetMetricAttributesRequest,
-	GetMetricAttributesResponse,
-} from 'types/api/metricsExplorer/v2';
-
-export const getMetricAttributes = async (
-	{ metricName, start, end }: GetMetricAttributesRequest,
-	signal?: AbortSignal,
-	headers?: Record<string, string>,
-): Promise<SuccessResponseV2<GetMetricAttributesResponse>> => {
-	try {
-		const encodedMetricName = encodeURIComponent(metricName);
-		const response = await axios.post(
-			'/metrics/attributes',
-			{
-				metricName: encodedMetricName,
-				start,
-				end,
-			},
-			{
-				signal,
-				headers,
-			},
-		);
-
-		return {
-			httpStatusCode: response.status,
-			data: response.data,
-		};
-	} catch (error) {
-		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-	}
-};
```
Deleted (api/metricsExplorer/v2 getMetricDashboards wrapper):

```diff
@@ -1,29 +0,0 @@
-import { ApiV2Instance as axios } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { GetMetricDashboardsResponse } from 'types/api/metricsExplorer/v2';
-
-export const getMetricDashboards = async (
-	metricName: string,
-	signal?: AbortSignal,
-	headers?: Record<string, string>,
-): Promise<SuccessResponseV2<GetMetricDashboardsResponse>> => {
-	try {
-		const encodedMetricName = encodeURIComponent(metricName);
-		const response = await axios.get('/metric/dashboards', {
-			params: {
-				metricName: encodedMetricName,
-			},
-			signal,
-			headers,
-		});
-
-		return {
-			httpStatusCode: response.status,
-			data: response.data,
-		};
-	} catch (error) {
-		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-	}
-};
```
Deleted (api/metricsExplorer/v2 getMetricHighlights wrapper):

```diff
@@ -1,29 +0,0 @@
-import { ApiV2Instance as axios } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { GetMetricHighlightsResponse } from 'types/api/metricsExplorer/v2';
-
-export const getMetricHighlights = async (
-	metricName: string,
-	signal?: AbortSignal,
-	headers?: Record<string, string>,
-): Promise<SuccessResponseV2<GetMetricHighlightsResponse>> => {
-	try {
-		const encodedMetricName = encodeURIComponent(metricName);
-		const response = await axios.get('/metric/highlights', {
-			params: {
-				metricName: encodedMetricName,
-			},
-			signal,
-			headers,
-		});
-
-		return {
-			httpStatusCode: response.status,
-			data: response.data,
-		};
-	} catch (error) {
-		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-	}
-};
```
Deleted (api/metricsExplorer/v2 getMetricMetadata wrapper):

```diff
@@ -1,29 +0,0 @@
-import { ApiV2Instance as axios } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { GetMetricMetadataResponse } from 'types/api/metricsExplorer/v2';
-
-export const getMetricMetadata = async (
-	metricName: string,
-	signal?: AbortSignal,
-	headers?: Record<string, string>,
-): Promise<SuccessResponseV2<GetMetricMetadataResponse>> => {
-	try {
-		const encodedMetricName = encodeURIComponent(metricName);
-		const response = await axios.get('/metrics/metadata', {
-			params: {
-				metricName: encodedMetricName,
-			},
-			signal,
-			headers,
-		});
-
-		return {
-			httpStatusCode: response.status,
-			data: response.data,
-		};
-	} catch (error) {
-		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-	}
-};
```
Deleted (api/metricsExplorer/v2 updateMetricMetadata wrapper):

```diff
@@ -1,28 +0,0 @@
-import { ApiV2Instance as axios } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import {
-	UpdateMetricMetadataRequest,
-	UpdateMetricMetadataResponse,
-} from 'types/api/metricsExplorer/v2';
-
-const updateMetricMetadata = async (
-	metricName: string,
-	props: UpdateMetricMetadataRequest,
-): Promise<SuccessResponseV2<UpdateMetricMetadataResponse>> => {
-	try {
-		const response = await axios.post(`/metrics/${metricName}/metadata`, {
-			...props,
-		});
-
-		return {
-			httpStatusCode: response.status,
-			data: response.data,
-		};
-	} catch (error) {
-		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-	}
-};
-
-export default updateMetricMetadata;
```
constants/reactQueryKeys (v2 metrics explorer keys removed):

```diff
@@ -56,13 +56,6 @@ export const REACT_QUERY_KEY = {
 	GET_RELATED_METRICS: 'GET_RELATED_METRICS',
 	GET_INSPECT_METRICS_DETAILS: 'GET_INSPECT_METRICS_DETAILS',
-
-	// Metrics Explorer V2 Query Keys
-	GET_METRIC_HIGHLIGHTS: 'GET_METRIC_HIGHLIGHTS',
-	GET_METRIC_METADATA: 'GET_METRIC_METADATA',
-	GET_METRIC_ATTRIBUTES: 'GET_METRIC_ATTRIBUTES',
-	GET_METRIC_ALERTS: 'GET_METRIC_ALERTS',
-	GET_METRIC_DASHBOARDS: 'GET_METRIC_DASHBOARDS',
 
 	// Traces Funnels Query Keys
 	GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',
 	GET_DOMAIN_METRICS_DATA: 'GET_DOMAIN_METRICS_DATA',
```
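These constants matter because react-query identifies cache entries by the key arrays built from them: the hook that reads data and the code that invalidates it must agree on the same key. A minimal sketch of that contract (not SigNoz code; `fetchMetricMetadata` is a stand-in fetcher):

```typescript
// Minimal sketch of why shared key constants are useful with react-query.
import { useQuery, useQueryClient, UseQueryResult } from 'react-query';

const GET_METRIC_METADATA = 'GET_METRIC_METADATA';

// Stand-in for any async fetcher; the real one would call the API layer.
declare function fetchMetricMetadata(metricName: string): Promise<unknown>;

export function useMetricMetadata(metricName: string): UseQueryResult<unknown> {
	// The array key scopes the cache entry to this metric.
	return useQuery([GET_METRIC_METADATA, metricName], () =>
		fetchMetricMetadata(metricName),
	);
}

export function useInvalidateMetricMetadata(metricName: string): () => void {
	const queryClient = useQueryClient();
	// Invalidating with the same array forces the reader above to refetch.
	return (): void => {
		queryClient.invalidateQueries([GET_METRIC_METADATA, metricName]);
	};
}
```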
AllAttributes (metric details component):

```diff
@@ -1,17 +1,8 @@
-import {
-	Button,
-	Collapse,
-	Input,
-	Menu,
-	Popover,
-	Skeleton,
-	Typography,
-} from 'antd';
+import { Button, Collapse, Input, Menu, Popover, Typography } from 'antd';
 import { ColumnsType } from 'antd/es/table';
 import logEvent from 'api/common/logEvent';
 import { ResizeTable } from 'components/ResizeTable';
 import { DataType } from 'container/LogDetailedView/TableView';
-import { useGetMetricAttributes } from 'hooks/metricsExplorer/v2/useGetMetricAttributes';
 import { useNotifications } from 'hooks/useNotifications';
 import { Compass, Copy, Search } from 'lucide-react';
 import { useCallback, useMemo, useState } from 'react';
@@ -22,9 +13,7 @@ import ROUTES from '../../../constants/routes';
 import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
 import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
 import { AllAttributesProps, AllAttributesValueProps } from './types';
-import { getMetricDetailsQuery, transformMetricAttributes } from './utils';
-
-const ALL_ATTRIBUTES_KEY = 'all-attributes';
+import { getMetricDetailsQuery } from './utils';
 
 export function AllAttributesValue({
 	filterKey,
@@ -121,20 +110,13 @@ export function AllAttributesValue({
 
 function AllAttributes({
 	metricName,
+	attributes,
 	metricType,
 }: AllAttributesProps): JSX.Element {
 	const [searchString, setSearchString] = useState('');
-	const [activeKey, setActiveKey] = useState<string[]>([ALL_ATTRIBUTES_KEY]);
-
-	const {
-		data: attributesData,
-		isLoading: isLoadingAttributes,
-		isError: isErrorAttributes,
-	} = useGetMetricAttributes({
-		metricName,
-	});
-
-	const { attributes } = transformMetricAttributes(attributesData);
+	const [activeKey, setActiveKey] = useState<string | string[]>(
+		'all-attributes',
+	);
 
 	const { handleExplorerTabChange } = useHandleExplorerTabChange();
 
@@ -196,7 +178,7 @@ function AllAttributes({
 			attributes.filter(
 				(attribute) =>
 					attribute.key.toLowerCase().includes(searchString.toLowerCase()) ||
-					attribute.values.some((value) =>
+					attribute.value.some((value) =>
 						value.toLowerCase().includes(searchString.toLowerCase()),
 					),
 			),
@@ -213,7 +195,7 @@ function AllAttributes({
 				},
 				value: {
 					key: attribute.key,
-					value: attribute.values,
+					value: attribute.value,
 				},
 			}))
 			: [],
@@ -270,38 +252,8 @@ function AllAttributes({
 		],
 	);
 
-	const emptyText = useMemo(
-		() =>
-			isErrorAttributes ? 'Error fetching attributes' : 'No attributes found',
-		[isErrorAttributes],
-	);
-
-	const items = useMemo(() => {
-		let children;
-		if (isLoadingAttributes) {
-			children = (
-				<div className="all-attributes-skeleton-container">
-					<Skeleton active title={false} paragraph={{ rows: 8 }} />
-				</div>
-			);
-		} else {
-			children = (
-				<ResizeTable
-					columns={columns}
-					loading={isLoadingAttributes}
-					tableLayout="fixed"
-					dataSource={tableData}
-					pagination={false}
-					showHeader={false}
-					className="metrics-accordion-content all-attributes-content"
-					scroll={{ y: 600 }}
-					locale={{
-						emptyText,
-					}}
-				/>
-			);
-		}
-		return [
+	const items = useMemo(
+		() => [
 			{
 				label: (
 					<div className="metrics-accordion-header">
@@ -318,22 +270,32 @@ function AllAttributes({
 						onClick={(e): void => {
 							e.stopPropagation();
 						}}
-						disabled={isLoadingAttributes}
 					/>
 				</div>
 			),
 			key: 'all-attributes',
-			children,
+			children: (
+				<ResizeTable
+					columns={columns}
+					tableLayout="fixed"
+					dataSource={tableData}
+					pagination={false}
+					showHeader={false}
+					className="metrics-accordion-content all-attributes-content"
+					scroll={{ y: 600 }}
+				/>
+			),
 			},
-		];
-	}, [searchString, columns, isLoadingAttributes, tableData, emptyText]);
+		],
+		[columns, tableData, searchString],
+	);
 
 	return (
 		<Collapse
 			bordered
-			className="metrics-accordion metrics-all-attributes-accordion"
+			className="metrics-accordion metrics-metadata-accordion"
 			activeKey={activeKey}
-			onChange={(keys): void => setActiveKey(keys as string[])}
+			onChange={(keys): void => setActiveKey(keys)}
 			items={items}
 		/>
 	);
```
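The search filter kept through this change is a simple case-insensitive match on either the attribute key or any of its values. Extracted as a standalone predicate for clarity (a sketch; the `Attribute` type below is reduced from the component's props):

```typescript
// Sketch of the search predicate used in AllAttributes above: an
// attribute matches when the search string occurs in its key or in any
// of its values, compared case-insensitively.
interface Attribute {
	key: string;
	value: string[];
}

function matchesSearch(attribute: Attribute, searchString: string): boolean {
	const needle = searchString.toLowerCase();
	return (
		attribute.key.toLowerCase().includes(needle) ||
		attribute.value.some((v) => v.toLowerCase().includes(needle))
	);
}

// Example: matchesSearch({ key: 'http.method', value: ['GET', 'POST'] }, 'post')
// returns true via the value match.
```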
DashboardsAndAlertsPopover (metric details component):

```diff
@@ -1,56 +1,37 @@
 import { Color } from '@signozhq/design-tokens';
 import { Dropdown, Typography } from 'antd';
-import { Skeleton } from 'antd/lib';
 import { QueryParams } from 'constants/query';
 import ROUTES from 'constants/routes';
-import { useGetMetricAlerts } from 'hooks/metricsExplorer/v2/useGetMetricAlerts';
-import { useGetMetricDashboards } from 'hooks/metricsExplorer/v2/useGetMetricDashboards';
 import { useSafeNavigate } from 'hooks/useSafeNavigate';
 import useUrlQuery from 'hooks/useUrlQuery';
 import history from 'lib/history';
 import { Bell, Grid } from 'lucide-react';
 import { useMemo } from 'react';
 import { generatePath } from 'react-router-dom';
-import { pluralize } from 'utils/pluralize';
 
 import { DashboardsAndAlertsPopoverProps } from './types';
-import { transformMetricAlerts, transformMetricDashboards } from './utils';
 
 function DashboardsAndAlertsPopover({
-	metricName,
+	alerts,
+	dashboards,
 }: DashboardsAndAlertsPopoverProps): JSX.Element | null {
 	const { safeNavigate } = useSafeNavigate();
 	const params = useUrlQuery();
 
-	const {
-		data: alertsData,
-		isLoading: isLoadingAlerts,
-		isError: isErrorAlerts,
-	} = useGetMetricAlerts(metricName);
-
-	const {
-		data: dashboardsData,
-		isLoading: isLoadingDashboards,
-		isError: isErrorDashboards,
-	} = useGetMetricDashboards(metricName);
-
-	const alerts = transformMetricAlerts(alertsData);
-	const dashboards = transformMetricDashboards(dashboardsData);
-
 	const alertsPopoverContent = useMemo(() => {
 		if (alerts && alerts.length > 0) {
 			return alerts.map((alert) => ({
-				key: alert.alertId,
+				key: alert.alert_id,
 				label: (
 					<Typography.Link
-						key={alert.alertId}
+						key={alert.alert_id}
 						onClick={(): void => {
-							params.set(QueryParams.ruleId, alert.alertId);
+							params.set(QueryParams.ruleId, alert.alert_id);
 							history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`);
 						}}
 						className="dashboards-popover-content-item"
 					>
-						{alert.alertName || alert.alertId}
+						{alert.alert_name || alert.alert_id}
 					</Typography.Link>
 				),
 			}));
@@ -58,44 +39,41 @@ function DashboardsAndAlertsPopover({
 		return null;
 	}, [alerts, params]);
 
+	const uniqueDashboards = useMemo(
+		() =>
+			dashboards?.filter(
+				(item, index, self) =>
+					index === self.findIndex((t) => t.dashboard_id === item.dashboard_id),
+			),
+		[dashboards],
+	);
+
 	const dashboardsPopoverContent = useMemo(() => {
-		if (dashboards && dashboards.length > 0) {
-			return dashboards.map((dashboard) => ({
-				key: dashboard.dashboardId,
+		if (uniqueDashboards && uniqueDashboards.length > 0) {
+			return uniqueDashboards.map((dashboard) => ({
+				key: dashboard.dashboard_id,
 				label: (
 					<Typography.Link
-						key={dashboard.dashboardId}
+						key={dashboard.dashboard_id}
 						onClick={(): void => {
 							safeNavigate(
 								generatePath(ROUTES.DASHBOARD, {
-									dashboardId: dashboard.dashboardId,
+									dashboardId: dashboard.dashboard_id,
 								}),
 							);
 						}}
 						className="dashboards-popover-content-item"
 					>
-						{dashboard.dashboardName || dashboard.dashboardId}
+						{dashboard.dashboard_name || dashboard.dashboard_id}
 					</Typography.Link>
 				),
 			}));
 		}
 		return null;
-	}, [dashboards, safeNavigate]);
+	}, [uniqueDashboards, safeNavigate]);
 
-	if (isLoadingAlerts || isLoadingDashboards) {
-		return (
-			<div className="dashboards-and-alerts-popover-container">
-				<Skeleton title={false} paragraph={{ rows: 1 }} active />
-			</div>
-		);
-	}
-
-	// If there are no dashboards or alerts or both have errors, don't show the popover
-	const hidePopover =
-		(!dashboardsPopoverContent && !alertsPopoverContent) ||
-		(isErrorAlerts && isErrorDashboards);
-	if (hidePopover) {
-		return <div className="dashboards-and-alerts-popover-container" />;
+	if (!dashboardsPopoverContent && !alertsPopoverContent) {
+		return null;
 	}
 
 	return (
@@ -114,7 +92,8 @@ function DashboardsAndAlertsPopover({
 				>
 					<Grid size={12} color={Color.BG_SIENNA_500} />
 					<Typography.Text>
-						{pluralize(dashboards.length, 'dashboard', 'dashboards')}
+						{uniqueDashboards?.length} dashboard
+						{uniqueDashboards?.length === 1 ? '' : 's'}
 					</Typography.Text>
 				</div>
 			</Dropdown>
@@ -133,7 +112,7 @@ function DashboardsAndAlertsPopover({
 				>
 					<Bell size={12} color={Color.BG_SAKURA_500} />
 					<Typography.Text>
-						{pluralize(alerts.length, 'alert rule', 'alert rules')}
+						{alerts?.length} alert {alerts?.length === 1 ? 'rule' : 'rules'}
 					</Typography.Text>
 				</div>
 			</Dropdown>
```
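Two small idioms are in play here. First, the `findIndex`-based deduplication keeps only the first occurrence of each `dashboard_id`; it is O(n²) but fine for the short lists involved. Second, the removed `pluralize` helper is replaced by inline ternaries. Sketches of both (the `pluralize` body is an assumption inferred from its call sites, not the real util):

```typescript
// Sketch of the dedupe idiom: keep an element only if its index equals
// the index of the first element sharing the same dashboard_id.
interface DashboardRef {
	dashboard_id: string;
	dashboard_name: string;
}

function dedupeDashboards(dashboards: DashboardRef[]): DashboardRef[] {
	return dashboards.filter(
		(item, index, self) =>
			index === self.findIndex((t) => t.dashboard_id === item.dashboard_id),
	);
}

// Plausible shape for the removed pluralize(count, singular, plural)
// helper, inferred from how it was called above.
function pluralize(count: number, singular: string, plural: string): string {
	return `${count} ${count === 1 ? singular : plural}`;
}

// pluralize(1, 'alert rule', 'alert rules') -> "1 alert rule"
```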
Deleted (Highlights component of the metric details view):

```diff
@@ -1,115 +0,0 @@
-import { Skeleton, Tooltip, Typography } from 'antd';
-import { useGetMetricHighlights } from 'hooks/metricsExplorer/v2/useGetMetricHighlights';
-import { useMemo } from 'react';
-
-import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
-import { HighlightsProps } from './types';
-import {
-	formatNumberToCompactFormat,
-	formatTimestampToReadableDate,
-	transformMetricHighlights,
-} from './utils';
-
-function Highlights({ metricName }: HighlightsProps): JSX.Element {
-	const {
-		data: metricHighlightsData,
-		isLoading: isLoadingMetricHighlights,
-		isError: isErrorMetricHighlights,
-	} = useGetMetricHighlights(metricName ?? '', {
-		enabled: !!metricName,
-	});
-
-	const metricHighlights = transformMetricHighlights(metricHighlightsData);
-
-	const dataPoints = useMemo(() => {
-		if (!metricHighlights) return null;
-		if (isErrorMetricHighlights) {
-			return (
-				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
-			);
-		}
-		return (
-			<Typography.Text className="metric-details-grid-value">
-				<Tooltip title={metricHighlights?.dataPoints.toLocaleString()}>
-					{formatNumberIntoHumanReadableFormat(metricHighlights?.dataPoints ?? 0)}
-				</Tooltip>
-			</Typography.Text>
-		);
-	}, [metricHighlights, isErrorMetricHighlights]);
-
-	const timeSeries = useMemo(() => {
-		if (!metricHighlights) return null;
-		if (isErrorMetricHighlights) {
-			return (
-				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
-			);
-		}
-
-		const timeSeriesActive = formatNumberToCompactFormat(
-			metricHighlights.activeTimeSeries,
-		);
-		const timeSeriesTotal = formatNumberToCompactFormat(
-			metricHighlights.totalTimeSeries,
-		);
-
-		return (
-			<Typography.Text className="metric-details-grid-value">
-				<Tooltip
-					title="Active time series are those that have received data points in the last 1
-				hour."
-					placement="top"
-				>
-					<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
-				</Tooltip>
-			</Typography.Text>
-		);
-	}, [metricHighlights, isErrorMetricHighlights]);
-
-	const lastReceived = useMemo(() => {
-		if (!metricHighlights) return null;
-		if (isErrorMetricHighlights) {
-			return (
-				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
-			);
-		}
-		const displayText = formatTimestampToReadableDate(
-			metricHighlights.lastReceived,
-		);
-		return (
-			<Typography.Text className="metric-details-grid-value">
-				<Tooltip title={displayText}>{displayText}</Tooltip>
-			</Typography.Text>
-		);
-	}, [metricHighlights, isErrorMetricHighlights]);
-
-	if (isLoadingMetricHighlights) {
-		return (
-			<div className="metric-details-content-grid">
-				<Skeleton title={false} paragraph={{ rows: 2 }} active />
-			</div>
-		);
-	}
-
-	return (
-		<div className="metric-details-content-grid">
-			<div className="labels-row">
-				<Typography.Text type="secondary" className="metric-details-grid-label">
-					SAMPLES
-				</Typography.Text>
-				<Typography.Text type="secondary" className="metric-details-grid-label">
-					TIME SERIES
-				</Typography.Text>
-				<Typography.Text type="secondary" className="metric-details-grid-label">
-					LAST RECEIVED
-				</Typography.Text>
-			</div>
-			<div className="values-row">
-				{dataPoints}
-				{timeSeries}
-				{lastReceived}
-			</div>
-		</div>
-	);
-}
-
-export default Highlights;
```
Metadata (metric details component; diff truncated in this capture after the final `return`):

```diff
@@ -1,19 +1,19 @@
-import { Button, Collapse, Input, Select, Skeleton, Typography } from 'antd';
+import { Button, Collapse, Input, Select, Typography } from 'antd';
 import { ColumnsType } from 'antd/es/table';
 import logEvent from 'api/common/logEvent';
 import { Temporality } from 'api/metricsExplorer/getMetricDetails';
 import { MetricType } from 'api/metricsExplorer/getMetricsList';
+import { UpdateMetricMetadataProps } from 'api/metricsExplorer/updateMetricMetadata';
 import { ResizeTable } from 'components/ResizeTable';
 import YAxisUnitSelector from 'components/YAxisUnitSelector';
 import { YAxisSource } from 'components/YAxisUnitSelector/types';
 import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
-import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import FieldRenderer from 'container/LogDetailedView/FieldRenderer';
 import { DataType } from 'container/LogDetailedView/TableView';
-import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/v2/useUpdateMetricMetadata';
+import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
 import { useNotifications } from 'hooks/useNotifications';
 import { Edit2, Save, X } from 'lucide-react';
-import { useCallback, useEffect, useMemo, useState } from 'react';
+import { useCallback, useMemo, useState } from 'react';
 import { useQueryClient } from 'react-query';
 
 import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
@@ -23,22 +23,23 @@ import {
 } from '../Summary/constants';
 import { MetricTypeRenderer } from '../Summary/utils';
 import { METRIC_METADATA_KEYS } from './constants';
-import { MetadataProps, MetricMetadataState, TableFields } from './types';
-import { transformUpdateMetricMetadataRequest } from './utils';
+import { MetadataProps } from './types';
+import { determineIsMonotonic } from './utils';
 
 function Metadata({
 	metricName,
 	metadata,
-	isErrorMetricMetadata,
-	isLoadingMetricMetadata,
+	refetchMetricDetails,
 }: MetadataProps): JSX.Element {
 	const [isEditing, setIsEditing] = useState(false);
-
-	const [metricMetadata, setMetricMetadata] = useState<MetricMetadataState>({
-		metricType: MetricType.SUM,
-		description: '',
-		temporality: undefined,
-		unit: undefined,
+	const [
+		metricMetadata,
+		setMetricMetadata,
+	] = useState<UpdateMetricMetadataProps>({
+		metricType: metadata?.metric_type || MetricType.SUM,
+		description: metadata?.description || '',
+		temporality: metadata?.temporality,
+		unit: metadata?.unit,
 	});
 	const { notifications } = useNotifications();
 	const {
@@ -50,18 +51,6 @@ function Metadata({
 	);
 	const queryClient = useQueryClient();
 
-	// Initialize state from metadata api data
-	useEffect(() => {
-		if (metadata) {
-			setMetricMetadata({
-				metricType: metadata.metricType,
-				description: metadata.description,
-				temporality: metadata.temporality,
-				unit: metadata.unit,
-			});
-		}
-	}, [metadata]);
-
 	const tableData = useMemo(
 		() =>
 			metadata
@@ -70,7 +59,7 @@ function Metadata({
 					temporality: metadata?.temporality,
 				})
 					// Filter out monotonic as user input is not required
-					.filter((key) => key !== TableFields.IS_MONOTONIC)
+					.filter((key) => key !== 'monotonic')
 					.map((key) => ({
 						key,
 						value: {
@@ -83,37 +72,30 @@ function Metadata({
 	);
 
 	// Render un-editable field value
-	const renderUneditableField = useCallback(
-		(key: keyof MetricMetadataState, value: string) => {
-			if (isErrorMetricMetadata) {
-				return <FieldRenderer field="-" />;
-			}
-			if (key === TableFields.METRIC_TYPE) {
-				return <MetricTypeRenderer type={value as MetricType} />;
-			}
-			let fieldValue = value;
-			if (key === TableFields.UNIT) {
-				fieldValue = getUniversalNameFromMetricUnit(value);
-			}
-			return <FieldRenderer field={fieldValue || '-'} />;
-		},
-		[isErrorMetricMetadata],
-	);
+	const renderUneditableField = useCallback((key: string, value: string) => {
+		if (key === 'metric_type') {
+			return <MetricTypeRenderer type={value as MetricType} />;
+		}
+		let fieldValue = value;
+		if (key === 'unit') {
+			fieldValue = getUniversalNameFromMetricUnit(value);
+		}
+		return <FieldRenderer field={fieldValue || '-'} />;
+	}, []);
 
 	const renderColumnValue = useCallback(
-		(field: { value: string; key: keyof MetricMetadataState }): JSX.Element => {
+		(field: { value: string; key: string }): JSX.Element => {
 			if (!isEditing) {
 				return renderUneditableField(field.key, field.value);
 			}
 
 			// Don't allow editing of unit if it's already set
-			const metricUnitAlreadySet =
-				field.key === TableFields.UNIT && Boolean(metadata?.unit);
+			const metricUnitAlreadySet = field.key === 'unit' && Boolean(metadata?.unit);
 			if (metricUnitAlreadySet) {
 				return renderUneditableField(field.key, field.value);
 			}
 
-			if (field.key === TableFields.METRIC_TYPE) {
+			if (field.key === 'metric_type') {
 				return (
 					<Select
 						data-testid="metric-type-select"
@@ -131,7 +113,7 @@ function Metadata({
 					/>
 				);
 			}
-			if (field.key === TableFields.UNIT) {
+			if (field.key === 'unit') {
 				return (
 					<YAxisUnitSelector
 						value={metricMetadata.unit}
@@ -143,7 +125,7 @@ function Metadata({
 					/>
 				);
 			}
-			if (field.key === TableFields.Temporality) {
+			if (field.key === 'temporality') {
 				return (
 					<Select
 						data-testid="temporality-select"
@@ -161,12 +143,16 @@ function Metadata({
 					/>
 				);
 			}
-			if (field.key === TableFields.DESCRIPTION) {
+			if (field.key === 'description') {
 				return (
 					<Input
 						data-testid="description-input"
 						name={field.key}
-						defaultValue={metricMetadata.description}
+						defaultValue={
+							metricMetadata[
+								field.key as Exclude<keyof UpdateMetricMetadataProps, 'isMonotonic'>
+							]
+						}
 						onChange={(e): void => {
 							setMetricMetadata((prev) => ({
 								...prev,
@@ -216,11 +202,17 @@ function Metadata({
 		updateMetricMetadata(
 			{
 				metricName,
-				payload: transformUpdateMetricMetadataRequest(metricMetadata),
+				payload: {
+					...metricMetadata,
+					isMonotonic: determineIsMonotonic(
+						metricMetadata.metricType,
+						metricMetadata.temporality,
+					),
+				},
 			},
 			{
 				onSuccess: (response): void => {
-					if (response?.httpStatusCode === 200) {
+					if (response?.statusCode === 200) {
 						logEvent(MetricsExplorerEvents.MetricMetadataUpdated, {
 							[MetricsExplorerEventKeys.MetricName]: metricName,
 							[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -229,12 +221,9 @@ function Metadata({
 						notifications.success({
 							message: 'Metadata updated successfully',
 						});
+						refetchMetricDetails();
 						setIsEditing(false);
-						queryClient.invalidateQueries([REACT_QUERY_KEY.GET_METRICS_LIST]);
-						queryClient.invalidateQueries([
-							REACT_QUERY_KEY.GET_METRIC_METADATA,
-							metricName,
-						]);
+						queryClient.invalidateQueries(['metricsList']);
 					} else {
 						notifications.error({
 							message:
@@ -254,36 +243,21 @@ function Metadata({
 		metricName,
 		metricMetadata,
 		notifications,
+		refetchMetricDetails,
 		queryClient,
 	]);
 
-	const cancelEdit = useCallback(
-		(e: React.MouseEvent<HTMLElement, MouseEvent>): void => {
-			e.stopPropagation();
-			if (metadata) {
-				setMetricMetadata({
-					metricType: metadata.metricType,
-					description: metadata.description,
-					temporality: metadata.temporality,
-					unit: metadata.unit,
-				});
-			}
-			setIsEditing(false);
-		},
-		[metadata],
-	);
-
 	const actionButton = useMemo(() => {
-		if (isLoadingMetricMetadata) {
-			return null;
-		}
 		if (isEditing) {
 			return (
 				<div className="action-menu">
 					<Button
 						className="action-button"
 						type="text"
-						onClick={cancelEdit}
+						onClick={(e): void => {
+							e.stopPropagation();
+							setIsEditing(false);
+						}}
 						disabled={isUpdatingMetricsMetadata}
 					>
 						<X size={14} />
@@ -320,35 +294,10 @@ function Metadata({
 				</Button>
 			</div>
 		);
-	}, [
-		isLoadingMetricMetadata,
-		isEditing,
-		isUpdatingMetricsMetadata,
-		cancelEdit,
-		handleSave,
-	]);
+	}, [handleSave, isEditing, isUpdatingMetricsMetadata]);
 
-	const items = useMemo(() => {
-		let children;
-		if (isLoadingMetricMetadata) {
-			children = (
-				<div className="metrics-metadata-skeleton-container">
-					<Skeleton active title={false} paragraph={{ rows: 8 }} />
-				</div>
-			);
-		} else {
-			children = (
-				<ResizeTable
-					columns={columns}
-					tableLayout="fixed"
-					dataSource={tableData}
-					pagination={false}
-					showHeader={false}
-					className="metrics-accordion-content metrics-metadata-container"
-				/>
-			);
-		}
-		return [
+	const items = useMemo(
+		() => [
 			{
 				label: (
 					<div className="metrics-accordion-header metrics-metadata-header">
@@ -357,10 +306,20 @@ function Metadata({
 					</div>
 				),
 				key: 'metric-metadata',
-				children,
+				children: (
+					<ResizeTable
+						columns={columns}
+						tableLayout="fixed"
+						dataSource={tableData}
+						pagination={false}
+						showHeader={false}
+						className="metrics-accordion-content metrics-metadata-container"
+					/>
+				),
 			},
-		];
-	}, [actionButton, columns, isLoadingMetricMetadata, tableData]);
+		],
+		[actionButton, columns, tableData],
+	);
 
 	return (
 		<Collapse
```
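The save path now computes `isMonotonic` via `determineIsMonotonic(metricType, temporality)`, whose body is not part of this capture. A minimal sketch of what such a helper could look like, assuming the common convention that only cumulative Sum metrics are treated as monotonic counters (an assumption for illustration, not the repo's actual rule; the enum string values are likewise assumed):

```typescript
// Sketch only: the real determineIsMonotonic lives in './utils' and its
// exact rule is not shown in this diff.
enum MetricType {
	SUM = 'Sum',
	GAUGE = 'Gauge',
}

enum Temporality {
	DELTA = 'Delta',
	CUMULATIVE = 'Cumulative',
}

function determineIsMonotonic(
	metricType: MetricType,
	temporality: Temporality | undefined,
): boolean {
	// Assumption: cumulative sums behave like ever-increasing counters;
	// gauges and delta sums are not marked monotonic.
	return metricType === MetricType.SUM && temporality === Temporality.CUMULATIVE;
}
```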
MetricDetails.styles.scss:

```diff
@@ -39,7 +39,6 @@
 	gap: 12px;
 
 	.metric-details-content-grid {
-		height: 50px;
 		.labels-row,
 		.values-row {
 			display: grid;
@@ -73,7 +72,6 @@
 .dashboards-and-alerts-popover-container {
 	display: flex;
 	gap: 16px;
-	height: 32px;
 
 	.dashboards-and-alerts-popover {
 		border-radius: 20px;
@@ -150,6 +148,7 @@
 
 	.all-attributes-search-input {
 		width: 300px;
+		border: 1px solid var(--bg-slate-300);
 	}
 }
@@ -162,7 +161,6 @@
 		.ant-typography:first-child {
 			font-family: 'Geist Mono';
 			color: var(--bg-robin-400);
-			background-color: transparent;
 		}
 	}
 	.all-attributes-contribution {
@@ -239,7 +237,6 @@
 	}
 
 	.metric-metadata-value {
-		height: 67px;
 		background: rgba(22, 25, 34, 0.4);
 		overflow-x: scroll;
 		.field-renderer-container {
@@ -269,33 +266,6 @@
 			border-top-width: 0.5px !important;
 		}
 	}
-
-	.metrics-metadata-accordion {
-		.ant-collapse-item {
-			.ant-collapse-content {
-				height: 268px;
-
-				.ant-collapse-content-box {
-					.metrics-metadata-skeleton-container {
-						margin: 12px;
-					}
-				}
-			}
-		}
-	}
-
-	.metrics-all-attributes-accordion {
-		.ant-collapse-item {
-			.ant-collapse-content {
-				height: 600px;
-				.ant-collapse-content-box {
-					.all-attributes-skeleton-container {
-						margin: 12px;
-					}
-				}
-			}
-		}
-	}
 }
 
 .ant-select {
@@ -360,26 +330,18 @@
 .metric-details-content {
 	.metrics-accordion {
 		.metrics-accordion-header {
-			.action-menu {
-				.action-button {
-					.ant-typography {
-						color: var(--bg-slate-400);
-					}
+			.action-button {
+				.ant-typography {
+					color: var(--bg-slate-400);
 				}
 			}
 		}
 
 		.metrics-accordion-content {
 			.metric-metadata-key {
 				.field-renderer-container {
 					.label {
 						color: var(--bg-slate-300);
 					}
 				}
 
 			.all-attributes-key {
 				.ant-typography:last-child {
-					color: var(--bg-vanilla-200);
+					color: var(--bg-slate-400);
 					background-color: var(--bg-robin-300);
 				}
 			}
```
MetricDetails (drawer component):

```diff
@@ -2,9 +2,17 @@ import './MetricDetails.styles.scss';
 import '../Summary/Summary.styles.scss';
 
 import { Color } from '@signozhq/design-tokens';
-import { Button, Divider, Drawer, Empty, Typography } from 'antd';
+import {
+	Button,
+	Divider,
+	Drawer,
+	Empty,
+	Skeleton,
+	Tooltip,
+	Typography,
+} from 'antd';
 import logEvent from 'api/common/logEvent';
-import { useGetMetricMetadata } from 'hooks/metricsExplorer/v2/useGetMetricMetadata';
+import { useGetMetricDetails } from 'hooks/metricsExplorer/useGetMetricDetails';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { Compass, Crosshair, X } from 'lucide-react';
 import { useCallback, useEffect, useMemo } from 'react';
@@ -14,12 +22,16 @@ import ROUTES from '../../../constants/routes';
 import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
 import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
 import { isInspectEnabled } from '../Inspect/utils';
+import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
 import AllAttributes from './AllAttributes';
 import DashboardsAndAlertsPopover from './DashboardsAndAlertsPopover';
-import Highlights from './Highlights';
 import Metadata from './Metadata';
 import { MetricDetailsProps } from './types';
-import { getMetricDetailsQuery, transformMetricMetadata } from './utils';
+import {
+	formatNumberToCompactFormat,
+	formatTimestampToReadableDate,
+	getMetricDetailsQuery,
+} from './utils';
 
 function MetricDetails({
 	onClose,
@@ -31,25 +43,50 @@ function MetricDetails({
 	const { handleExplorerTabChange } = useHandleExplorerTabChange();
 
 	const {
-		data: metricMetadataResponse,
-		isLoading: isLoadingMetricMetadata,
-		isError: isErrorMetricMetadata,
-	} = useGetMetricMetadata(metricName ?? '', {
+		data,
+		isLoading,
+		isFetching,
+		error: metricDetailsError,
+		refetch: refetchMetricDetails,
+	} = useGetMetricDetails(metricName ?? '', {
 		enabled: !!metricName,
 	});
 
-	const metadata = transformMetricMetadata(metricMetadataResponse);
+	const metric = data?.payload?.data;
+
+	const lastReceived = useMemo(() => {
+		if (!metric) return null;
+		return formatTimestampToReadableDate(metric.lastReceived);
+	}, [metric]);
 
 	const showInspectFeature = useMemo(
-		() => isInspectEnabled(metadata?.metricType),
-		[metadata],
+		() => isInspectEnabled(metric?.metadata?.metric_type),
+		[metric],
 	);
 
+	const isMetricDetailsLoading = isLoading || isFetching;
+
+	const timeSeries = useMemo(() => {
+		if (!metric) return null;
+		const timeSeriesActive = formatNumberToCompactFormat(metric.timeSeriesActive);
+		const timeSeriesTotal = formatNumberToCompactFormat(metric.timeSeriesTotal);
+
+		return (
+			<Tooltip
+				title="Active time series are those that have received data points in the last 1
+			hour."
+				placement="top"
+			>
+				<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
+			</Tooltip>
+		);
+	}, [metric]);
+
 	const goToMetricsExplorerwithSelectedMetric = useCallback(() => {
 		if (metricName) {
 			const compositeQuery = getMetricDetailsQuery(
 				metricName,
-				metadata?.metricType,
+				metric?.metadata?.metric_type,
 			);
 			handleExplorerTabChange(
 				PANEL_TYPES.TIME_SERIES,
@@ -66,7 +103,9 @@ function MetricDetails({
 			[MetricsExplorerEventKeys.Modal]: 'metric-details',
 		});
 	}
-	}, [metricName, handleExplorerTabChange, metadata?.metricType]);
+	}, [metricName, handleExplorerTabChange, metric?.metadata?.metric_type]);
+
+	const isMetricDetailsError = metricDetailsError || !metric;
 
 	useEffect(() => {
 		logEvent(MetricsExplorerEvents.ModalOpened, {
@@ -74,10 +113,6 @@ function MetricDetails({
 		});
 	}, []);
 
-	if (!metricName) {
-		return <Empty description="Metric not found" />;
-	}
-
 	return (
 		<Drawer
 			width="60%"
@@ -85,7 +120,7 @@ function MetricDetails({
 			<div className="metric-details-header">
 				<div className="metric-details-title">
 					<Divider type="vertical" />
-					<Typography.Text>{metricName}</Typography.Text>
+					<Typography.Text>{metric?.name}</Typography.Text>
 				</div>
 				<div className="metric-details-header-buttons">
 					<Button
@@ -103,8 +138,8 @@ function MetricDetails({
 						aria-label="Inspect Metric"
 						icon={<Crosshair size={18} />}
 						onClick={(): void => {
-							if (metricName) {
-								openInspectModal(metricName);
+							if (metric?.name) {
+								openInspectModal(metric.name);
 							}
 						}}
 						data-testid="inspect-metric-button"
@@ -124,17 +159,60 @@ function MetricDetails({
 			destroyOnClose
 			closeIcon={<X size={16} />}
 		>
-			<div className="metric-details-content">
-				<Highlights metricName={metricName} />
-				<DashboardsAndAlertsPopover metricName={metricName} />
-				<Metadata
-					metricName={metricName}
-					metadata={metadata}
-					isErrorMetricMetadata={isErrorMetricMetadata}
-					isLoadingMetricMetadata={isLoadingMetricMetadata}
-				/>
-				<AllAttributes metricName={metricName} metricType={metadata?.metricType} />
-			</div>
+			{isMetricDetailsLoading && (
+				<div data-testid="metric-details-skeleton">
+					<Skeleton active />
+				</div>
+			)}
+			{isMetricDetailsError && !isMetricDetailsLoading && (
+				<Empty description="Error fetching metric details" />
+			)}
+			{!isMetricDetailsLoading && !isMetricDetailsError && (
+				<div className="metric-details-content">
+					<div className="metric-details-content-grid">
+						<div className="labels-row">
+							<Typography.Text type="secondary" className="metric-details-grid-label">
+								SAMPLES
+							</Typography.Text>
+							<Typography.Text type="secondary" className="metric-details-grid-label">
+								TIME SERIES
+							</Typography.Text>
+							<Typography.Text type="secondary" className="metric-details-grid-label">
+								LAST RECEIVED
+							</Typography.Text>
+						</div>
+						<div className="values-row">
+							<Typography.Text className="metric-details-grid-value">
+								<Tooltip title={metric?.samples.toLocaleString()}>
+									{formatNumberIntoHumanReadableFormat(metric?.samples)}
+								</Tooltip>
+							</Typography.Text>
+							<Typography.Text className="metric-details-grid-value">
+								<Tooltip title={timeSeries}>{timeSeries}</Tooltip>
+							</Typography.Text>
+							<Typography.Text className="metric-details-grid-value">
+								<Tooltip title={lastReceived}>{lastReceived}</Tooltip>
+							</Typography.Text>
+						</div>
+					</div>
+					<DashboardsAndAlertsPopover
+						dashboards={metric.dashboards}
+						alerts={metric.alerts}
+					/>
+					<Metadata
+						metricName={metric?.name}
+						metadata={metric.metadata}
+						refetchMetricDetails={refetchMetricDetails}
+					/>
+					{metric.attributes && (
+						<AllAttributes
+							metricName={metric?.name}
+							attributes={metric.attributes}
+							metricType={metric?.metadata?.metric_type}
+						/>
+					)}
+				</div>
+			)}
 		</Drawer>
 	);
 }
```
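The new drawer body gates three mutually exclusive states: skeleton while loading, an error empty-state, and the content grid otherwise. The same logic, reduced to a plain function for clarity (a sketch with the JSX stripped away):

```typescript
// Sketch of the state gating used in the Drawer body above.
type ViewState = 'loading' | 'error' | 'content';

function resolveViewState(
	isLoading: boolean,
	isFetching: boolean,
	hasError: boolean,
): ViewState {
	// Loading wins over error, mirroring the component's
	// `isMetricDetailsError && !isMetricDetailsLoading` guard.
	if (isLoading || isFetching) return 'loading';
	if (hasError) return 'error';
	return 'content';
}
```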
constants (metric details view):

```diff
@@ -1,6 +1,6 @@
 export const METRIC_METADATA_KEYS = {
 	description: 'Description',
 	unit: 'Unit',
-	metricType: 'Metric Type',
+	metric_type: 'Metric Type',
 	temporality: 'Temporality',
 };
```
types (metric details view):

```diff
@@ -1,4 +1,9 @@
-import { Temporality } from 'api/metricsExplorer/getMetricDetails';
+import {
+	MetricDetails,
+	MetricDetailsAlert,
+	MetricDetailsAttribute,
+	MetricDetailsDashboard,
+} from 'api/metricsExplorer/getMetricDetails';
 import { MetricType } from 'api/metricsExplorer/getMetricsList';
 
 export interface MetricDetailsProps {
@@ -9,21 +14,19 @@ export interface MetricDetailsProps {
 	openInspectModal: (metricName: string) => void;
 }
 
-export interface HighlightsProps {
-	metricName: string;
-}
 export interface DashboardsAndAlertsPopoverProps {
-	metricName: string;
+	dashboards: MetricDetailsDashboard[] | null;
+	alerts: MetricDetailsAlert[] | null;
 }
 
 export interface MetadataProps {
 	metricName: string;
-	metadata: MetricMetadata | null;
-	isErrorMetricMetadata: boolean;
-	isLoadingMetricMetadata: boolean;
+	metadata: MetricDetails['metadata'] | undefined;
+	refetchMetricDetails: () => void;
 }
 
 export interface AllAttributesProps {
+	attributes: MetricDetailsAttribute[];
 	metricName: string;
 	metricType: MetricType | undefined;
 }
@@ -33,51 +36,3 @@ export interface AllAttributesValueProps {
 	filterValue: string[];
 	goToMetricsExploreWithAppliedAttribute: (key: string, value: string) => void;
 }
-
-export interface MetricHighlight {
-	dataPoints: number;
-	lastReceived: number;
-	totalTimeSeries: number;
-	activeTimeSeries: number;
-}
-
-export interface MetricAlert {
-	alertName: string;
-	alertId: string;
-}
-
-export interface MetricDashboard {
-	dashboardName: string;
-	dashboardId: string;
-	widgetId: string;
-	widgetName: string;
-}
-
-export interface MetricMetadata {
-	metricType: MetricType;
-	description: string;
-	unit: string;
-	temporality: Temporality;
-	isMonotonic: boolean;
-}
-
-export interface MetricMetadataState {
-	metricType: MetricType;
-	description: string;
-	temporality: Temporality | undefined;
-	unit: string | undefined;
-}
-
-export interface MetricAttribute {
-	key: string;
-	values: string[];
-	valueCount: number;
-}
-
-export enum TableFields {
-	DESCRIPTION = 'description',
-	UNIT = 'unit',
-	METRIC_TYPE = 'metricType',
-	Temporality = 'temporality',
-	IS_MONOTONIC = 'isMonotonic',
-}
```
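The new `MetadataProps` leans on a TypeScript indexed access type: `MetricDetails['metadata']` reuses whatever shape the API layer declares instead of duplicating it in a local interface. A minimal sketch of the mechanism (the fields below are assumed for illustration, not copied from the real `MetricDetails` type):

```typescript
// Sketch of indexed access types: the alias tracks the source interface,
// so the prop type stays in sync if the API layer changes.
interface MetricDetails {
	name: string;
	metadata?: {
		metric_type: string;
		temporality: string;
		description: string;
		unit: string;
	};
}

type MetadataShape = MetricDetails['metadata'];

const example: MetadataShape = {
	metric_type: 'Sum',
	temporality: 'Cumulative',
	description: 'request count',
	unit: 'count',
};
```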
utils (metric details view):

```diff
@@ -2,29 +2,11 @@ import { Temporality } from 'api/metricsExplorer/getMetricDetails';
 import { MetricType } from 'api/metricsExplorer/getMetricsList';
 import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
 import { initialQueriesMap } from 'constants/queryBuilder';
-import { SuccessResponseV2 } from 'types/api';
-import {
-	GetMetricAlertsResponse,
-	GetMetricAttributesResponse,
-	GetMetricDashboardsResponse,
-	GetMetricHighlightsResponse,
-	GetMetricMetadataResponse,
-	UpdateMetricMetadataRequest,
-} from 'types/api/metricsExplorer/v2';
 import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { Query } from 'types/api/queryBuilder/queryBuilderData';
 import { DataSource } from 'types/common/queryBuilder';
 
-import {
-	MetricAlert,
-	MetricAttribute,
-	MetricDashboard,
-	MetricHighlight,
-	MetricMetadata,
-	MetricMetadataState,
-} from './types';
-
-export function formatTimestampToReadableDate(timestamp: number): string {
+export function formatTimestampToReadableDate(timestamp: string): string {
 	const date = new Date(timestamp);
 	const now = new Date();
 	const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000);
@@ -172,149 +154,3 @@ export function getMetricDetailsQuery(
 		},
 	};
 }
-
-export function transformMetricHighlights(
-	apiData: SuccessResponseV2<GetMetricHighlightsResponse> | undefined,
-): MetricHighlight | null {
-	if (!apiData || !apiData.data || !apiData.data.data) {
-		return null;
-	}
-
-	const {
-		dataPoints,
-		lastReceived,
-		totalTimeSeries,
-		activeTimeSeries,
-	} = apiData.data.data;
-
-	return {
-		dataPoints,
-		lastReceived,
-		totalTimeSeries,
-		activeTimeSeries,
-	};
-}
-
-export function transformMetricAlerts(
-	apiData: SuccessResponseV2<GetMetricAlertsResponse> | undefined,
-): MetricAlert[] {
-	if (
-		!apiData ||
-		!apiData.data ||
-		!apiData.data.data ||
-		!apiData.data.data.alerts
-	) {
-		return [];
-	}
-	return apiData.data.data.alerts.map((alert) => ({
-		alertName: alert.alertName,
-		alertId: alert.alertId,
-	}));
-}
-
-export function transformMetricDashboards(
-	apiData: SuccessResponseV2<GetMetricDashboardsResponse> | undefined,
-): MetricDashboard[] {
-	if (
-		!apiData ||
-		!apiData.data ||
-		!apiData.data.data ||
-		!apiData.data.data.dashboards
-	) {
-		return [];
-	}
-	const dashboards = apiData.data.data.dashboards.map((dashboard) => ({
-		dashboardName: dashboard.dashboardName,
-		dashboardId: dashboard.dashboardId,
-		widgetId: dashboard.widgetId,
-		widgetName: dashboard.widgetName,
-	}));
-	// Remove duplicate dashboards
-	return dashboards.filter(
-		(dashboard, index, self) =>
-			index === self.findIndex((t) => t.dashboardId === dashboard.dashboardId),
-	);
-}
-
-export function transformTemporality(temporality: string): Temporality {
-	switch (temporality) {
-		case 'delta':
-			return Temporality.DELTA;
-		case 'cumulative':
-			return Temporality.CUMULATIVE;
-		default:
-			return Temporality.DELTA;
-	}
-}
-
-export function transformMetricType(type: string): MetricType {
-	switch (type) {
-		case 'sum':
-			return MetricType.SUM;
-		case 'gauge':
-			return MetricType.GAUGE;
-		case 'summary':
-			return MetricType.SUMMARY;
-		case 'histogram':
-			return MetricType.HISTOGRAM;
-		case 'exponential_histogram':
-			return MetricType.EXPONENTIAL_HISTOGRAM;
-		default:
-			return MetricType.SUM;
-	}
-}
-
-export function transformMetricMetadata(
-	apiData: SuccessResponseV2<GetMetricMetadataResponse> | undefined,
-): MetricMetadata | null {
-	if (!apiData || !apiData.data || !apiData.data.data) {
-		return null;
-	}
-	const {
-		type,
-		description,
-		unit,
-		temporality,
-		isMonotonic,
-	} = apiData.data.data;
-
-	return {
-		metricType: transformMetricType(type),
-		description,
-		unit,
-		temporality: transformTemporality(temporality),
-		isMonotonic,
-	};
-}
-
-export function transformUpdateMetricMetadataRequest(
-	metricMetadata: MetricMetadataState,
-): UpdateMetricMetadataRequest {
-	return {
-		type: metricMetadata.metricType,
-		description: metricMetadata.description,
-		unit: metricMetadata.unit || '',
-		temporality: metricMetadata.temporality || '',
-		isMonotonic: determineIsMonotonic(
-			metricMetadata.metricType,
-			metricMetadata.temporality,
-		),
-	};
-}
-
-export function transformMetricAttributes(
-	apiData: SuccessResponseV2<GetMetricAttributesResponse> | undefined,
-): { attributes: MetricAttribute[]; totalKeys: number } {
-	if (!apiData || !apiData.data || !apiData.data.data) {
-		return { attributes: [], totalKeys: 0 };
-	}
-	const { attributes, totalKeys } = apiData.data.data;
-	return {
-		attributes: attributes.map((attribute) => ({
-			key: attribute.key,
-			values: attribute.values,
-			valueCount: attribute.valueCount,
-		})),
-		totalKeys,
-	};
-}
```
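All of the deleted transforms above follow one pattern: unwrap the `SuccessResponseV2` envelope layer by layer, fall back to a safe default when anything is missing, and map the API's field names onto the UI's models. A condensed sketch of that pattern, with types simplified from the originals:

```typescript
// Sketch of the transform-layer pattern being removed above.
interface Envelope<T> {
	// Mirrors SuccessResponseV2<{ data: T }> in simplified form.
	data: { data: T };
}

interface ApiAlert {
	alertName: string;
	alertId: string;
}

function transformAlerts(
	apiData: Envelope<{ alerts: ApiAlert[] | null }> | undefined,
): ApiAlert[] {
	// Defensive unwrapping: any missing layer yields an empty list,
	// so callers never have to branch on undefined.
	const alerts = apiData?.data?.data?.alerts;
	return alerts
		? alerts.map(({ alertName, alertId }) => ({ alertName, alertId }))
		: [];
}
```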
Deleted (hooks/metricsExplorer/v2 useGetMetricAlerts):

```diff
@@ -1,22 +0,0 @@
-import { getMetricAlerts } from 'api/metricsExplorer/v2/getMetricAlerts';
-import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
-import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
-import { SuccessResponseV2 } from 'types/api';
-import { GetMetricAlertsResponse } from 'types/api/metricsExplorer/v2';
-
-type UseGetMetricAlerts = (
-	metricName: string,
-	options?: UseQueryOptions<SuccessResponseV2<GetMetricAlertsResponse>, Error>,
-	headers?: Record<string, string>,
-) => UseQueryResult<SuccessResponseV2<GetMetricAlertsResponse>, Error>;
-
-export const useGetMetricAlerts: UseGetMetricAlerts = (
-	metricName,
-	options,
-	headers,
-) =>
-	useQuery<SuccessResponseV2<GetMetricAlertsResponse>, Error>({
-		queryFn: ({ signal }) => getMetricAlerts(metricName, signal, headers),
-		...options,
-		queryKey: [REACT_QUERY_KEY.GET_METRIC_ALERTS, metricName],
-	});
```
Deleted (hooks/metricsExplorer/v2 useGetMetricAttributes):

```diff
@@ -1,36 +0,0 @@
-import { getMetricAttributes } from 'api/metricsExplorer/v2/getMetricAttributes';
-import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
-import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
-import { SuccessResponseV2 } from 'types/api';
-import {
-	GetMetricAttributesRequest,
-	GetMetricAttributesResponse,
-} from 'types/api/metricsExplorer/v2';
-
-type UseGetMetricAttributes = (
-	requestData: GetMetricAttributesRequest,
-	options?: UseQueryOptions<
-		SuccessResponseV2<GetMetricAttributesResponse>,
-		Error
-	>,
-	headers?: Record<string, string>,
-) => UseQueryResult<SuccessResponseV2<GetMetricAttributesResponse>, Error>;
-
-export const useGetMetricAttributes: UseGetMetricAttributes = (
-	requestData,
-	options,
-	headers,
-) => {
-	const queryKey = [
-		REACT_QUERY_KEY.GET_METRIC_ATTRIBUTES,
-		requestData.metricName,
-		requestData.start,
-		requestData.end,
-	];
-
-	return useQuery<SuccessResponseV2<GetMetricAttributesResponse>, Error>({
-		queryFn: ({ signal }) => getMetricAttributes(requestData, signal, headers),
-		...options,
-		queryKey,
-	});
-};
```
Deleted (hooks/metricsExplorer/v2 useGetMetricDashboards):

```diff
@@ -1,25 +0,0 @@
-import { getMetricDashboards } from 'api/metricsExplorer/v2/getMetricDashboards';
-import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
-import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
-import { SuccessResponseV2 } from 'types/api';
-import { GetMetricDashboardsResponse } from 'types/api/metricsExplorer/v2';
-
-type UseGetMetricDashboards = (
-	metricName: string,
-	options?: UseQueryOptions<
-		SuccessResponseV2<GetMetricDashboardsResponse>,
-		Error
-	>,
-	headers?: Record<string, string>,
-) => UseQueryResult<SuccessResponseV2<GetMetricDashboardsResponse>, Error>;
-
-export const useGetMetricDashboards: UseGetMetricDashboards = (
-	metricName,
-	options,
-	headers,
-) =>
-	useQuery<SuccessResponseV2<GetMetricDashboardsResponse>, Error>({
-		queryFn: ({ signal }) => getMetricDashboards(metricName, signal, headers),
-		...options,
-		queryKey: [REACT_QUERY_KEY.GET_METRIC_DASHBOARDS, metricName],
-	});
```
@@ -1,25 +0,0 @@
|
||||
import { getMetricHighlights } from 'api/metricsExplorer/v2/getMetricHighlights';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import { GetMetricHighlightsResponse } from 'types/api/metricsExplorer/v2';
|
||||
|
||||
type UseGetMetricHighlights = (
|
||||
metricName: string,
|
||||
options?: UseQueryOptions<
|
||||
SuccessResponseV2<GetMetricHighlightsResponse>,
|
||||
Error
|
||||
>,
|
||||
headers?: Record<string, string>,
|
||||
) => UseQueryResult<SuccessResponseV2<GetMetricHighlightsResponse>, Error>;
|
||||
|
||||
export const useGetMetricHighlights: UseGetMetricHighlights = (
|
||||
metricName,
|
||||
options,
|
||||
headers,
|
||||
) =>
|
||||
useQuery<SuccessResponseV2<GetMetricHighlightsResponse>, Error>({
|
||||
queryFn: ({ signal }) => getMetricHighlights(metricName, signal, headers),
|
||||
...options,
|
||||
queryKey: [REACT_QUERY_KEY.GET_METRIC_HIGHLIGHTS, metricName],
|
||||
});
|
@@ -1,22 +0,0 @@
-import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
-import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
-import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
-import { SuccessResponseV2 } from 'types/api';
-import { GetMetricMetadataResponse } from 'types/api/metricsExplorer/v2';
-
-type UseGetMetricMetadata = (
-	metricName: string,
-	options?: UseQueryOptions<SuccessResponseV2<GetMetricMetadataResponse>, Error>,
-	headers?: Record<string, string>,
-) => UseQueryResult<SuccessResponseV2<GetMetricMetadataResponse>, Error>;
-
-export const useGetMetricMetadata: UseGetMetricMetadata = (
-	metricName,
-	options,
-	headers,
-) =>
-	useQuery<SuccessResponseV2<GetMetricMetadataResponse>, Error>({
-		queryFn: ({ signal }) => getMetricMetadata(metricName, signal, headers),
-		...options,
-		queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
-	});
@@ -1,22 +0,0 @@
-import updateMetricMetadata from 'api/metricsExplorer/v2/updateMetricMetadata';
-import { useMutation, UseMutationResult } from 'react-query';
-import { SuccessResponseV2 } from 'types/api';
-import {
-	UpdateMetricMetadataResponse,
-	UseUpdateMetricMetadataProps,
-} from 'types/api/metricsExplorer/v2';
-
-export function useUpdateMetricMetadata(): UseMutationResult<
-	SuccessResponseV2<UpdateMetricMetadataResponse>,
-	Error,
-	UseUpdateMetricMetadataProps
-> {
-	return useMutation<
-		SuccessResponseV2<UpdateMetricMetadataResponse>,
-		Error,
-		UseUpdateMetricMetadataProps
-	>({
-		mutationFn: ({ metricName, payload }) =>
-			updateMetricMetadata(metricName, payload),
-	});
-}
@@ -15,7 +15,7 @@ function NoData(): JSX.Element {
 			<Typography.Text className="not-found-text-1">
 				Uh-oh! We cannot show the selected trace.
 				<span className="not-found-text-2">
-					This can happen in either of the two scenraios -
+					This can happen in either of the two scenarios -
 				</span>
 			</Typography.Text>
 		</section>
@@ -1,77 +0,0 @@
-export interface GetMetricMetadataResponse {
-	status: string;
-	data: {
-		description: string;
-		type: string;
-		unit: string;
-		temporality: string;
-		isMonotonic: boolean;
-	};
-}
-
-export interface GetMetricHighlightsResponse {
-	status: string;
-	data: {
-		dataPoints: number;
-		lastReceived: number;
-		totalTimeSeries: number;
-		activeTimeSeries: number;
-	};
-}
-
-export interface GetMetricAttributesRequest {
-	metricName: string;
-	start?: number;
-	end?: number;
-}
-
-export interface GetMetricAttributesResponse {
-	status: string;
-	data: {
-		attributes: {
-			key: string;
-			values: string[];
-			valueCount: number;
-		}[];
-		totalKeys: number;
-	};
-}
-
-export interface GetMetricAlertsResponse {
-	status: string;
-	data: {
-		alerts: {
-			alertName: string;
-			alertId: string;
-		}[];
-	};
-}
-
-export interface GetMetricDashboardsResponse {
-	status: string;
-	data: {
-		dashboards: {
-			dashboardName: string;
-			dashboardId: string;
-			widgetId: string;
-			widgetName: string;
-		}[];
-	};
-}
-
-export interface UpdateMetricMetadataRequest {
-	type: string;
-	description: string;
-	temporality: string;
-	unit: string;
-	isMonotonic: boolean;
-}
-
-export interface UpdateMetricMetadataResponse {
-	status: string;
-}
-
-export interface UseUpdateMetricMetadataProps {
-	metricName: string;
-	payload: UpdateMetricMetadataRequest;
-}
@@ -1,10 +0,0 @@
-export function pluralize(
-	count: number,
-	singular: string,
-	plural: string,
-): string {
-	if (count === 1) {
-		return `${count} ${singular}`;
-	}
-	return `${count} ${plural}`;
-}
go.mod (2 changes)
@@ -11,6 +11,7 @@ require (
 	github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
 	github.com/antlr4-go/antlr/v4 v4.13.1
 	github.com/antonmedv/expr v1.15.3
+	github.com/bytedance/sonic v1.14.1
 	github.com/cespare/xxhash/v2 v2.3.0
 	github.com/coreos/go-oidc/v3 v3.14.1
 	github.com/dgraph-io/ristretto/v2 v2.3.0
@@ -89,7 +90,6 @@ require (

 require (
 	github.com/bytedance/gopkg v0.1.3 // indirect
-	github.com/bytedance/sonic v1.14.1 // indirect
 	github.com/bytedance/sonic/loader v0.3.0 // indirect
 	github.com/cloudwego/base64x v0.1.6 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
@@ -10,8 +10,8 @@ import (

 func (provider *provider) addPromoteRoutes(router *mux.Router) error {
 	if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.EditAccess(provider.promoteHandler.HandlePromoteAndIndexPaths), handler.OpenAPIDef{
-		ID:          "PromotePaths",
-		Tags:        []string{"promoted_paths", "logs", "json_logs"},
+		ID:          "HandlePromoteAndIndexPaths",
+		Tags:        []string{"logs"},
 		Summary:     "Promote and index paths",
 		Description: "This endpoints promotes and indexes paths",
 		Request:     new([]*promotetypes.PromotePath),
@@ -25,8 +25,8 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
 	}

 	if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.ViewAccess(provider.promoteHandler.ListPromotedAndIndexedPaths), handler.OpenAPIDef{
-		ID:          "PromotePaths",
-		Tags:        []string{"promoted_paths", "logs", "json_logs"},
+		ID:          "ListPromotedAndIndexedPaths",
+		Tags:        []string{"logs"},
 		Summary:     "Promote and index paths",
 		Description: "This endpoints promotes and indexes paths",
 		Request:     nil,
@@ -61,7 +61,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
 	response := []promotetypes.PromotePath{}
 	for _, path := range promotedPaths {
 		fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
-		path = telemetrylogs.BodyJSONStringSearchPrefix + path
+		path = telemetrytypes.BodyJSONStringSearchPrefix + path
 		item := promotetypes.PromotePath{
 			Path:    path,
 			Promote: true,
@@ -77,7 +77,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
 	// add the paths that are not promoted but have indexes
 	for path, indexes := range aggr {
 		path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
-		path = telemetrylogs.BodyJSONStringSearchPrefix + path
+		path = telemetrytypes.BodyJSONStringSearchPrefix + path
 		response = append(response, promotetypes.PromotePath{
 			Path:    path,
 			Indexes: indexes,
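The two hunks above only swap which package hosts the prefix constant; the rewrite itself is unchanged: strip the storage column prefix from a promoted path, then re-attach the user-facing search prefix. A minimal, self-contained sketch of that rewrite, assuming the storage prefix is "body_json." (hypothetical; the real value comes from the collector's constants package) while the search prefix "body." is confirmed later in this diff:

package main

import (
	"fmt"
	"strings"
)

const (
	bodyJSONColumnPrefix       = "body_json." // assumed value of BodyJSONColumnPrefix
	bodyJSONStringSearchPrefix = "body."      // value of BodyJSONStringSearchPrefix per this diff
)

func main() {
	// A stored path such as "body_json.user.name" is surfaced to users
	// as the search path "body.user.name".
	stored := "body_json.user.name"
	path := strings.TrimPrefix(stored, bodyJSONColumnPrefix)
	fmt.Println(bodyJSONStringSearchPrefix + path) // body.user.name
}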
@@ -10,9 +10,11 @@ import (

 	"github.com/ClickHouse/clickhouse-go/v2"
 	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/telemetrylogs"
 	"github.com/SigNoz/signoz/pkg/telemetrystore"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/bytedance/sonic"
 )

 type builderQuery[T any] struct {
@@ -248,6 +250,40 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
 		return nil, err
 	}

+	// merge body_json and promoted into body
+	if q.spec.Signal == telemetrytypes.SignalLogs {
+		switch typedPayload := payload.(type) {
+		case *qbtypes.RawData:
+			for _, rr := range typedPayload.Rows {
+				seeder := func() error {
+					body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
+					if !ok {
+						return nil
+					}
+					promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
+					if !ok {
+						return nil
+					}
+					seed(promoted, body)
+					str, err := sonic.MarshalString(body)
+					if err != nil {
+						return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
+					}
+					rr.Data["body"] = str
+					return nil
+				}
+				err := seeder()
+				if err != nil {
+					return nil, err
+				}
+
+				delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
+				delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
+			}
+			payload = typedPayload
+		}
+	}
+
 	return &qbtypes.Result{
 		Type:  q.kind,
 		Value: payload,
@@ -375,3 +411,18 @@ func decodeCursor(cur string) (int64, error) {
 	}
 	return strconv.ParseInt(string(b), 10, 64)
 }
+
+func seed(promoted map[string]any, body map[string]any) {
+	for key, fromValue := range promoted {
+		if toValue, ok := body[key]; !ok {
+			body[key] = fromValue
+		} else {
+			if fromValue, ok := fromValue.(map[string]any); ok {
+				if toValue, ok := toValue.(map[string]any); ok {
+					seed(fromValue, toValue)
+					body[key] = toValue
+				}
+			}
+		}
+	}
+}
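The new seed helper deep-merges the promoted column back into the body map: keys already present in body win, and only nested maps are merged recursively. A runnable sketch of the same semantics, using the algorithm from the diff above with hypothetical data:

package main

import "fmt"

// seed copies keys from promoted into body without overwriting scalars that
// body already has; nested maps are merged recursively (as in the diff above).
func seed(promoted, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else if fromMap, ok := fromValue.(map[string]any); ok {
			if toMap, ok := toValue.(map[string]any); ok {
				seed(fromMap, toMap)
				body[key] = toMap
			}
		}
	}
}

func main() {
	body := map[string]any{"user": map[string]any{"name": "alice"}}
	promoted := map[string]any{"user": map[string]any{"id": 7}, "level": "info"}
	seed(promoted, body)
	fmt.Println(body) // map[level:info user:map[id:7 name:alice]]
}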
@@ -14,6 +14,7 @@ import (
 	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/bytedance/sonic"
 )

 var (
@@ -51,7 +52,6 @@ func consume(rows driver.Rows, kind qbtypes.RequestType, queryWindow *qbtypes.Ti
 }

 func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbtypes.Step, queryName string) (*qbtypes.TimeSeriesData, error) {
-
 	colTypes := rows.ColumnTypes()
 	colNames := rows.Columns()

@@ -354,10 +354,22 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
 	colTypes := rows.ColumnTypes()
 	colCnt := len(colNames)

+	// Helper that decides scan target per column based on DB type
+	makeScanTarget := func(i int) any {
+		dbt := strings.ToUpper(colTypes[i].DatabaseTypeName())
+		if strings.HasPrefix(dbt, "JSON") {
+			// Since the driver fails to decode JSON/Dynamic into native Go values, we read it as raw bytes
+			// TODO: check in future if fixed in the driver
+			var v []byte
+			return &v
+		}
+		return reflect.New(colTypes[i].ScanType()).Interface()
+	}
+
 	// Build a template slice of correctly-typed pointers once
 	scanTpl := make([]any, colCnt)
-	for i, ct := range colTypes {
-		scanTpl[i] = reflect.New(ct.ScanType()).Interface()
+	for i := range colTypes {
+		scanTpl[i] = makeScanTarget(i)
 	}

 	var outRows []*qbtypes.RawRow
@@ -366,7 +378,7 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
 		// fresh copy of the scan slice (otherwise the driver reuses pointers)
 		scan := make([]any, colCnt)
 		for i := range scanTpl {
-			scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
+			scan[i] = makeScanTarget(i)
 		}

 		if err := rows.Scan(scan...); err != nil {
@@ -383,6 +395,28 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
 			// de-reference the typed pointer to any
 			val := reflect.ValueOf(cellPtr).Elem().Interface()

+			// Post-process JSON columns: normalize into structured values
+			if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
+				switch x := val.(type) {
+				case []byte:
+					if len(x) > 0 {
+						var v any
+						if err := sonic.Unmarshal(x, &v); err == nil {
+							val = v
+						}
+					}
+				case string:
+					if x != "" {
+						var v any
+						if err := sonic.Unmarshal([]byte(x), &v); err == nil {
+							val = v
+						}
+					}
+				default:
+					// already a structured type (map[string]any, []any, etc.)
+				}
+			}

 			// special-case: timestamp column
 			if name == "timestamp" || name == "timestamp_datetime" {
 				switch t := val.(type) {
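The scan-target change reads JSON-typed columns as raw bytes and normalizes them afterwards, because the driver cannot yet decode JSON/Dynamic into native Go values. The decode step in isolation, using the same sonic call (the sample bytes here are hypothetical):

package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

func main() {
	// Pretend the driver handed us raw bytes for a JSON-typed column.
	raw := []byte(`{"user":{"name":"alice","id":7}}`)

	// Decode into a generic value, as the post-processing step above does;
	// on decode failure the raw value would simply be kept as-is.
	var v any
	if err := sonic.Unmarshal(raw, &v); err == nil {
		fmt.Printf("%#v\n", v)
	}
}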
@@ -78,7 +78,7 @@ func newProvider(
 		telemetryMetadataStore,
 	)

-	traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
+	traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
 	traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
 		settings,
 		telemetryMetadataStore,
@@ -102,14 +102,13 @@ func newProvider(

 	// Create log statement builder
 	logFieldMapper := telemetrylogs.NewFieldMapper()
-	logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
+	logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, telemetryMetadataStore)
 	logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
 		settings,
 		resourceFilterFieldMapper,
 		resourceFilterConditionBuilder,
 		telemetryMetadataStore,
 		telemetrylogs.DefaultFullTextColumn,
-		telemetrylogs.BodyJSONStringSearchPrefix,
 		telemetrylogs.GetBodyJSONKey,
 	)
 	logAggExprRewriter := querybuilder.NewAggExprRewriter(
@@ -117,7 +116,6 @@ func newProvider(
 		telemetrylogs.DefaultFullTextColumn,
 		logFieldMapper,
 		logConditionBuilder,
-		telemetrylogs.BodyJSONStringSearchPrefix,
 		telemetrylogs.GetBodyJSONKey,
 	)
 	logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder(
@@ -128,7 +126,6 @@ func newProvider(
 		logResourceFilterStmtBuilder,
 		logAggExprRewriter,
 		telemetrylogs.DefaultFullTextColumn,
-		telemetrylogs.BodyJSONStringSearchPrefix,
 		telemetrylogs.GetBodyJSONKey,
 	)
@@ -11,13 +11,16 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/agentConf"
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
+	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
+	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/opamptypes"
 	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
+	"github.com/google/uuid"

 	"go.uber.org/zap"
 )
@@ -128,6 +131,40 @@ func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
 	return err
 }

+func (ic *LogParsingPipelineController) getDefaultPipelines() ([]pipelinetypes.GettablePipeline, error) {
+	defaultPipelines := []pipelinetypes.GettablePipeline{}
+	if querybuilder.BodyJSONQueryEnabled {
+		preprocessingPipeline := pipelinetypes.GettablePipeline{
+			StoreablePipeline: pipelinetypes.StoreablePipeline{
+				Name:    "Default Pipeline - PreProcessing Body",
+				Alias:   "NormalizeBodyDefault",
+				Enabled: true,
+			},
+			Filter: &v3.FilterSet{
+				Items: []v3.FilterItem{
+					{
+						Key: v3.AttributeKey{
+							Key: "body",
+						},
+						Operator: v3.FilterOperatorExists,
+					},
+				},
+			},
+			Config: []pipelinetypes.PipelineOperator{
+				{
+					ID:      uuid.NewString(),
+					Type:    "normalize",
+					Enabled: true,
+					If:      "body != nil",
+				},
+			},
+		}
+
+		defaultPipelines = append(defaultPipelines, preprocessingPipeline)
+	}
+	return defaultPipelines, nil
+}
+
 // Returns effective list of pipelines including user created
 // pipelines and pipelines for installed integrations
 func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
@@ -258,6 +295,13 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
 		return nil, "", err
 	}

+	// recommend default pipelines along with user created pipelines
+	defaultPipelines, err := pc.getDefaultPipelines()
+	if err != nil {
+		return nil, "", model.InternalError(fmt.Errorf("failed to get default pipelines: %w", err))
+	}
+	pipelinesResp.Pipelines = append(pipelinesResp.Pipelines, defaultPipelines...)
+
 	updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
 	if err != nil {
 		return nil, "", err
@@ -132,7 +132,7 @@ func SignozLogsToPLogs(logs []model.SignozLog) []plog.Logs {
 		slRecord.SetSeverityText(log.SeverityText)
 		slRecord.SetSeverityNumber(plog.SeverityNumber(log.SeverityNumber))

-		slRecord.Body().SetStr(log.Body)
+		slRecord.Body().FromRaw(log.Body)

 		slAttribs := slRecord.Attributes()
 		for k, v := range log.Attributes_int64 {
@@ -20,7 +20,6 @@ type aggExprRewriter struct {
 	fullTextColumn   *telemetrytypes.TelemetryFieldKey
 	fieldMapper      qbtypes.FieldMapper
 	conditionBuilder qbtypes.ConditionBuilder
-	jsonBodyPrefix   string
 	jsonKeyToKey     qbtypes.JsonKeyToFieldFunc
 }

@@ -31,7 +30,6 @@ func NewAggExprRewriter(
 	fullTextColumn *telemetrytypes.TelemetryFieldKey,
 	fieldMapper qbtypes.FieldMapper,
 	conditionBuilder qbtypes.ConditionBuilder,
-	jsonBodyPrefix string,
 	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
 ) *aggExprRewriter {
 	set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
@@ -41,7 +39,6 @@
 		fullTextColumn:   fullTextColumn,
 		fieldMapper:      fieldMapper,
 		conditionBuilder: conditionBuilder,
-		jsonBodyPrefix:   jsonBodyPrefix,
 		jsonKeyToKey:     jsonKeyToKey,
 	}
 }
@@ -81,7 +78,6 @@ func (r *aggExprRewriter) Rewrite(
 		r.fullTextColumn,
 		r.fieldMapper,
 		r.conditionBuilder,
-		r.jsonBodyPrefix,
 		r.jsonKeyToKey,
 	)
 	// Rewrite the first select item (our expression)
@@ -129,7 +125,6 @@ type exprVisitor struct {
 	fullTextColumn   *telemetrytypes.TelemetryFieldKey
 	fieldMapper      qbtypes.FieldMapper
 	conditionBuilder qbtypes.ConditionBuilder
-	jsonBodyPrefix   string
 	jsonKeyToKey     qbtypes.JsonKeyToFieldFunc
 	Modified         bool
 	chArgs           []any
@@ -142,7 +137,6 @@ func newExprVisitor(
 	fullTextColumn *telemetrytypes.TelemetryFieldKey,
 	fieldMapper qbtypes.FieldMapper,
 	conditionBuilder qbtypes.ConditionBuilder,
-	jsonBodyPrefix string,
 	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
 ) *exprVisitor {
 	return &exprVisitor{
@@ -151,7 +145,6 @@
 		fullTextColumn:   fullTextColumn,
 		fieldMapper:      fieldMapper,
 		conditionBuilder: conditionBuilder,
-		jsonBodyPrefix:   jsonBodyPrefix,
 		jsonKeyToKey:     jsonKeyToKey,
 	}
 }
@@ -190,7 +183,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
 	if aggFunc.FuncCombinator {
 		// Map the predicate (last argument)
 		origPred := args[len(args)-1].String()
-		whereClause, err := PrepareWhereClause(
+		whereClause, err := PrepareWhereClause(
 			origPred,
 			FilterExprVisitorOpts{
 				Logger: v.logger,
@@ -199,7 +192,7 @@
 				ConditionBuilder: v.conditionBuilder,
 				FullTextColumn:   v.fullTextColumn,
 				JsonKeyToKey:     v.jsonKeyToKey,
-			}, 0, 0,
+			}, 0, 0,
 		)
 		if err != nil {
 			return err
@@ -219,7 +212,7 @@
 		for i := 0; i < len(args)-1; i++ {
 			origVal := args[i].String()
 			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
-			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
+			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
 			if err != nil {
 				return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
 			}
@@ -237,7 +230,7 @@
 		for i, arg := range args {
 			orig := arg.String()
 			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
-			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
+			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
 			if err != nil {
 				return err
 			}
@@ -24,7 +24,6 @@ func CollisionHandledFinalExpr(
 	cb qbtypes.ConditionBuilder,
 	keys map[string][]*telemetrytypes.TelemetryFieldKey,
 	requiredDataType telemetrytypes.FieldDataType,
-	jsonBodyPrefix string,
 	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
 ) (string, []any, error) {

@@ -45,7 +44,7 @@

 	addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
 		sb := sqlbuilder.NewSelectBuilder()
-		condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
+		condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
 		if err != nil {
 			return err
 		}
@@ -58,8 +57,8 @@
 		return nil
 	}

-	colName, err := fm.FieldFor(ctx, field)
-	if errors.Is(err, qbtypes.ErrColumnNotFound) {
+	colName, fieldForErr := fm.FieldFor(ctx, field)
+	if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
 		// the key didn't have the right context to be added to the query
 		// we try to use the context we know of
 		keysForField := keys[field.Name]
@@ -82,10 +81,10 @@
 		correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
 		if found {
 			// we found a close match, in the error message send the suggestion
-			return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
+			return "", nil, errors.WithAdditionalf(fieldForErr, "%s", correction)
 		} else {
 			// not even a close match, return an error
-			return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
+			return "", nil, errors.WithAdditionalf(fieldForErr, "field `%s` not found", field.Name)
 		}
 	} else {
 		for _, key := range keysForField {
@@ -104,10 +103,11 @@
 		return "", nil, err
 	}

-	if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
-		// TODO(nitya): enable group by on body column?
+	// first if condition covers the older tests and second if condition covers the array conditions
+	if !BodyJSONQueryEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil {
+		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
+		// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
+	} else if strings.Contains(field.Name, telemetrytypes.ArraySep) || strings.Contains(field.Name, telemetrytypes.ArrayAnyIndex) {
+		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", field.Name)
 	} else {
 		colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
 	}
@@ -204,7 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
 	// While we expect user not to send the mixed data types, it inevitably happens
 	// So we handle the data type collisions here
 	switch key.FieldDataType {
-	case telemetrytypes.FieldDataTypeString:
+	case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
 		switch v := value.(type) {
 		case float64:
 			// try to convert the string value to to number
@@ -219,8 +219,36 @@
 			// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
 			value = fmt.Sprintf("%t", v)
 		}
+	case telemetrytypes.FieldDataTypeFloat64,
+		telemetrytypes.FieldDataTypeArrayFloat64:
+		switch v := value.(type) {
+		case float32, float64:
+			tblFieldName = castFloatHack(tblFieldName)
+		case string:
+			// check if it's a number inside a string
+			isNumber := false
+			if _, err := strconv.ParseFloat(v, 64); err == nil {
+				isNumber = true
+			}

-	case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
+			if !operator.IsComparisonOperator() || !isNumber {
+				// try to convert the number attribute to string
+				tblFieldName = castString(tblFieldName) // numeric col vs string literal
+			} else {
+				tblFieldName = castFloatHack(tblFieldName)
+			}
+		case []any:
+			if allFloats(v) {
+				tblFieldName = castFloatHack(tblFieldName)
+			} else if hasString(v) {
+				tblFieldName, value = castString(tblFieldName), toStrings(v)
+			}
+		}

 	case telemetrytypes.FieldDataTypeInt64,
 		telemetrytypes.FieldDataTypeArrayInt64,
 		telemetrytypes.FieldDataTypeNumber,
 		telemetrytypes.FieldDataTypeArrayNumber:
 		switch v := value.(type) {
 		// why? ; CH returns an error for a simple check
 		// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
@@ -258,7 +286,8 @@
 		}
 	}

-	case telemetrytypes.FieldDataTypeBool:
+	case telemetrytypes.FieldDataTypeBool,
+		telemetrytypes.FieldDataTypeArrayBool:
 		switch v := value.(type) {
 		case string:
 			tblFieldName = castString(tblFieldName)
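The new float handling above hinges on whether a string literal is secretly a number: comparison operators against numeric-looking strings keep the numeric cast, everything else falls back to a string cast. The check itself is plain strconv; a small standalone sketch:

package main

import (
	"fmt"
	"strconv"
)

// isNumericString mirrors the check in the diff: a value like "200" should
// still be comparable against a numeric column, while "OK" should not.
func isNumericString(v string) bool {
	_, err := strconv.ParseFloat(v, 64)
	return err == nil
}

func main() {
	fmt.Println(isNumericString("200"))  // true  -> keep numeric cast
	fmt.Println(isNumericString("3.14")) // true
	fmt.Println(isNumericString("OK"))   // false -> cast column to string
}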
@@ -43,7 +43,6 @@ type resourceFilterStatementBuilder[T any] struct {
 	signal telemetrytypes.Signal

 	fullTextColumn *telemetrytypes.TelemetryFieldKey
-	jsonBodyPrefix string
 	jsonKeyToKey   qbtypes.JsonKeyToFieldFunc
 }

@@ -76,7 +75,6 @@ func NewLogResourceFilterStatementBuilder(
 	conditionBuilder qbtypes.ConditionBuilder,
 	metadataStore telemetrytypes.MetadataStore,
 	fullTextColumn *telemetrytypes.TelemetryFieldKey,
-	jsonBodyPrefix string,
 	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
 ) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
 	set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
@@ -87,7 +85,6 @@
 		metadataStore:  metadataStore,
 		signal:         telemetrytypes.SignalLogs,
 		fullTextColumn: fullTextColumn,
-		jsonBodyPrefix: jsonBodyPrefix,
 		jsonKeyToKey:   jsonKeyToKey,
 	}
 }
@@ -100,12 +97,18 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
 		keySelectors = append(keySelectors, whereClauseSelectors...)
 	}

+	// exclude out the body related key selectors
+	filteredKeySelectors := []*telemetrytypes.FieldKeySelector{}
 	for idx := range keySelectors {
+		if keySelectors[idx].FieldContext == telemetrytypes.FieldContextBody {
+			continue
+		}
 		keySelectors[idx].Signal = b.signal
 		keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
+		filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
 	}

-	return keySelectors
+	return filteredKeySelectors
 }

 // Build builds a SQL query based on the given parameters
@@ -168,7 +171,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
 		// there is no need for "key" not found error for resource filtering
 		IgnoreNotFoundKeys: true,
 		Variables:          variables,
-	}, start, end)
+	}, start, end)

 	if err != nil {
 		return err
@@ -16,11 +16,12 @@ import (
 )

 type conditionBuilder struct {
-	fm qbtypes.FieldMapper
+	fm            qbtypes.FieldMapper
+	metadataStore telemetrytypes.MetadataStore
 }

-func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
-	return &conditionBuilder{fm: fm}
+func NewConditionBuilder(fm qbtypes.FieldMapper, metadataStore telemetrytypes.MetadataStore) *conditionBuilder {
+	return &conditionBuilder{fm: fm, metadataStore: metadataStore}
 }

 func (c *conditionBuilder) conditionFor(
@@ -30,22 +31,34 @@
 	value any,
 	sb *sqlbuilder.SelectBuilder,
 ) (string, error) {

-	switch operator {
-	case qbtypes.FilterOperatorContains,
-		qbtypes.FilterOperatorNotContains,
-		qbtypes.FilterOperatorILike,
-		qbtypes.FilterOperatorNotILike,
-		qbtypes.FilterOperatorLike,
-		qbtypes.FilterOperatorNotLike:
-		value = querybuilder.FormatValueForContains(value)
-	}
-
 	column, err := c.fm.ColumnFor(ctx, key)
 	if err != nil {
 		return "", err
 	}

+	// For JSON columns, preserve the original value type (numeric, bool, etc.)
+	// Only format to string for non-JSON columns that need string formatting
+	isJSONColumn := column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled && key.FieldContext == telemetrytypes.FieldContextBody
+	if !isJSONColumn {
+		switch operator {
+		case qbtypes.FilterOperatorContains,
+			qbtypes.FilterOperatorNotContains,
+			qbtypes.FilterOperatorILike,
+			qbtypes.FilterOperatorNotILike,
+			qbtypes.FilterOperatorLike,
+			qbtypes.FilterOperatorNotLike:
+			value = querybuilder.FormatValueForContains(value)
+		}
+	}
+
+	if isJSONColumn {
+		cond, err := c.buildJSONCondition(ctx, key, operator, value, sb)
+		if err != nil {
+			return "", err
+		}
+		return cond, nil
+	}
+
 	tblFieldName, err := c.fm.FieldFor(ctx, key)
 	if err != nil {
 		return "", err
@@ -163,9 +176,7 @@
 	// in the UI based query builder, `exists` and `not exists` are used for
 	// key membership checks, so depending on the column type, the condition changes
 	case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:

-		// Check if this is a body JSON search - by FieldContext
-		if key.FieldContext == telemetrytypes.FieldContextBody {
+		if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled {
 			if operator == qbtypes.FilterOperatorExists {
 				return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
 			} else {
@@ -247,7 +258,7 @@ func (c *conditionBuilder) ConditionFor(
 		return "", err
 	}

-	if operator.AddDefaultExistsFilter() {
+	if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
 		// skip adding exists filter for intrinsic fields
 		// with an exception for body json search
 		field, _ := c.fm.FieldFor(ctx, key)
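The reworked conditionFor skips string formatting entirely when the target is a JSON body column, so numeric and boolean literals keep their native types in the generated condition. A hypothetical sketch of that gating; formatForContains below is a stand-in for querybuilder.FormatValueForContains, whose exact behavior is not shown in this diff:

package main

import "fmt"

// formatForContains is a stand-in: wrap the value for a LIKE-style match.
func formatForContains(v any) any {
	return fmt.Sprintf("%%%v%%", v)
}

// prepareValue preserves native types for JSON columns and only formats
// values for non-JSON columns, mirroring the new isJSONColumn branch.
func prepareValue(isJSONColumn bool, operator string, v any) any {
	if isJSONColumn {
		return v
	}
	switch operator {
	case "contains", "ncontains", "like", "ilike", "nlike", "nilike":
		return formatForContains(v)
	}
	return v
}

func main() {
	fmt.Println(prepareValue(true, "contains", 200))   // 200 (kept numeric)
	fmt.Println(prepareValue(false, "contains", "ok")) // %ok%
}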
@@ -373,7 +373,8 @@ func TestConditionFor(t *testing.T) {
 	}

 	fm := NewFieldMapper()
-	conditionBuilder := NewConditionBuilder(fm)
+	mockMetadataStore := buildTestTelemetryMetadataStore()
+	conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)

 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
@@ -426,7 +427,8 @@ func TestConditionForMultipleKeys(t *testing.T) {
 	}

 	fm := NewFieldMapper()
-	conditionBuilder := NewConditionBuilder(fm)
+	mockMetadataStore := buildTestTelemetryMetadataStore()
+	conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)

 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
@@ -685,7 +687,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
 	}

 	fm := NewFieldMapper()
-	conditionBuilder := NewConditionBuilder(fm)
+	mockMetadataStore := buildTestTelemetryMetadataStore()
+	conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)

 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
@@ -2,7 +2,6 @@ package telemetrylogs

 import (
 	"github.com/SigNoz/signoz-otel-collector/constants"
-	"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 )
@@ -37,8 +36,6 @@

 	BodyJSONColumnPrefix     = constants.BodyJSONColumnPrefix
 	BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
-	ArraySep                 = jsontypeexporter.ArraySeparator
-	ArrayAnyIndex            = "[*]."
 )

 var (
@@ -48,8 +45,7 @@
 		FieldContext:  telemetrytypes.FieldContextLog,
 		FieldDataType: telemetrytypes.FieldDataTypeString,
 	}
-	BodyJSONStringSearchPrefix = `body.`
-	IntrinsicFields            = map[string]telemetrytypes.TelemetryFieldKey{
+	IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
 		"body": {
 			Name:   "body",
 			Signal: telemetrytypes.SignalLogs,
@@ -6,7 +6,9 @@ import (
 	"strings"

 	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
+	"github.com/SigNoz/signoz-otel-collector/utils"
 	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/querybuilder"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 	"github.com/huandu/go-sqlbuilder"
@@ -28,6 +30,11 @@ var (
 	"severity_text":   {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
 	"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
 	"body":            {Name: "body", Type: schema.ColumnTypeString},
+	LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
+		MaxDynamicTypes: utils.ToPointer(uint(32)),
+		MaxDynamicPaths: utils.ToPointer(uint(0)),
+	}},
+	LogsV2BodyPromotedColumn: {Name: LogsV2BodyPromotedColumn, Type: schema.JSONColumnType{}},
 	"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{
 		KeyType:   schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
 		ValueType: schema.ColumnTypeString,
@@ -83,13 +90,23 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
 			return logsV2Columns["attributes_bool"], nil
 		}
 	case telemetrytypes.FieldContextBody:
-		// body context fields are stored in the body column
+		// Body context is for JSON body fields
+		// Use body_json if feature flag is enabled
+		if querybuilder.BodyJSONQueryEnabled {
+			return logsV2Columns[LogsV2BodyJSONColumn], nil
+		}
+		// Fall back to legacy body column
 		return logsV2Columns["body"], nil
 	case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
 		col, ok := logsV2Columns[key.Name]
 		if !ok {
-			// check if the key has body JSON search (backward compatibility)
-			if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
+			// check if the key has body JSON search
+			if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
+				// Use body_json if feature flag is enabled and we have a body condition builder
+				if querybuilder.BodyJSONQueryEnabled {
+					return logsV2Columns[LogsV2BodyJSONColumn], nil
+				}
+				// Fall back to legacy body column
 				return logsV2Columns["body"], nil
 			}
 			return nil, qbtypes.ErrColumnNotFound
@@ -109,20 +126,33 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
 	switch column.Type.GetType() {
 	case schema.ColumnTypeEnumJSON:
-		// json is only supported for resource context as of now
-		if key.FieldContext != telemetrytypes.FieldContextResource {
-			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
-		}
-		oldColumn := logsV2Columns["resources_string"]
-		oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
+		switch key.FieldContext {
+		case telemetrytypes.FieldContextResource:
+			oldColumn := logsV2Columns["resources_string"]
+			oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)

-		// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
-		// once clickHouse dependency is updated, we need to check if we can remove it.
-		if key.Materialized {
-			oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
-			oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
-			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
-		} else {
+			// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
+			// once clickHouse dependency is updated, we need to check if we can remove it.
+			if key.Materialized {
+				oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
+				oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
+				return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
+			}
+			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
+		case telemetrytypes.FieldContextBody:
+			if strings.Contains(key.Name, telemetrytypes.ArraySep) || strings.Contains(key.Name, telemetrytypes.ArrayAnyIndex) {
+				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", key.Name)
+			}
+			fieldExpr := BodyJSONColumnPrefix + fmt.Sprintf("`%s`", key.Name)
+			expr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldExpr, key.JSONDataType.StringValue())
+			if key.Materialized {
+				promotedFieldExpr := BodyPromotedColumnPrefix + fmt.Sprintf("`%s`", key.Name)
+				expr = fmt.Sprintf("coalesce(%s, %s)", expr, fmt.Sprintf("dynamicElement(%s, '%s')", promotedFieldExpr, key.JSONDataType.StringValue()))
+			}
+			// returning qbtypes.ErrColumnNotFound is a hack that will trigger the fallback expr logic to include all the types for the key
+			return expr, qbtypes.ErrColumnNotFound
+		default:
+			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
+		}
 	case schema.ColumnTypeEnumLowCardinality:
 		switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
@@ -11,7 +11,7 @@ import (
 // TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
 func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
 	fm := NewFieldMapper()
-	cb := NewConditionBuilder(fm)
+	cb := NewConditionBuilder(fm, nil)

 	keys := buildCompleteFieldKeyMap()

@@ -33,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

 	for _, expr := range tests {
 		t.Run(expr, func(t *testing.T) {
-			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
+			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
 			require.NoError(t, err)
 			require.NotNil(t, clause)

@@ -47,7 +47,7 @@
 // TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
 func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
 	fm := NewFieldMapper()
-	cb := NewConditionBuilder(fm)
+	cb := NewConditionBuilder(fm, nil)

 	keys := buildCompleteFieldKeyMap()

@@ -69,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {

 	for _, expr := range tests {
 		t.Run(expr, func(t *testing.T) {
-			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
+			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
 			require.NoError(t, err)
 			require.NotNil(t, clause)

@@ -7,6 +7,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
 	"github.com/huandu/go-sqlbuilder"
 	"github.com/stretchr/testify/require"
 )
@@ -14,8 +15,7 @@
 // TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
 func TestFilterExprLogsBodyJSON(t *testing.T) {
 	fm := NewFieldMapper()
-	cb := NewConditionBuilder(fm)
-
+	cb := NewConditionBuilder(fm, telemetrytypestest.NewMockMetadataStore())
 	// Define a comprehensive set of field keys to support all test cases
 	keys := buildCompleteFieldKeyMap()

@@ -16,7 +16,7 @@ import (
 // TestFilterExprLogs tests a comprehensive set of query patterns for logs search
 func TestFilterExprLogs(t *testing.T) {
 	fm := NewFieldMapper()
-	cb := NewConditionBuilder(fm)
+	cb := NewConditionBuilder(fm, nil)

 	// Define a comprehensive set of field keys to support all test cases
 	keys := buildCompleteFieldKeyMap()
@@ -2423,7 +2423,7 @@ func TestFilterExprLogs(t *testing.T) {
 // TestFilterExprLogs tests a comprehensive set of query patterns for logs search
 func TestFilterExprLogsConflictNegation(t *testing.T) {
 	fm := NewFieldMapper()
-	cb := NewConditionBuilder(fm)
+	cb := NewConditionBuilder(fm, nil)

 	// Define a comprehensive set of field keys to support all test cases
 	keys := buildCompleteFieldKeyMap()
pkg/telemetrylogs/json_access_pb.go (new file, 149 lines)
@@ -0,0 +1,149 @@
+package telemetrylogs
+
+import (
+	"context"
+	"slices"
+	"strings"
+
+	"github.com/SigNoz/signoz/pkg/errors"
+	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+)
+
+var (
+	CodePlanIndexOutOfBounds = errors.MustNewCode("plan_index_out_of_bounds")
+)
+
+type JSONAccessPlanBuilder struct {
+	key        *telemetrytypes.TelemetryFieldKey
+	value      any
+	op         qbtypes.FilterOperator
+	parts      []string
+	getTypes   func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)
+	isPromoted bool
+}
+
+// buildPlan recursively builds the path plan tree
+func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, parent *telemetrytypes.JSONAccessNode, isDynArrChild bool) (*telemetrytypes.JSONAccessNode, error) {
+	if index >= len(pb.parts) {
+		return nil, errors.NewInvalidInputf(CodePlanIndexOutOfBounds, "index is out of bounds")
+	}
+
+	part := pb.parts[index]
+	pathSoFar := strings.Join(pb.parts[:index+1], telemetrytypes.ArraySep)
+	isTerminal := index == len(pb.parts)-1
+
+	// Calculate progression parameters based on parent's values
+	var maxTypes, maxPaths int
+	if isDynArrChild {
+		// Child of Dynamic array - reset progression to base values (16, 256)
+		// This happens when we switch from Array(Dynamic) to Array(JSON)
+		maxTypes = 16
+		maxPaths = 256
+	} else if parent != nil {
+		// Child of JSON array - use parent's progression divided by 2 and 4
+		maxTypes = parent.MaxDynamicTypes / 2
+		maxPaths = parent.MaxDynamicPaths / 4
+		if maxTypes < 0 {
+			maxTypes = 0
+		}
+		if maxPaths < 0 {
+			maxPaths = 0
+		}
+	}
+
+	types, err := pb.getTypes(ctx, pathSoFar)
+	if err != nil {
+		return nil, err
+	}
+
+	// Create node for this path segment
+	node := &telemetrytypes.JSONAccessNode{
+		Name:            part,
+		IsTerminal:      isTerminal,
+		AvailableTypes:  types,
+		Branches:        make(map[telemetrytypes.JSONAccessBranchType]*telemetrytypes.JSONAccessNode),
+		Parent:          parent,
+		MaxDynamicTypes: maxTypes,
+		MaxDynamicPaths: maxPaths,
+	}
+
+	hasJSON := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayJSON)
+	hasDynamic := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayDynamic)
+
+	// Configure terminal if this is the last part
+	if isTerminal {
+		valueType, _ := inferDataType(pb.value, pb.op, pb.key)
+		node.TerminalConfig = &telemetrytypes.TerminalConfig{
+			Key:       pb.key,
+			ElemType:  *pb.key.JSONDataType,
+			ValueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType],
+		}
+	} else {
+		if hasJSON {
+			node.Branches[telemetrytypes.BranchJSON], err = pb.buildPlan(ctx, index+1, node, false)
+			if err != nil {
+				return nil, err
+			}
+		}
+		if hasDynamic {
+			node.Branches[telemetrytypes.BranchDynamic], err = pb.buildPlan(ctx, index+1, node, true)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+
+	return node, nil
+}
+
+// PlanJSON builds a tree structure representing the complete JSON path traversal
+// that precomputes all possible branches and their types
+func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbtypes.FilterOperator,
+	value any,
+	getTypes func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error),
+) (telemetrytypes.JSONAccessPlan, error) {
+	// if path is empty, return nil
+	if key.Name == "" {
+		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "path is empty")
+	}
+
+	// TODO: PlanJSON requires the Start and End of the Query to select correct column between promoted and body_json using
+	// creation time in distributed_promoted_paths
+	path := strings.ReplaceAll(key.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
+	parts := strings.Split(path, telemetrytypes.ArraySep)
+
+	pb := &JSONAccessPlanBuilder{
+		key:        key,
+		op:         op,
+		value:      value,
+		parts:      parts,
+		getTypes:   getTypes,
+		isPromoted: key.Materialized,
+	}
+	plans := telemetrytypes.JSONAccessPlan{}
+
+	node, err := pb.buildPlan(ctx, 0,
+		telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn,
+			32, 0),
+		false,
+	)
+	if err != nil {
+		return nil, err
+	}
+	plans = append(plans, node)
+
+	if pb.isPromoted {
+		node, err := pb.buildPlan(ctx, 0,
+			telemetrytypes.NewRootJSONAccessNode(LogsV2BodyPromotedColumn,
+				32, 1024),
+			true,
+		)
+		if err != nil {
+			return nil, err
+		}
+		plans = append(plans, node)
+	}
+
+	return plans, nil
+}
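The plan builder's progression rules are simple arithmetic: a child under a JSON array halves the parent's MaxDynamicTypes and quarters its MaxDynamicPaths (clamped at zero, as in the code above), while crossing into an Array(Dynamic) child resets to the base values 16/256. A sketch of the ladder starting from the promoted root values used above (32 types, 1024 paths):

package main

import "fmt"

// next applies the JSON-array progression from the plan builder:
// types halve, paths quarter, both clamped at zero as in buildPlan.
func next(types, paths int) (int, int) {
	types, paths = types/2, paths/4
	if types < 0 {
		types = 0
	}
	if paths < 0 {
		paths = 0
	}
	return types, paths
}

func main() {
	// body_json_promoted root starts at (32, 1024) per the diff above.
	t, p := 32, 1024
	for depth := 1; depth <= 4; depth++ {
		t, p = next(t, p)
		fmt.Printf("depth %d: maxTypes=%d maxPaths=%d\n", depth, t, p)
	}
	// depth 1: 16/256, depth 2: 8/64, depth 3: 4/16, depth 4: 2/4
}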
pkg/telemetrylogs/json_access_pb_test.go (new file, 877 lines)
@@ -0,0 +1,877 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions for Test Data Creation
|
||||
// ============================================================================
|
||||
|
||||
// makeKey creates a TelemetryFieldKey for testing
|
||||
func makeKey(name string, dataType telemetrytypes.JSONDataType, materialized bool) *telemetrytypes.TelemetryFieldKey {
|
||||
return &telemetrytypes.TelemetryFieldKey{
|
||||
Name: name,
|
||||
JSONDataType: &dataType,
|
||||
Materialized: materialized,
|
||||
}
|
||||
}
|
||||
|
||||
// makeGetTypes creates a getTypes function from a map of path -> types
|
||||
func makeGetTypes(typesMap map[string][]telemetrytypes.JSONDataType) func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
|
||||
return func(_ context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
|
||||
return typesMap[path], nil
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions for Node Validation
|
||||
// ============================================================================
|
||||
|
||||
// jsonAccessTestNode is a test-only, YAML-friendly view of JSONAccessNode.
|
||||
// It intentionally omits Parent to avoid cycles and only keeps the fields
|
||||
// that are useful for understanding / asserting the plan structure.
|
||||
type jsonAccessTestNode struct {
|
||||
Name string `yaml:"name"`
|
||||
Column string `yaml:"column,omitempty"`
|
||||
IsTerminal bool `yaml:"isTerminal,omitempty"`
|
||||
MaxDynamicTypes int `yaml:"maxDynamicTypes,omitempty"`
|
||||
MaxDynamicPaths int `yaml:"maxDynamicPaths,omitempty"`
|
||||
ElemType string `yaml:"elemType,omitempty"`
|
||||
ValueType string `yaml:"valueType,omitempty"`
|
||||
AvailableTypes []string `yaml:"availableTypes,omitempty"`
|
||||
Branches map[string]*jsonAccessTestNode `yaml:"branches,omitempty"`
|
||||
}
|
||||
|
||||
// toTestNode converts a JSONAccessNode tree into jsonAccessTestNode so that
|
||||
// it can be serialized to YAML for easy visual comparison in tests.
|
||||
func toTestNode(n *telemetrytypes.JSONAccessNode) *jsonAccessTestNode {
|
||||
if n == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
out := &jsonAccessTestNode{
|
||||
Name: n.Name,
|
||||
IsTerminal: n.IsTerminal,
|
||||
MaxDynamicTypes: n.MaxDynamicTypes,
|
||||
MaxDynamicPaths: n.MaxDynamicPaths,
|
||||
}
|
||||
|
||||
// Column information for top-level plan nodes: their parent is the root,
|
||||
// whose parent is nil.
|
||||
if n.Parent != nil && n.Parent.Parent == nil {
|
||||
out.Column = n.Parent.Name
|
||||
}
|
||||
|
||||
// AvailableTypes as strings (using StringValue for stable representation)
|
||||
if len(n.AvailableTypes) > 0 {
|
||||
out.AvailableTypes = make([]string, 0, len(n.AvailableTypes))
|
||||
for _, t := range n.AvailableTypes {
|
||||
out.AvailableTypes = append(out.AvailableTypes, t.StringValue())
|
||||
}
|
||||
}
|
||||
|
||||
// Terminal config
|
||||
if n.TerminalConfig != nil {
|
||||
out.ElemType = n.TerminalConfig.ElemType.StringValue()
|
||||
out.ValueType = n.TerminalConfig.ValueType.StringValue()
|
||||
}
|
||||
|
||||
// Branches
|
||||
if len(n.Branches) > 0 {
|
||||
out.Branches = make(map[string]*jsonAccessTestNode, len(n.Branches))
|
||||
for bt, child := range n.Branches {
|
||||
out.Branches[bt.StringValue()] = toTestNode(child)
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// plansToYAML converts a slice of JSONAccessNode plans to a YAML string that
|
||||
// can be compared against a per-test expectedTree.
|
||||
func plansToYAML(t *testing.T, plans []*telemetrytypes.JSONAccessNode) string {
|
||||
t.Helper()
|
||||
|
||||
testNodes := make([]*jsonAccessTestNode, 0, len(plans))
|
||||
for _, p := range plans {
|
||||
testNodes = append(testNodes, toTestNode(p))
|
||||
}
|
||||
|
||||
got, err := yaml.Marshal(testNodes)
|
||||
require.NoError(t, err)
|
||||
return string(got)
|
||||
}
|
||||
|
||||
// ============================================================================
// Test Cases for Node Methods
// ============================================================================

func TestNode_Alias(t *testing.T) {
	tests := []struct {
		name     string
		node     *telemetrytypes.JSONAccessNode
		expected string
	}{
		{
			name:     "Root node returns name as-is",
			node:     telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			expected: LogsV2BodyJSONColumn,
		},
		{
			name: "Node without parent returns backticked name",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "user",
				Parent: nil,
			},
			expected: "`user`",
		},
		{
			name: "Node with root parent uses dot separator",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "age",
				Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			},
			expected: "`" + LogsV2BodyJSONColumn + ".age`",
		},
		{
			name: "Node with non-root parent uses array separator",
			node: &telemetrytypes.JSONAccessNode{
				Name: "name",
				Parent: &telemetrytypes.JSONAccessNode{
					Name:   "education",
					Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
				},
			},
			expected: "`" + LogsV2BodyJSONColumn + ".education[].name`",
		},
		{
			name: "Nested array path with multiple levels",
			node: &telemetrytypes.JSONAccessNode{
				Name: "type",
				Parent: &telemetrytypes.JSONAccessNode{
					Name: "awards",
					Parent: &telemetrytypes.JSONAccessNode{
						Name:   "education",
						Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
					},
				},
			},
			expected: "`" + LogsV2BodyJSONColumn + ".education[].awards[].type`",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.node.Alias()
			require.Equal(t, tt.expected, result)
		})
	}
}

func TestNode_FieldPath(t *testing.T) {
	tests := []struct {
		name     string
		node     *telemetrytypes.JSONAccessNode
		expected string
	}{
		{
			name: "Simple field path from root",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "user",
				Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			},
			// FieldPath() always wraps the field name in backticks
			expected: LogsV2BodyJSONColumn + ".`user`",
		},
		{
			name: "Field path with backtick-required key",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "user-name", // requires backtick
				Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			},
			expected: LogsV2BodyJSONColumn + ".`user-name`",
		},
		{
			name: "Nested field path",
			node: &telemetrytypes.JSONAccessNode{
				Name: "age",
				Parent: &telemetrytypes.JSONAccessNode{
					Name:   "user",
					Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
			expected: "`" + LogsV2BodyJSONColumn + ".user`.`age`",
		},
		{
			name: "Array element field path",
			node: &telemetrytypes.JSONAccessNode{
				Name: "name",
				Parent: &telemetrytypes.JSONAccessNode{
					Name:   "education",
					Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
			expected: "`" + LogsV2BodyJSONColumn + ".education`.`name`",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.node.FieldPath()
			require.Equal(t, tt.expected, result)
		})
	}
}
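
// Taken together, the two tests above pin down the contract between the two
// accessors (illustrative summary): Alias() flattens the whole path into one
// backticked identifier such as `body_json.education[].name`, suitable as a
// lambda argument, while FieldPath() quotes the parent alias and the leaf
// separately, as in `body_json.education`.`name`, suitable for dynamicElement().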

// ============================================================================
// Test Cases for PlanJSON
// ============================================================================

func TestPlanJSON_BasicStructure(t *testing.T) {
	_, getTypes := testTypeSet()

	tests := []struct {
		name         string
		key          *telemetrytypes.TelemetryFieldKey
		expectErr    bool
		expectedYAML string
	}{
		{
			name: "Simple path not promoted",
			key:  makeKey("user.name", telemetrytypes.String, false),
			expectedYAML: `
- name: user.name
  column: body_json
  availableTypes:
    - String
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
  valueType: String
`,
		},
		{
			name: "Simple path promoted",
			key:  makeKey("user.name", telemetrytypes.String, true),
			expectedYAML: `
- name: user.name
  column: body_json
  availableTypes:
    - String
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
  valueType: String
- name: user.name
  column: body_json_promoted
  availableTypes:
    - String
  maxDynamicTypes: 16
  maxDynamicPaths: 256
  isTerminal: true
  elemType: String
  valueType: String
`,
		},
		{
			name:         "Empty path returns error",
			key:          makeKey("", telemetrytypes.String, false),
			expectErr:    true,
			expectedYAML: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			plans, err := PlanJSON(context.Background(), tt.key, qbtypes.FilterOperatorEqual, "John", getTypes)
			if tt.expectErr {
				require.Error(t, err)
				require.Nil(t, plans)
				return
			}
			require.NoError(t, err)
			got := plansToYAML(t, plans)
			require.YAMLEq(t, tt.expectedYAML, got)
		})
	}
}

func TestPlanJSON_ArrayPaths(t *testing.T) {
	_, getTypes := testTypeSet()

	tests := []struct {
		name         string
		path         string
		expectedYAML string
	}{
		{
			name: "Single array level - JSON branch only",
			path: "education[].name",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: name
      availableTypes:
        - String
      maxDynamicTypes: 8
      isTerminal: true
      elemType: String
      valueType: String
`,
		},
		{
			name: "Single array level - both JSON and Dynamic branches",
			path: "education[].awards[].type",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
`,
		},
		{
			name: "Deeply nested array path",
			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
			expectedYAML: `
- name: interests
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: entities
      availableTypes:
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: reviews
          availableTypes:
            - Array(JSON)
          maxDynamicTypes: 4
          branches:
            json:
              name: entries
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 2
              branches:
                json:
                  name: metadata
                  availableTypes:
                    - Array(JSON)
                  maxDynamicTypes: 1
                  branches:
                    json:
                      name: positions
                      availableTypes:
                        - Array(JSON)
                      branches:
                        json:
                          name: name
                          availableTypes:
                            - String
                          isTerminal: true
                          elemType: String
                          valueType: String
`,
		},
		{
			name: "ArrayAnyIndex replacement [*] to []",
			path: "education[*].name",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: name
      availableTypes:
        - String
      maxDynamicTypes: 8
      isTerminal: true
      elemType: String
      valueType: String
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			key := makeKey(tt.path, telemetrytypes.String, false)
			plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
			require.NoError(t, err)
			require.NotNil(t, plans)
			require.Len(t, plans, 1)
			got := plansToYAML(t, plans)
			require.YAMLEq(t, tt.expectedYAML, got)
		})
	}
}

func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
	_, getTypes := testTypeSet()
	path := "education[].awards[].type"
	value := "sports"

	t.Run("Non-promoted plan", func(t *testing.T) {
		key := makeKey(path, telemetrytypes.String, false)
		plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
		require.NoError(t, err)
		require.Len(t, plans, 1)

		expectedYAML := `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
`
		got := plansToYAML(t, plans)
		require.YAMLEq(t, expectedYAML, got)
	})

	t.Run("Promoted plan", func(t *testing.T) {
		key := makeKey(path, telemetrytypes.String, true)
		plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
		require.NoError(t, err)
		require.Len(t, plans, 2)

		expectedYAML := `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
- name: education
  column: body_json_promoted
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  maxDynamicPaths: 256
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      maxDynamicPaths: 64
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          maxDynamicPaths: 16
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
`
		got := plansToYAML(t, plans)
		require.YAMLEq(t, expectedYAML, got)
	})
}

func TestPlanJSON_EdgeCases(t *testing.T) {
	_, getTypes := testTypeSet()

	tests := []struct {
		name         string
		path         string
		value        any
		expectedYAML string
	}{
		{
			name:  "Path with no available types",
			path:  "unknown.path",
			value: "test",
			expectedYAML: `
- name: unknown.path
  column: body_json
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
  valueType: String
`,
		},
		{
			name:  "Very deep nesting - validates progression doesn't go negative",
			path:  "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
			value: "Engineer",
			expectedYAML: `
- name: interests
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: entities
      availableTypes:
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: reviews
          availableTypes:
            - Array(JSON)
          maxDynamicTypes: 4
          branches:
            json:
              name: entries
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 2
              branches:
                json:
                  name: metadata
                  availableTypes:
                    - Array(JSON)
                  maxDynamicTypes: 1
                  branches:
                    json:
                      name: positions
                      availableTypes:
                        - Array(JSON)
                      branches:
                        json:
                          name: name
                          availableTypes:
                            - String
                          isTerminal: true
                          elemType: String
                          valueType: String
`,
		},
		{
			name:  "Path with mixed scalar and array types",
			path:  "education[].type",
			value: "high_school",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: type
      availableTypes:
        - String
        - Int64
      maxDynamicTypes: 8
      isTerminal: true
      elemType: String
      valueType: String
`,
		},
		{
			name:  "Exists with only array types available",
			path:  "education",
			value: nil,
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  isTerminal: true
  elemType: Array(JSON)
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Choose key type based on path; operator does not affect the tree shape asserted here.
			keyType := telemetrytypes.String
			switch tt.path {
			case "education":
				keyType = telemetrytypes.ArrayJSON
			case "education[].type":
				keyType = telemetrytypes.String
			}
			key := makeKey(tt.path, keyType, false)
			plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, tt.value, getTypes)
			require.NoError(t, err)
			got := plansToYAML(t, plans)
			require.YAMLEq(t, tt.expectedYAML, got)
		})
	}
}

func TestPlanJSON_TreeStructure(t *testing.T) {
	_, getTypes := testTypeSet()
	path := "education[].awards[].participated[].team[].branch"
	key := makeKey(path, telemetrytypes.String, false)
	plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
	require.NoError(t, err)
	require.Len(t, plans, 1)

	expectedYAML := `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: participated
          availableTypes:
            - Array(Dynamic)
            - Array(JSON)
          maxDynamicTypes: 4
          branches:
            json:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 2
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 1
                  isTerminal: true
                  elemType: String
                  valueType: String
            dynamic:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 16
              maxDynamicPaths: 256
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 8
                  maxDynamicPaths: 64
                  isTerminal: true
                  elemType: String
                  valueType: String
        dynamic:
          name: participated
          availableTypes:
            - Array(Dynamic)
            - Array(JSON)
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          branches:
            json:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 8
              maxDynamicPaths: 64
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 4
                  maxDynamicPaths: 16
                  isTerminal: true
                  elemType: String
                  valueType: String
            dynamic:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 16
              maxDynamicPaths: 256
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 8
                  maxDynamicPaths: 64
                  isTerminal: true
                  elemType: String
                  valueType: String
`

	got := plansToYAML(t, plans)
	require.YAMLEq(t, expectedYAML, got)
}

// ============================================================================
// Test Data Setup
// ============================================================================

// testTypeSet returns a map of path->types and a getTypes function for testing.
// This represents the type information available in the test JSON structure.
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
	types := map[string][]telemetrytypes.JSONDataType{
		"user.name":                     {telemetrytypes.String},
		"user.age":                      {telemetrytypes.Int64, telemetrytypes.String},
		"user.height":                   {telemetrytypes.Float64},
		"education":                     {telemetrytypes.ArrayJSON},
		"education[].name":              {telemetrytypes.String},
		"education[].type":              {telemetrytypes.String, telemetrytypes.Int64},
		"education[].internal_type":     {telemetrytypes.String},
		"education[].metadata.location": {telemetrytypes.String},
		"education[].parameters":        {telemetrytypes.ArrayFloat64, telemetrytypes.ArrayDynamic},
		"education[].duration":          {telemetrytypes.String},
		"education[].mode":              {telemetrytypes.String},
		"education[].year":              {telemetrytypes.Int64},
		"education[].field":             {telemetrytypes.String},
		"education[].awards":            {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
		"education[].awards[].name":     {telemetrytypes.String},
		"education[].awards[].rank":     {telemetrytypes.Int64},
		"education[].awards[].medal":    {telemetrytypes.String},
		"education[].awards[].type":     {telemetrytypes.String},
		"education[].awards[].semester": {telemetrytypes.Int64},
		"education[].awards[].participated":                  {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
		"education[].awards[].participated[].type":           {telemetrytypes.String},
		"education[].awards[].participated[].field":          {telemetrytypes.String},
		"education[].awards[].participated[].project_type":   {telemetrytypes.String},
		"education[].awards[].participated[].project_name":   {telemetrytypes.String},
		"education[].awards[].participated[].race_type":      {telemetrytypes.String},
		"education[].awards[].participated[].team_based":     {telemetrytypes.Bool},
		"education[].awards[].participated[].team_name":      {telemetrytypes.String},
		"education[].awards[].participated[].team":           {telemetrytypes.ArrayJSON},
		"education[].awards[].participated[].team[].name":    {telemetrytypes.String},
		"education[].awards[].participated[].team[].branch":  {telemetrytypes.String},
		"education[].awards[].participated[].team[].semester": {telemetrytypes.Int64},
		"interests":                             {telemetrytypes.ArrayJSON},
		"interests[].type":                      {telemetrytypes.String},
		"interests[].entities":                  {telemetrytypes.ArrayJSON},
		"interests[].entities.application_date": {telemetrytypes.String},
		"interests[].entities[].reviews":        {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].given_by":      {telemetrytypes.String},
		"interests[].entities[].reviews[].remarks":       {telemetrytypes.String},
		"interests[].entities[].reviews[].weight":        {telemetrytypes.Float64},
		"interests[].entities[].reviews[].passed":        {telemetrytypes.Bool},
		"interests[].entities[].reviews[].type":          {telemetrytypes.String},
		"interests[].entities[].reviews[].analysis_type": {telemetrytypes.Int64},
		"interests[].entities[].reviews[].entries":       {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].entries[].subject":  {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].status":   {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata": {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].entries[].metadata[].company":    {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].experience": {telemetrytypes.Int64},
		"interests[].entities[].reviews[].entries[].metadata[].unit":       {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].positions":  {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].name":     {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].duration": {telemetrytypes.Int64, telemetrytypes.Float64},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].unit":     {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings":  {telemetrytypes.ArrayInt64, telemetrytypes.ArrayString},
		"message": {telemetrytypes.String},
	}

	return types, makeGetTypes(types)
}
455 pkg/telemetrylogs/json_condition_builder.go Normal file
@@ -0,0 +1,455 @@
package telemetrylogs

import (
	"context"
	"fmt"
	"slices"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
)

var (
	CodeCurrentNodeNil           = errors.MustNewCode("current_node_nil")
	CodeNextNodeNil              = errors.MustNewCode("next_node_nil")
	CodeNestedExpressionsEmpty   = errors.MustNewCode("nested_expressions_empty")
	CodeGroupByPlanEmpty         = errors.MustNewCode("group_by_plan_empty")
	CodeArrayMapExpressionsEmpty = errors.MustNewCode("array_map_expressions_empty")
	CodePromotedPlanMissing      = errors.MustNewCode("promoted_plan_missing")
	CodeArrayNavigationFailed    = errors.MustNewCode("array_navigation_failed")
)

func (c *conditionBuilder) getTypes(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
	keys, _, err := c.metadataStore.GetKeys(ctx, &telemetrytypes.FieldKeySelector{
		Name:              path,
		SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
		Signal:            telemetrytypes.SignalLogs,
		Limit:             1,
	})
	if err != nil {
		return nil, err
	}
	types := []telemetrytypes.JSONDataType{}
	for _, key := range keys[path] {
		if key.JSONDataType != nil {
			types = append(types, *key.JSONDataType)
		}
	}
	return types, nil
}

// buildJSONCondition builds the full WHERE condition for body_json JSON paths
func (c *conditionBuilder) buildJSONCondition(ctx context.Context, key *telemetrytypes.TelemetryFieldKey,
	operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {

	plans, err := PlanJSON(ctx, key, operator, value, c.getTypes)
	if err != nil {
		return "", err
	}

	conditions := []string{}
	for _, plan := range plans {
		condition, err := c.emitPlannedCondition(plan, operator, value, sb)
		if err != nil {
			return "", err
		}
		conditions = append(conditions, condition)
	}
	return sb.Or(conditions...), nil
}
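
// For illustration only (a sketch, assuming a promoted scalar path like
// user.name): promotion yields two plans, so the emitted condition is an OR of
// the per-plan terminal conditions, roughly
//
//	(dynamicElement(body_json.`user.name`, 'String') = ?
//	 OR dynamicElement(body_json_promoted.`user.name`, 'String') = ?)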

// emitPlannedCondition handles paths with array traversal
func (c *conditionBuilder) emitPlannedCondition(plan *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	// Build traversal + terminal recursively per-hop
	compiled, err := c.recurseArrayHops(plan, operator, value, sb)
	if err != nil {
		return "", err
	}

	// sb.AddWhereClause(sqlbuilder.NewWhereClause().AddWhereExpr(sb.Args, compiled))
	return compiled, nil
}

// buildTerminalCondition creates the innermost condition
func (c *conditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	// Use the parent's alias + current field name for the full path
	fieldPath := node.FieldPath()

	if node.TerminalConfig.ElemType.IsArray {
		// switch operator for array membership checks
		switch operator {
		case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
			operator = qbtypes.FilterOperatorEqual
		case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
			operator = qbtypes.FilterOperatorNotEqual
		}
		arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
		if err != nil {
			return "", err
		}
		return arrayCond, nil
	}
	conditions := []string{}

	elemType := node.TerminalConfig.ElemType
	fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
	fieldExpr, value = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, fieldExpr, operator)

	indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
		return index.Type == elemType && index.ColumnExpression == fieldPath
	})
	if elemType.IndexSupported && indexed {
		indexedExpr := assumeNotNull(fieldPath, elemType)
		emptyValue := func() any {
			switch elemType {
			case telemetrytypes.String:
				return ""
			case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
				return 0
			default:
				return nil
			}
		}()

		// switch the operator and value for exists and not exists
		switch operator {
		case qbtypes.FilterOperatorExists:
			operator = qbtypes.FilterOperatorNotEqual
			value = emptyValue
		case qbtypes.FilterOperatorNotExists:
			operator = qbtypes.FilterOperatorEqual
			value = emptyValue
		default:
			// do nothing
		}

		cond, err := c.applyOperator(sb, indexedExpr, operator, value)
		if err != nil {
			return "", err
		}
		conditions = append(conditions, cond)
		// Switch operator to EXISTS
		operator = qbtypes.FilterOperatorExists
	}

	cond, err := c.applyOperator(sb, fieldExpr, operator, value)
	if err != nil {
		return "", err
	}
	conditions = append(conditions, cond)
	if len(conditions) > 1 {
		return sb.And(conditions...), nil
	}
	return cond, nil
}
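
// Worked example of the index fast path above (a sketch, assuming an indexed
// String terminal queried with EXISTS): the indexed expression is compared
// against the type's empty value, then ANDed with the plain NULL check, giving
//
//	(assumeNotNull(dynamicElement(<fieldPath>, 'String')) <> ''
//	 AND dynamicElement(<fieldPath>, 'String') IS NOT NULL)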

// buildArrayMembershipCondition handles array membership checks
func (c *conditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	arrayPath := node.FieldPath()

	// create typed array out of a dynamic array
	filteredDynamicExpr := func() string {
		baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
		return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
			node.TerminalConfig.ValueType.StringValue(),
			node.TerminalConfig.ValueType.StringValue(),
			baseArrayDynamicExpr)
	}
	typedArrayExpr := func() string {
		return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
	}

	var arrayExpr string
	if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
		arrayExpr = filteredDynamicExpr()
	} else {
		arrayExpr = typedArrayExpr()
	}

	fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, "x", operator)
	op, err := c.applyOperator(sb, fieldExpr, operator, value)
	if err != nil {
		return "", err
	}

	return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}
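
// Shape of the emitted membership check (illustrative, assuming the collision
// handler leaves the lambda variable "x" untouched): for a typed Array(String)
// terminal with an equality value this produces
//
//	arrayExists(x -> x = ?, dynamicElement(<fieldPath>, 'Array(String)'))
//
// while Array(Dynamic) terminals first narrow the array to the value type via
// the arrayFilter/arrayMap pair above.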

// recurseArrayHops recursively builds array traversal conditions
func (c *conditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	if current == nil {
		return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
	}

	if current.IsTerminal {
		terminalCond, err := c.buildTerminalCondition(current, operator, value, sb)
		if err != nil {
			return "", err
		}
		return terminalCond, nil
	}

	currAlias := current.Alias()
	fieldPath := current.FieldPath()
	// Determine availability of Array(JSON) and Array(Dynamic) at this hop
	hasArrayJSON := current.Branches[telemetrytypes.BranchJSON] != nil
	hasArrayDynamic := current.Branches[telemetrytypes.BranchDynamic] != nil

	// Then, at this hop, compute child per branch and wrap
	branches := make([]string, 0, 2)
	if hasArrayJSON {
		jsonArrayExpr := fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')", fieldPath, current.MaxDynamicTypes, current.MaxDynamicPaths)
		childGroupJSON, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchJSON], operator, value, sb)
		if err != nil {
			return "", err
		}
		branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupJSON, jsonArrayExpr))
	}
	if hasArrayDynamic {
		dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", fieldPath)
		dynFilteredExpr := fmt.Sprintf("arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)

		// Create the query for the Dynamic array
		childGroupDyn, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchDynamic], operator, value, sb)
		if err != nil {
			return "", err
		}
		branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupDyn, dynFilteredExpr))
	}

	if len(branches) == 1 {
		return branches[0], nil
	}
	return fmt.Sprintf("(%s)", strings.Join(branches, " OR ")), nil
}
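
// Putting the hops together, a sketch for "education[].name" with only an
// Array(JSON) branch at the root (max_dynamic_types=16, max_dynamic_paths=0):
//
//	arrayExists(`body_json.education`-> dynamicElement(`body_json.education`.`name`, 'String') = ?,
//	            dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))
//
// When both branches exist at a hop, the two arrayExists() groups are ORed.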

func (c *conditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, fieldExpr string, operator qbtypes.FilterOperator, value any) (string, error) {
	switch operator {
	case qbtypes.FilterOperatorEqual:
		return sb.E(fieldExpr, value), nil
	case qbtypes.FilterOperatorNotEqual:
		return sb.NE(fieldExpr, value), nil
	case qbtypes.FilterOperatorGreaterThan:
		return sb.G(fieldExpr, value), nil
	case qbtypes.FilterOperatorGreaterThanOrEq:
		return sb.GE(fieldExpr, value), nil
	case qbtypes.FilterOperatorLessThan:
		return sb.LT(fieldExpr, value), nil
	case qbtypes.FilterOperatorLessThanOrEq:
		return sb.LE(fieldExpr, value), nil
	case qbtypes.FilterOperatorLike:
		return sb.Like(fieldExpr, value), nil
	case qbtypes.FilterOperatorNotLike:
		return sb.NotLike(fieldExpr, value), nil
	case qbtypes.FilterOperatorILike:
		return sb.ILike(fieldExpr, value), nil
	case qbtypes.FilterOperatorNotILike:
		return sb.NotILike(fieldExpr, value), nil
	case qbtypes.FilterOperatorRegexp:
		return fmt.Sprintf("match(%s, %s)", fieldExpr, sb.Var(value)), nil
	case qbtypes.FilterOperatorNotRegexp:
		return fmt.Sprintf("NOT match(%s, %s)", fieldExpr, sb.Var(value)), nil
	case qbtypes.FilterOperatorContains:
		return sb.ILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
	case qbtypes.FilterOperatorNotContains:
		return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
	case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
		// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
		values, ok := value.([]any)
		if !ok {
			values = []any{value}
		}
		conds := []string{}
		for _, v := range values {
			if operator == qbtypes.FilterOperatorIn {
				conds = append(conds, sb.E(fieldExpr, v))
			} else {
				conds = append(conds, sb.NE(fieldExpr, v))
			}
		}
		if operator == qbtypes.FilterOperatorIn {
			return sb.Or(conds...), nil
		}
		return sb.And(conds...), nil
	case qbtypes.FilterOperatorExists:
		return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
	case qbtypes.FilterOperatorNotExists:
		return fmt.Sprintf("%s IS NULL", fieldExpr), nil
	default:
		return "", qbtypes.ErrUnsupportedOperator
	}
}
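
// Note the IN/NOT IN expansion above: IN ('a', 'b') compiles to
// (field = 'a' OR field = 'b') and NOT IN to (field <> 'a' AND field <> 'b'),
// so membership checks hit the same per-type indexes as plain equality.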

// GroupByArrayJoinInfo contains information about array joins needed for GroupBy
type GroupByArrayJoinInfo struct {
	ArrayJoinClauses []string // ARRAY JOIN clauses to add to FROM clause
	TerminalExpr     string   // Terminal field expression for SELECT/GROUP BY
}
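
// Usage sketch (mirroring the statement builders that consume this struct):
// each clause is appended verbatim after the table reference, e.g.
//
//	FROM <db>.<logs_table> ARRAY JOIN arrayFlatten(arrayConcat(...)) AS `<key name>`
//
// while TerminalExpr is what SELECT and GROUP BY then reference.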

// BuildGroupBy builds GroupBy information for body JSON fields using the arrayConcat pattern
func (c *conditionBuilder) BuildGroupBy(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*GroupByArrayJoinInfo, error) {
	path := strings.TrimPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix)

	plan, err := PlanJSON(ctx, key, qbtypes.FilterOperatorExists, nil, c.getTypes)
	if err != nil {
		return nil, err
	}

	if len(plan) == 0 {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
			"Could not find any valid paths for: %s", path)
	}

	if plan[0].IsTerminal {
		node := plan[0]

		expr := fmt.Sprintf("dynamicElement(%s, '%s')", node.FieldPath(), node.TerminalConfig.ElemType.StringValue())
		if key.Materialized {
			if len(plan) < 2 {
				return nil, errors.Newf(errors.TypeUnexpected, CodePromotedPlanMissing,
					"plan length is less than 2 for promoted path: %s", path)
			}

			// promoted column first, then the body_json column
			// TODO(Piyush): Change this in future for better performance
			expr = fmt.Sprintf("coalesce(%s, %s)",
				fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
				expr,
			)
		}

		return &GroupByArrayJoinInfo{
			ArrayJoinClauses: []string{},
			TerminalExpr:     expr,
		}, nil
	}

	// Build arrayConcat pattern directly from the tree structure
	arrayConcatExpr, err := c.buildArrayConcat(plan)
	if err != nil {
		return nil, err
	}

	// Create single ARRAY JOIN clause with arrayFlatten
	arrayJoinClause := fmt.Sprintf("ARRAY JOIN %s AS `%s`", arrayConcatExpr, key.Name)

	return &GroupByArrayJoinInfo{
		ArrayJoinClauses: []string{arrayJoinClause},
		TerminalExpr:     fmt.Sprintf("`%s`", key.Name),
	}, nil
}

// buildArrayConcat builds the arrayConcat pattern directly from the tree structure
func (c *conditionBuilder) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (string, error) {
	if len(plan) == 0 {
		return "", errors.Newf(errors.TypeInternal, CodeGroupByPlanEmpty, "group by plan is empty while building arrayConcat")
	}

	// Build arrayMap expressions for ALL available branches at the root level
	var arrayMapExpressions []string
	for _, node := range plan {
		hasJSON := node.Branches[telemetrytypes.BranchJSON] != nil
		hasDynamic := node.Branches[telemetrytypes.BranchDynamic] != nil

		if hasJSON {
			jsonExpr, err := c.buildArrayMap(node, telemetrytypes.BranchJSON)
			if err != nil {
				return "", err
			}
			arrayMapExpressions = append(arrayMapExpressions, jsonExpr)
		}

		if hasDynamic {
			dynamicExpr, err := c.buildArrayMap(node, telemetrytypes.BranchDynamic)
			if err != nil {
				return "", err
			}
			arrayMapExpressions = append(arrayMapExpressions, dynamicExpr)
		}
	}
	if len(arrayMapExpressions) == 0 {
		return "", errors.Newf(errors.TypeInternal, CodeArrayMapExpressionsEmpty, "array map expressions are empty while building arrayConcat")
	}

	// Build the arrayConcat expression
	arrayConcatExpr := fmt.Sprintf("arrayConcat(%s)", strings.Join(arrayMapExpressions, ", "))

	// Wrap with arrayFlatten
	arrayFlattenExpr := fmt.Sprintf("arrayFlatten(%s)", arrayConcatExpr)

	return arrayFlattenExpr, nil
}
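
// End to end, a group-by on "education[].type" therefore yields roughly this
// expression (illustrative; one arrayMap per branch available at the root):
//
//	arrayFlatten(arrayConcat(
//		arrayMap(`body_json.education`->dynamicElement(`body_json.education`.`type`, 'String'),
//			dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))))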

// buildArrayMap builds the arrayMap expression for a specific branch, handling all sub-branches
func (c *conditionBuilder) buildArrayMap(currentNode *telemetrytypes.JSONAccessNode, branchType telemetrytypes.JSONAccessBranchType) (string, error) {
	if currentNode == nil {
		return "", errors.Newf(errors.TypeInternal, CodeCurrentNodeNil, "current node is nil while building arrayMap")
	}

	nextNode := currentNode.Branches[branchType]
	if nextNode == nil {
		return "", errors.Newf(errors.TypeInternal, CodeNextNodeNil, "next node is nil while building arrayMap")
	}

	// Build the array expression for this level
	var arrayExpr string
	if branchType == telemetrytypes.BranchJSON {
		// Array(JSON) branch
		arrayExpr = fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')",
			currentNode.FieldPath(), currentNode.MaxDynamicTypes, currentNode.MaxDynamicPaths)
	} else {
		// Array(Dynamic) branch - filter for JSON objects
		dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", currentNode.FieldPath())
		arrayExpr = fmt.Sprintf("arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
	}

	// If this is the terminal level, return the simple arrayMap
	if nextNode.IsTerminal {
		dynamicElementExpr := fmt.Sprintf("dynamicElement(%s, '%s')", nextNode.FieldPath(),
			nextNode.TerminalConfig.ElemType.StringValue(),
		)
		return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), dynamicElementExpr, arrayExpr), nil
	}

	// For non-terminal nodes, we need to handle ALL possible branches at the next level
	var nestedExpressions []string
	hasJSON := nextNode.Branches[telemetrytypes.BranchJSON] != nil
	hasDynamic := nextNode.Branches[telemetrytypes.BranchDynamic] != nil

	if hasJSON {
		jsonNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchJSON)
		if err != nil {
			return "", err
		}
		nestedExpressions = append(nestedExpressions, jsonNested)
	}

	if hasDynamic {
		dynamicNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchDynamic)
		if err != nil {
			return "", err
		}
		nestedExpressions = append(nestedExpressions, dynamicNested)
	}

	// If we have multiple nested expressions, we need to concat them
	var nestedExpr string
	if len(nestedExpressions) == 1 {
		nestedExpr = nestedExpressions[0]
	} else if len(nestedExpressions) > 1 {
		// This shouldn't happen in our current tree structure, but handle it just in case
		nestedExpr = fmt.Sprintf("arrayConcat(%s)", strings.Join(nestedExpressions, ", "))
	} else {
		return "", errors.Newf(errors.TypeInternal, CodeNestedExpressionsEmpty, "nested expressions are empty while building arrayMap")
	}

	return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), nestedExpr, arrayExpr), nil
}

func assumeNotNull(column string, elemType telemetrytypes.JSONDataType) string {
	return fmt.Sprintf("assumeNotNull(dynamicElement(%s, '%s'))", column, elemType.StringValue())
}
713 pkg/telemetrylogs/json_stmt_builder_test.go Normal file
File diff suppressed because one or more lines are too long
@@ -84,7 +84,6 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
|
||||
}
|
||||
|
||||
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
|
||||
|
||||
dataType, value := inferDataType(value, operator, key)
|
||||
|
||||
// for array types, we need to extract the value from the JSON_QUERY
|
||||
@@ -23,7 +23,6 @@ type logQueryStatementBuilder struct {
|
||||
aggExprRewriter qbtypes.AggExprRewriter
|
||||
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey
|
||||
jsonBodyPrefix string
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||
}
|
||||
|
||||
@@ -37,7 +36,6 @@ func NewLogQueryStatementBuilder(
|
||||
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||
aggExprRewriter qbtypes.AggExprRewriter,
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
jsonBodyPrefix string,
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *logQueryStatementBuilder {
|
||||
logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
|
||||
@@ -50,7 +48,6 @@ func NewLogQueryStatementBuilder(
|
||||
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
|
||||
aggExprRewriter: aggExprRewriter,
|
||||
fullTextColumn: fullTextColumn,
|
||||
jsonBodyPrefix: jsonBodyPrefix,
|
||||
jsonKeyToKey: jsonKeyToKey,
|
||||
}
|
||||
}
|
||||
@@ -171,6 +168,25 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
|
||||
overallMatch = overallMatch || findMatch(IntrinsicFields)
|
||||
}
|
||||
|
||||
if strings.Contains(k.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
|
||||
k.Name = strings.TrimPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix)
|
||||
fieldKeys, found := keys[k.Name]
|
||||
if found && len(fieldKeys) > 0 {
|
||||
k.FieldContext = fieldKeys[0].FieldContext
|
||||
k.FieldDataType = fieldKeys[0].FieldDataType
|
||||
k.Materialized = fieldKeys[0].Materialized
|
||||
k.JSONDataType = fieldKeys[0].JSONDataType
|
||||
k.Indexes = fieldKeys[0].Indexes
|
||||
|
||||
overallMatch = true // because we found a match
|
||||
} else {
|
||||
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
|
||||
k.FieldContext = telemetrytypes.FieldContextBody
|
||||
k.FieldDataType = telemetrytypes.FieldDataTypeString
|
||||
k.JSONDataType = &telemetrytypes.String
|
||||
}
|
||||
}
|
||||
|
||||
if !overallMatch {
|
||||
// check if all the key for the given field have been materialized, if so
|
||||
// set the key to materialized
|
||||
@@ -221,6 +237,9 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
cteArgs = append(cteArgs, args)
|
||||
}
|
||||
|
||||
// Collect array join info for body JSON fields
|
||||
var arrayJoinClauses []string
|
||||
|
||||
// Select timestamp and id by default
|
||||
sb.Select(LogsV2TimestampColumn)
|
||||
sb.SelectMore(LogsV2IDColumn)
|
||||
@@ -234,6 +253,10 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
sb.SelectMore(LogsV2ScopeNameColumn)
|
||||
sb.SelectMore(LogsV2ScopeVersionColumn)
|
||||
sb.SelectMore(LogsV2BodyColumn)
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
sb.SelectMore(LogsV2BodyJSONColumn)
|
||||
sb.SelectMore(LogsV2BodyPromotedColumn)
|
||||
}
|
||||
sb.SelectMore(LogsV2AttributesStringColumn)
|
||||
sb.SelectMore(LogsV2AttributesNumberColumn)
|
||||
sb.SelectMore(LogsV2AttributesBoolColumn)
|
||||
@@ -246,6 +269,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn {
|
||||
continue
|
||||
}
|
||||
|
||||
// get column expression for the field - use array index directly to avoid pointer to loop variable
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
|
||||
if err != nil {
|
||||
@@ -255,8 +279,12 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
}
|
||||
}
|
||||
|
||||
// From table
|
||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||
// From table (inject ARRAY JOINs if collected)
|
||||
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
|
||||
if len(arrayJoinClauses) > 0 {
|
||||
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
|
||||
}
|
||||
sb.From(fromBase)
|
||||
|
||||
// Add filter conditions
|
||||
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
|
||||
@@ -330,13 +358,17 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
|
||||
var allGroupByArgs []any
|
||||
|
||||
// Collect array join info for body JSON fields
|
||||
var arrayJoinClauses []string
|
||||
|
||||
// Keep original column expressions so we can build the tuple
|
||||
fieldNames := make([]string, 0, len(query.GroupBy))
|
||||
for _, gb := range query.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||
allGroupByArgs = append(allGroupByArgs, args...)
|
||||
sb.SelectMore(colExpr)
|
||||
@@ -358,7 +390,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
|
||||
}
|
||||
|
||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||
// Add FROM clause
|
||||
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
|
||||
if len(arrayJoinClauses) > 0 {
|
||||
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
|
||||
}
|
||||
sb.From(fromBase)
|
||||
|
||||
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
|
||||
|
||||
if err != nil {
|
||||
@@ -404,7 +442,6 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
}
|
||||
|
||||
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||
|
||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||
|
||||
// Stitch it all together: WITH … SELECT …
|
||||
@@ -431,7 +468,6 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
}
|
||||
|
||||
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||
|
||||
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||
|
||||
// Stitch it all together: WITH … SELECT …
|
||||
@@ -478,11 +514,15 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
||||
|
||||
var allGroupByArgs []any
|
||||
|
||||
// Collect array join info for body JSON fields
|
||||
var arrayJoinClauses []string
|
||||
|
||||
for _, gb := range query.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||
allGroupByArgs = append(allGroupByArgs, args...)
|
||||
sb.SelectMore(colExpr)
|
||||
@@ -508,8 +548,12 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
||||
}
|
||||
}
|
||||
|
||||
// From table
|
||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||
// From table (inject ARRAY JOINs if collected)
|
||||
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
|
||||
if len(arrayJoinClauses) > 0 {
|
||||
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
|
||||
}
|
||||
sb.From(fromBase)
|
||||
|
||||
// Add filter conditions
|
||||
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
|
||||
@@ -654,7 +698,6 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
|
||||
start, end uint64,
|
||||
variables map[string]qbtypes.VariableItem,
|
||||
) (*qbtypes.Statement, error) {
|
||||
|
||||
return b.resourceFilterStmtBuilder.Build(
|
||||
ctx,
|
||||
start,
|
||||
|
||||
@@ -32,7 +32,6 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
|
||||
cb,
|
||||
mockMetadataStore,
|
||||
DefaultFullTextColumn,
|
||||
BodyJSONStringSearchPrefix,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
}
|
||||
@@ -197,11 +196,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
cb := NewConditionBuilder(fm, mockMetadataStore)
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
@@ -213,7 +212,6 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
BodyJSONStringSearchPrefix,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
|
||||
@@ -318,11 +316,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
cb := NewConditionBuilder(fm, mockMetadataStore)
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
@@ -334,7 +332,6 @@ func TestStatementBuilderListQuery(t *testing.T) {
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
BodyJSONStringSearchPrefix,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
|
||||
@@ -427,11 +424,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
cb := NewConditionBuilder(fm, mockMetadataStore)
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
@@ -443,10 +440,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
BodyJSONStringSearchPrefix,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
|
||||
//
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
|
||||
@@ -491,7 +489,8 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "body.status",
|
||||
Name: "status",
|
||||
FieldContext: telemetrytypes.FieldContextBody,
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -501,11 +500,11 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
cb := NewConditionBuilder(fm, mockMetadataStore)
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
@@ -517,7 +516,6 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
BodyJSONStringSearchPrefix,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
|
||||
@@ -597,11 +595,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
|
||||
cb := NewConditionBuilder(fm, mockMetadataStore)
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
@@ -613,7 +611,6 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
BodyJSONStringSearchPrefix,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
|
||||
|
||||
@@ -47,9 +47,6 @@ var (
|
||||
//
|
||||
// searchOperator: LIKE for pattern matching, EQUAL for exact match
|
||||
// Returns: (paths, error)
|
||||
// TODO(Piyush): Remove this lint skip
|
||||
//
|
||||
// nolint:unused
|
||||
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
|
||||
fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
||||
|
||||
@@ -135,7 +132,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
|
||||
orClauses := []string{}
|
||||
for _, fieldKeySelector := range fieldKeySelectors {
|
||||
// replace [*] with []
|
||||
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrylogs.ArrayAnyIndex, telemetrylogs.ArraySep)
|
||||
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
|
||||
// Extract search text for body JSON keys
|
||||
keyName := CleanPathPrefixes(fieldKeySelector.Name)
|
||||
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
||||
@@ -162,13 +159,11 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
	return query, args, limit, nil
}

-// TODO(Piyush): Remove this lint skip
-//
-// nolint:unused
func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
	filteredPaths := []string{}
	for _, path := range paths {
-		if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
+		if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
			continue
		}
		filteredPaths = append(filteredPaths, path)

@@ -245,7 +240,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
	}
	defer rows.Close()

-	indexesMap := make(map[string][]schemamigrator.Index)
+	indexes := make(map[string][]schemamigrator.Index)
	for rows.Next() {
		var name string
		var typeFull string

@@ -254,7 +249,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
	if err := rows.Scan(&name, &typeFull, &expr, &granularity); err != nil {
		return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to scan string indexed column")
	}
-	indexesMap[name] = append(indexesMap[name], schemamigrator.Index{
+	indexes[name] = append(indexes[name], schemamigrator.Index{
		Name:       name,
		Type:       typeFull,
		Expression: expr,

@@ -262,7 +257,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
	})
	}

-	return indexesMap, nil
+	return indexes, nil
}

func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {

@@ -296,7 +291,7 @@ func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...str
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
	path = CleanPathPrefixes(path)

-	if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
+	if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
		return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
	}

@@ -456,7 +451,7 @@ func derefValue(v any) any {

// IsPathPromoted checks if a specific path is promoted
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
-	split := strings.Split(path, telemetrylogs.ArraySep)
+	split := strings.Split(path, telemetrytypes.ArraySep)
	query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
	if err != nil {
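IsPathPromoted splits on the array separator and looks up only split[0], so promotion is decided by the path's first segment. A small illustration, under the same assumed separator value ("[].") as above:

package main

import (
	"fmt"
	"strings"
)

func main() {
	const arraySep = "[]." // assumed jsontypeexporter.ArraySeparator
	path := "orders[].items"
	split := strings.Split(path, arraySep)
	// Only the leading segment is checked against the promoted-paths table.
	fmt.Println(split[0]) // Output: orders
}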
@@ -572,6 +572,14 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
		}
	}

+	if querybuilder.BodyJSONQueryEnabled {
+		bodyJSONPaths, finished, err := t.getBodyJSONPaths(ctx, fieldKeySelectors) // LIKE for pattern matching
+		if err != nil {
+			t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
+		}
+		keys = append(keys, bodyJSONPaths...)
+		complete = complete && finished
+	}
	return keys, complete, nil
}
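The added block in getLogsKeys is best-effort: it runs only behind the BodyJSONQueryEnabled flag, logs (rather than returns) extraction errors, and ANDs the completeness flag so a truncated body-JSON scan marks the whole key listing incomplete. A self-contained sketch of that merge pattern, with stand-in names (flagEnabled, fetchExtra) that are not SigNoz APIs:

package main

import (
	"errors"
	"fmt"
)

var flagEnabled = true

// fetchExtra stands in for getBodyJSONPaths: it can return partial
// results (finished=false) alongside an error.
func fetchExtra() ([]string, bool, error) {
	return []string{"body.status"}, false, errors.New("scan truncated")
}

func main() {
	keys := []string{"service.name"}
	complete := true

	if flagEnabled {
		extra, finished, err := fetchExtra()
		if err != nil {
			fmt.Println("log only, keep going:", err) // best-effort enrichment
		}
		keys = append(keys, extra...)
		complete = complete && finished // a partial scan taints completeness
	}
	fmt.Println(keys, complete) // [service.name body.status] false
}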
@@ -978,7 +986,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
	FieldMapper:      t.fm,
	ConditionBuilder: t.conditionBuilder,
	FieldKeys:        keys,
	}, 0, 0)
	if err == nil {
		sb.AddWhereClause(whereClause.WhereClause)
	} else {

@@ -1002,20 +1010,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel

	// search on attributes
	key.FieldContext = telemetrytypes.FieldContextAttribute
	cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
	if err == nil {
		conds = append(conds, cond)
	}

	// search on resource
	key.FieldContext = telemetrytypes.FieldContextResource
	cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
	if err == nil {
		conds = append(conds, cond)
	}
	key.FieldContext = origContext
	} else {
	cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
	if err == nil {
		conds = append(conds, cond)
	}

@@ -1164,6 +1172,10 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
		limit = 50
	}

+	if strings.HasPrefix(fieldValueSelector.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
+		return t.ListJSONValues(ctx, fieldValueSelector.Name, limit)
+	}
+
	sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)

	if fieldValueSelector.Name != "" {
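The four added lines route any field whose name carries the body. prefix to ListJSONValues before the generic DISTINCT lookup against the fields table. A stand-alone sketch of the routing (listJSONValues and listGeneric are illustrative stand-ins, not the real methods):

package main

import (
	"fmt"
	"strings"
)

const bodyPrefix = "body." // mirrors telemetrytypes.BodyJSONStringSearchPrefix

// values short-circuits body.* fields to the JSON value path; everything
// else falls through to the generic fields-table lookup.
func values(name string, limit int) []string {
	if strings.HasPrefix(name, bodyPrefix) {
		return listJSONValues(name, limit)
	}
	return listGeneric(name, limit)
}

func listJSONValues(name string, limit int) []string { return []string{"from body JSON: " + name} }
func listGeneric(name string, limit int) []string    { return []string{"from fields table: " + name} }

func main() {
	fmt.Println(values("body.status", 50))  // routed to the JSON lister
	fmt.Println(values("service.name", 50)) // routed to the fields table
}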
@@ -32,6 +32,8 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
	options.MaxIdleConns = config.Connection.MaxIdleConns
	options.MaxOpenConns = config.Connection.MaxOpenConns
	options.DialTimeout = config.Connection.DialTimeout
+	// This is to avoid the driver decoding issues with JSON columns
+	options.Settings["output_format_native_write_json_as_string"] = 1

	chConn, err := clickhouse.Open(options)
	if err != nil {
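output_format_native_write_json_as_string asks the ClickHouse server to send JSON columns as plain strings over the native protocol, so the Go driver never has to decode the native JSON representation itself. A minimal sketch of applying the same setting with clickhouse-go v2 (the address is a placeholder):

package main

import (
	"log"

	"github.com/ClickHouse/clickhouse-go/v2"
)

func main() {
	conn, err := clickhouse.Open(&clickhouse.Options{
		Addr: []string{"localhost:9000"}, // placeholder address
		Settings: clickhouse.Settings{
			// Server serializes JSON columns as strings, sidestepping
			// client-side decoding of the native JSON type.
			"output_format_native_write_json_as_string": 1,
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
}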
@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
	// Keep original column expressions so we can build the tuple
	fieldNames := make([]string, 0, len(query.GroupBy))
	for _, gb := range query.GroupBy {
-		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
+		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
		if err != nil {
			return nil, err
		}

@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(

	var allGroupByArgs []any
	for _, gb := range query.GroupBy {
-		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
+		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
		if err != nil {
			return nil, err
		}

@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
	FieldKeys:          keys,
	SkipResourceFilter: true,
	Variables:          variables,
	}, start, end)

	if err != nil {
		return nil, err

@@ -357,7 +357,7 @@ func TestStatementBuilder(t *testing.T) {
	cb := NewConditionBuilder(fm)
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
-	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
+	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)

	resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -525,7 +525,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
	cb := NewConditionBuilder(fm)
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
-	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
+	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)

	resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -681,7 +681,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
	cb := NewConditionBuilder(fm)
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
-	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
+	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)

	resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
	ConditionBuilder:   b.stmtBuilder.cb,
	FieldKeys:          keys,
	SkipResourceFilter: true,
	}, b.start, b.end,
	)
	if err != nil {
		b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)

@@ -552,7 +552,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
	b.stmtBuilder.cb,
	keys,
	telemetrytypes.FieldDataTypeString,
-	"",
	nil,
	)
	if err != nil {

@@ -662,7 +661,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
	b.stmtBuilder.cb,
	keys,
	telemetrytypes.FieldDataTypeString,
-	"",
	nil,
	)
	if err != nil {

@@ -802,7 +800,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
	b.stmtBuilder.cb,
	keys,
	telemetrytypes.FieldDataTypeString,
-	"",
	nil,
	)
	if err != nil {

@@ -390,7 +390,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
	cb := NewConditionBuilder(fm)
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
-	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
+	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)

	resourceFilterStmtBuilder := resourceFilterStmtBuilder()
	traceStmtBuilder := NewTraceQueryStatementBuilder(

@@ -506,7 +506,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
	cb := NewConditionBuilder(fm)
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
-	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
+	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)

	resourceFilterStmtBuilder := resourceFilterStmtBuilder()
	traceStmtBuilder := NewTraceQueryStatementBuilder(

@@ -44,7 +44,7 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
	mockMetadataStore,
	)

-	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
+	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)

	statementBuilder := NewTraceQueryStatementBuilder(
		instrumentationtest.New().ToProviderSettings(),

@@ -6,7 +6,6 @@ import (
	"github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
	"github.com/SigNoz/signoz/pkg/errors"
-	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

@@ -33,7 +32,7 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
	}

-	if strings.Contains(i.Path, telemetrylogs.ArraySep) || strings.Contains(i.Path, telemetrylogs.ArrayAnyIndex) {
+	if strings.Contains(i.Path, telemetrytypes.ArraySep) || strings.Contains(i.Path, telemetrytypes.ArrayAnyIndex) {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
	}

@@ -41,12 +40,12 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
	}

-	if !strings.HasPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix) {
+	if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
	}

	// remove the "body." prefix from the path
-	i.Path = strings.TrimPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix)
+	i.Path = strings.TrimPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix)

	isCardinal := keycheck.IsCardinal(i.Path)
	if isCardinal {
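ValidateAndSetDefaults enforces, in order: no spaces, no array paths, no pre-applied column prefixes, a mandatory body. prefix, and a cardinality check, then strips the prefix before storage. A hedged sketch of a subset of those rules with local stand-ins (the real type is PromotePath; the separator values here are assumed):

package main

import (
	"fmt"
	"strings"
)

// validate mirrors the ordering of checks above, returning the stored
// form of the path (prefix stripped) on success.
func validate(path string) (string, error) {
	const prefix, arraySep, anyIdx = "body.", "[].", "[*]."
	switch {
	case strings.Contains(path, " "):
		return "", fmt.Errorf("path cannot contain spaces")
	case strings.Contains(path, arraySep) || strings.Contains(path, anyIdx):
		return "", fmt.Errorf("array paths can not be promoted or indexed")
	case !strings.HasPrefix(path, prefix):
		return "", fmt.Errorf("path must start with `body.`")
	}
	return strings.TrimPrefix(path, prefix), nil
}

func main() {
	fmt.Println(validate("body.user.id"))    // "user.id", <nil>
	fmt.Println(validate("body.items[].id")) // error: array path
	fmt.Println(validate("status"))          // error: missing prefix
}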
@@ -4,6 +4,7 @@ import (
	"fmt"
	"strings"

+	"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
	"github.com/SigNoz/signoz/pkg/valuer"
)

@@ -17,9 +18,13 @@ var (
	FieldSelectorMatchTypeFuzzy = FieldSelectorMatchType{valuer.NewString("fuzzy")}
)

-// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
-// e.g., "body.status" where "body." is the prefix
-const BodyJSONStringSearchPrefix = `body.`
+const (
+	// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
+	// e.g., "body.status" where "body." is the prefix
+	BodyJSONStringSearchPrefix = "body."
+	ArraySep                   = jsontypeexporter.ArraySeparator
+	ArrayAnyIndex              = "[*]."
+)

type TelemetryFieldKey struct {
	Name string `json:"name"`
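Defining ArraySep as an alias of jsontypeexporter.ArraySeparator makes the collector's write path the single source of truth for the separator, so the query layer cannot drift from what the exporter actually writes. A tiny sketch of that aliasing pattern, with the exporter's value assumed to be "[]." for illustration:

package main

import "fmt"

// Stand-in for the collector package's constant.
const exporterArraySeparator = "[]." // assumed jsontypeexporter.ArraySeparator

// The query-layer constant simply re-exports it.
const ArraySep = exporterArraySeparator

func main() {
	// Equal by construction: there is only one definition to update.
	fmt.Println(ArraySep == exporterArraySeparator) // true
}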
pkg/types/telemetrytypes/json_access_plan.go (new file, 82 lines)
@@ -0,0 +1,82 @@
package telemetrytypes

import (
	"fmt"
	"strings"

	"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type JSONAccessBranchType struct {
	valuer.String
}

var (
	BranchJSON    = JSONAccessBranchType{valuer.NewString("json")}
	BranchDynamic = JSONAccessBranchType{valuer.NewString("dynamic")}
)

type JSONAccessPlan = []*JSONAccessNode

type TerminalConfig struct {
	Key       *TelemetryFieldKey
	ElemType  JSONDataType
	ValueType JSONDataType
}

// Node is now a tree structure representing the complete JSON path traversal
// that precomputes all possible branches and their types
type JSONAccessNode struct {
	// Node information
	Name       string
	IsTerminal bool
	isRoot     bool // marked true for only body_json and body_json_promoted

	// Precomputed type information (single source of truth)
	AvailableTypes []JSONDataType

	// Array type branches (Array(JSON) vs Array(Dynamic))
	Branches map[JSONAccessBranchType]*JSONAccessNode

	// Terminal configuration
	TerminalConfig *TerminalConfig

	// Parent reference for traversal
	Parent *JSONAccessNode

	// JSON progression parameters (precomputed during planning)
	MaxDynamicTypes int
	MaxDynamicPaths int
}

func NewRootJSONAccessNode(name string, maxDynamicTypes, maxDynamicPaths int) *JSONAccessNode {
	return &JSONAccessNode{
		Name:            name,
		isRoot:          true,
		MaxDynamicTypes: maxDynamicTypes,
		MaxDynamicPaths: maxDynamicPaths,
	}
}

func (n *JSONAccessNode) Alias() string {
	if n.isRoot {
		return n.Name
	} else if n.Parent == nil {
		return fmt.Sprintf("`%s`", n.Name)
	}

	parentAlias := strings.TrimLeft(n.Parent.Alias(), "`")
	parentAlias = strings.TrimRight(parentAlias, "`")

	sep := jsontypeexporter.ArraySeparator
	if n.Parent.isRoot {
		sep = "."
	}
	return fmt.Sprintf("`%s%s%s`", parentAlias, sep, n.Name)
}

func (n *JSONAccessNode) FieldPath() string {
	key := "`" + n.Name + "`"
	return n.Parent.Alias() + "." + key
}
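How Alias and FieldPath compose is easiest to see on a small tree. The sketch below re-implements the logic locally (isRoot is unexported, so the nodes here are stand-ins, not the real type) and assumes jsontypeexporter.ArraySeparator is "[].", matching the assumption used earlier; note that any node whose parent is not the root joins with the array separator, exactly as the code above does:

package main

import (
	"fmt"
	"strings"
)

type node struct {
	name   string
	isRoot bool
	parent *node
}

func (n *node) alias() string {
	if n.isRoot {
		return n.name
	}
	p := strings.Trim(n.parent.alias(), "`")
	sep := "[]." // assumed ArraySeparator
	if n.parent.isRoot {
		sep = "." // first level below the root column uses a plain dot
	}
	return fmt.Sprintf("`%s%s%s`", p, sep, n.name)
}

func (n *node) fieldPath() string {
	return n.parent.alias() + ".`" + n.name + "`"
}

func main() {
	root := &node{name: "body_json", isRoot: true}
	user := &node{name: "user", parent: root}
	tags := &node{name: "tags", parent: user}

	fmt.Println(user.alias())     // `body_json.user`
	fmt.Println(tags.alias())     // `body_json.user[].tags`
	fmt.Println(user.fieldPath()) // body_json.`user`
	fmt.Println(tags.fieldPath()) // `body_json.user`.`tags`
}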