Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-28 04:22:12 +00:00)

Compare commits: issue_3017 ... SIG-3496 (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | d228d8c1e0 | |
| | 10ba210d2a | |
frontend/src/api/metricsExplorer/v2/getMetricAlerts.ts (new file, 29 lines)

```ts
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricAlertsResponse } from 'types/api/metricsExplorer/v2';

export const getMetricAlerts = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricAlertsResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metric/alerts', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
```
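A minimal usage sketch (not part of the diff) showing how the wrapper above can be called directly; the surrounding function is hypothetical:

```ts
import { getMetricAlerts } from 'api/metricsExplorer/v2/getMetricAlerts';

// Hypothetical caller: fetch alerts for one metric and count them.
async function countAlertsFor(metricName: string): Promise<number> {
	const controller = new AbortController();
	const response = await getMetricAlerts(metricName, controller.signal);
	// SuccessResponseV2 wraps the API envelope; alerts live at data.data.alerts.
	return response.data.data.alerts.length;
}
```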
frontend/src/api/metricsExplorer/v2/getMetricAttributes.ts (new file, 37 lines)

```ts
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
	GetMetricAttributesRequest,
	GetMetricAttributesResponse,
} from 'types/api/metricsExplorer/v2';

export const getMetricAttributes = async (
	{ metricName, start, end }: GetMetricAttributesRequest,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricAttributesResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.post(
			'/metrics/attributes',
			{
				metricName: encodedMetricName,
				start,
				end,
			},
			{
				signal,
				headers,
			},
		);

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
```
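Unlike the other v2 wrappers, this one POSTs, carrying the metric name and optional time range in the request body. A small usage sketch; the metric name and epoch-millisecond range below are made up for illustration:

```ts
import { getMetricAttributes } from 'api/metricsExplorer/v2/getMetricAttributes';

// Hypothetical values for illustration only.
async function listAttributeKeys(): Promise<string[]> {
	const response = await getMetricAttributes({
		metricName: 'system.cpu.time',
		start: 1700000000000,
		end: 1700003600000,
	});
	return response.data.data.attributes.map((attribute) => attribute.key);
}
```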
frontend/src/api/metricsExplorer/v2/getMetricDashboards.ts (new file, 29 lines)

```ts
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricDashboardsResponse } from 'types/api/metricsExplorer/v2';

export const getMetricDashboards = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricDashboardsResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metric/dashboards', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
```
frontend/src/api/metricsExplorer/v2/getMetricHighlights.ts (new file, 29 lines)

```ts
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricHighlightsResponse } from 'types/api/metricsExplorer/v2';

export const getMetricHighlights = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricHighlightsResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metric/highlights', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
```
frontend/src/api/metricsExplorer/v2/getMetricMetadata.ts (new file, 29 lines)

```ts
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricMetadataResponse } from 'types/api/metricsExplorer/v2';

export const getMetricMetadata = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricMetadataResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metrics/metadata', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
```
frontend/src/api/metricsExplorer/v2/updateMetricMetadata.ts (new file, 28 lines)

```ts
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
	UpdateMetricMetadataRequest,
	UpdateMetricMetadataResponse,
} from 'types/api/metricsExplorer/v2';

const updateMetricMetadata = async (
	metricName: string,
	props: UpdateMetricMetadataRequest,
): Promise<SuccessResponseV2<UpdateMetricMetadataResponse>> => {
	try {
		const response = await axios.post(`/metrics/${metricName}/metadata`, {
			...props,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default updateMetricMetadata;
```
frontend/src/constants/reactQueryKeys.ts (modified)

```diff
@@ -56,6 +56,13 @@ export const REACT_QUERY_KEY = {
 	GET_RELATED_METRICS: 'GET_RELATED_METRICS',
 	GET_INSPECT_METRICS_DETAILS: 'GET_INSPECT_METRICS_DETAILS',
+
+	// Metrics Explorer V2 Query Keys
+	GET_METRIC_HIGHLIGHTS: 'GET_METRIC_HIGHLIGHTS',
+	GET_METRIC_METADATA: 'GET_METRIC_METADATA',
+	GET_METRIC_ATTRIBUTES: 'GET_METRIC_ATTRIBUTES',
+	GET_METRIC_ALERTS: 'GET_METRIC_ALERTS',
+	GET_METRIC_DASHBOARDS: 'GET_METRIC_DASHBOARDS',
 
 	// Traces Funnels Query Keys
 	GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',
 	GET_DOMAIN_METRICS_DATA: 'GET_DOMAIN_METRICS_DATA',
```
AllAttributes.tsx (modified)

```diff
@@ -1,8 +1,17 @@
-import { Button, Collapse, Input, Menu, Popover, Typography } from 'antd';
+import {
+	Button,
+	Collapse,
+	Input,
+	Menu,
+	Popover,
+	Skeleton,
+	Typography,
+} from 'antd';
 import { ColumnsType } from 'antd/es/table';
 import logEvent from 'api/common/logEvent';
 import { ResizeTable } from 'components/ResizeTable';
 import { DataType } from 'container/LogDetailedView/TableView';
+import { useGetMetricAttributes } from 'hooks/metricsExplorer/v2/useGetMetricAttributes';
 import { useNotifications } from 'hooks/useNotifications';
 import { Compass, Copy, Search } from 'lucide-react';
 import { useCallback, useMemo, useState } from 'react';
@@ -13,7 +22,9 @@ import ROUTES from '../../../constants/routes';
 import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
 import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
 import { AllAttributesProps, AllAttributesValueProps } from './types';
-import { getMetricDetailsQuery } from './utils';
+import { getMetricDetailsQuery, transformMetricAttributes } from './utils';
+
+const ALL_ATTRIBUTES_KEY = 'all-attributes';
 
 export function AllAttributesValue({
 	filterKey,
@@ -110,13 +121,20 @@ export function AllAttributesValue({
 
 function AllAttributes({
 	metricName,
-	attributes,
 	metricType,
 }: AllAttributesProps): JSX.Element {
 	const [searchString, setSearchString] = useState('');
-	const [activeKey, setActiveKey] = useState<string | string[]>(
-		'all-attributes',
-	);
+	const [activeKey, setActiveKey] = useState<string[]>([ALL_ATTRIBUTES_KEY]);
+
+	const {
+		data: attributesData,
+		isLoading: isLoadingAttributes,
+		isError: isErrorAttributes,
+	} = useGetMetricAttributes({
+		metricName,
+	});
+
+	const { attributes } = transformMetricAttributes(attributesData);
 
 	const { handleExplorerTabChange } = useHandleExplorerTabChange();
 
@@ -178,7 +196,7 @@
 			attributes.filter(
 				(attribute) =>
 					attribute.key.toLowerCase().includes(searchString.toLowerCase()) ||
-					attribute.value.some((value) =>
+					attribute.values.some((value) =>
 						value.toLowerCase().includes(searchString.toLowerCase()),
 					),
 			),
@@ -195,7 +213,7 @@
 					},
 					value: {
 						key: attribute.key,
-						value: attribute.value,
+						value: attribute.values,
 					},
 				}))
 			: [],
@@ -252,8 +270,38 @@
 		],
 	);
 
-	const items = useMemo(
-		() => [
+	const emptyText = useMemo(
+		() =>
+			isErrorAttributes ? 'Error fetching attributes' : 'No attributes found',
+		[isErrorAttributes],
+	);
+
+	const items = useMemo(() => {
+		let children;
+		if (isLoadingAttributes) {
+			children = (
+				<div className="all-attributes-skeleton-container">
+					<Skeleton active title={false} paragraph={{ rows: 8 }} />
+				</div>
+			);
+		} else {
+			children = (
+				<ResizeTable
+					columns={columns}
+					loading={isLoadingAttributes}
+					tableLayout="fixed"
+					dataSource={tableData}
+					pagination={false}
+					showHeader={false}
+					className="metrics-accordion-content all-attributes-content"
+					scroll={{ y: 600 }}
+					locale={{
+						emptyText,
+					}}
+				/>
+			);
+		}
+		return [
 			{
 				label: (
 					<div className="metrics-accordion-header">
@@ -270,32 +318,22 @@
 						onClick={(e): void => {
 							e.stopPropagation();
 						}}
+						disabled={isLoadingAttributes}
 					/>
 				</div>
 			),
 			key: 'all-attributes',
-			children: (
-				<ResizeTable
-					columns={columns}
-					tableLayout="fixed"
-					dataSource={tableData}
-					pagination={false}
-					showHeader={false}
-					className="metrics-accordion-content all-attributes-content"
-					scroll={{ y: 600 }}
-				/>
-			),
+			children,
 		},
-		],
-		[columns, tableData, searchString],
-	);
+	];
+	}, [searchString, columns, isLoadingAttributes, tableData, emptyText]);
 
 	return (
 		<Collapse
 			bordered
-			className="metrics-accordion metrics-metadata-accordion"
+			className="metrics-accordion metrics-all-attributes-accordion"
 			activeKey={activeKey}
-			onChange={(keys): void => setActiveKey(keys)}
+			onChange={(keys): void => setActiveKey(keys as string[])}
 			items={items}
 		/>
 	);
```
DashboardsAndAlertsPopover.tsx (modified)

```diff
@@ -1,37 +1,56 @@
 import { Color } from '@signozhq/design-tokens';
 import { Dropdown, Typography } from 'antd';
+import { Skeleton } from 'antd/lib';
 import { QueryParams } from 'constants/query';
 import ROUTES from 'constants/routes';
+import { useGetMetricAlerts } from 'hooks/metricsExplorer/v2/useGetMetricAlerts';
+import { useGetMetricDashboards } from 'hooks/metricsExplorer/v2/useGetMetricDashboards';
 import { useSafeNavigate } from 'hooks/useSafeNavigate';
 import useUrlQuery from 'hooks/useUrlQuery';
 import history from 'lib/history';
 import { Bell, Grid } from 'lucide-react';
 import { useMemo } from 'react';
 import { generatePath } from 'react-router-dom';
+import { pluralize } from 'utils/pluralize';
 
 import { DashboardsAndAlertsPopoverProps } from './types';
+import { transformMetricAlerts, transformMetricDashboards } from './utils';
 
 function DashboardsAndAlertsPopover({
-	alerts,
-	dashboards,
+	metricName,
 }: DashboardsAndAlertsPopoverProps): JSX.Element | null {
 	const { safeNavigate } = useSafeNavigate();
 	const params = useUrlQuery();
+
+	const {
+		data: alertsData,
+		isLoading: isLoadingAlerts,
+		isError: isErrorAlerts,
+	} = useGetMetricAlerts(metricName);
+
+	const {
+		data: dashboardsData,
+		isLoading: isLoadingDashboards,
+		isError: isErrorDashboards,
+	} = useGetMetricDashboards(metricName);
+
+	const alerts = transformMetricAlerts(alertsData);
+	const dashboards = transformMetricDashboards(dashboardsData);
 
 	const alertsPopoverContent = useMemo(() => {
 		if (alerts && alerts.length > 0) {
 			return alerts.map((alert) => ({
-				key: alert.alert_id,
+				key: alert.alertId,
 				label: (
 					<Typography.Link
-						key={alert.alert_id}
+						key={alert.alertId}
 						onClick={(): void => {
-							params.set(QueryParams.ruleId, alert.alert_id);
+							params.set(QueryParams.ruleId, alert.alertId);
 							history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`);
 						}}
 						className="dashboards-popover-content-item"
 					>
-						{alert.alert_name || alert.alert_id}
+						{alert.alertName || alert.alertId}
 					</Typography.Link>
 				),
 			}));
@@ -39,41 +58,44 @@ function DashboardsAndAlertsPopover({
 		}
 		return null;
 	}, [alerts, params]);
 
-	const uniqueDashboards = useMemo(
-		() =>
-			dashboards?.filter(
-				(item, index, self) =>
-					index === self.findIndex((t) => t.dashboard_id === item.dashboard_id),
-			),
-		[dashboards],
-	);
-
 	const dashboardsPopoverContent = useMemo(() => {
-		if (uniqueDashboards && uniqueDashboards.length > 0) {
-			return uniqueDashboards.map((dashboard) => ({
-				key: dashboard.dashboard_id,
+		if (dashboards && dashboards.length > 0) {
+			return dashboards.map((dashboard) => ({
+				key: dashboard.dashboardId,
 				label: (
 					<Typography.Link
-						key={dashboard.dashboard_id}
+						key={dashboard.dashboardId}
 						onClick={(): void => {
 							safeNavigate(
 								generatePath(ROUTES.DASHBOARD, {
-									dashboardId: dashboard.dashboard_id,
+									dashboardId: dashboard.dashboardId,
 								}),
 							);
 						}}
 						className="dashboards-popover-content-item"
 					>
-						{dashboard.dashboard_name || dashboard.dashboard_id}
+						{dashboard.dashboardName || dashboard.dashboardId}
 					</Typography.Link>
 				),
 			}));
 		}
 		return null;
-	}, [uniqueDashboards, safeNavigate]);
+	}, [dashboards, safeNavigate]);
 
-	if (!dashboardsPopoverContent && !alertsPopoverContent) {
-		return null;
+	if (isLoadingAlerts || isLoadingDashboards) {
+		return (
+			<div className="dashboards-and-alerts-popover-container">
+				<Skeleton title={false} paragraph={{ rows: 1 }} active />
+			</div>
+		);
+	}
+
+	// If there are no dashboards or alerts or both have errors, don't show the popover
+	const hidePopover =
+		(!dashboardsPopoverContent && !alertsPopoverContent) ||
+		(isErrorAlerts && isErrorDashboards);
+	if (hidePopover) {
+		return <div className="dashboards-and-alerts-popover-container" />;
 	}
 
 	return (
@@ -92,8 +114,7 @@ function DashboardsAndAlertsPopover({
 				>
 					<Grid size={12} color={Color.BG_SIENNA_500} />
 					<Typography.Text>
-						{uniqueDashboards?.length} dashboard
-						{uniqueDashboards?.length === 1 ? '' : 's'}
+						{pluralize(dashboards.length, 'dashboard', 'dashboards')}
 					</Typography.Text>
 				</div>
 			</Dropdown>
@@ -112,7 +133,7 @@ function DashboardsAndAlertsPopover({
 				>
 					<Bell size={12} color={Color.BG_SAKURA_500} />
 					<Typography.Text>
-						{alerts?.length} alert {alerts?.length === 1 ? 'rule' : 'rules'}
+						{pluralize(alerts.length, 'alert rule', 'alert rules')}
 					</Typography.Text>
 				</div>
 			</Dropdown>
```
Highlights.tsx (new file, 115 lines)

```tsx
import { Skeleton, Tooltip, Typography } from 'antd';
import { useGetMetricHighlights } from 'hooks/metricsExplorer/v2/useGetMetricHighlights';
import { useMemo } from 'react';

import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
import { HighlightsProps } from './types';
import {
	formatNumberToCompactFormat,
	formatTimestampToReadableDate,
	transformMetricHighlights,
} from './utils';

function Highlights({ metricName }: HighlightsProps): JSX.Element {
	const {
		data: metricHighlightsData,
		isLoading: isLoadingMetricHighlights,
		isError: isErrorMetricHighlights,
	} = useGetMetricHighlights(metricName ?? '', {
		enabled: !!metricName,
	});

	const metricHighlights = transformMetricHighlights(metricHighlightsData);

	const dataPoints = useMemo(() => {
		if (!metricHighlights) return null;
		if (isErrorMetricHighlights) {
			return (
				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
			);
		}
		return (
			<Typography.Text className="metric-details-grid-value">
				<Tooltip title={metricHighlights?.dataPoints.toLocaleString()}>
					{formatNumberIntoHumanReadableFormat(metricHighlights?.dataPoints ?? 0)}
				</Tooltip>
			</Typography.Text>
		);
	}, [metricHighlights, isErrorMetricHighlights]);

	const timeSeries = useMemo(() => {
		if (!metricHighlights) return null;
		if (isErrorMetricHighlights) {
			return (
				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
			);
		}

		const timeSeriesActive = formatNumberToCompactFormat(
			metricHighlights.activeTimeSeries,
		);
		const timeSeriesTotal = formatNumberToCompactFormat(
			metricHighlights.totalTimeSeries,
		);

		return (
			<Typography.Text className="metric-details-grid-value">
				<Tooltip
					title="Active time series are those that have received data points in the last 1
				hour."
					placement="top"
				>
					<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
				</Tooltip>
			</Typography.Text>
		);
	}, [metricHighlights, isErrorMetricHighlights]);

	const lastReceived = useMemo(() => {
		if (!metricHighlights) return null;
		if (isErrorMetricHighlights) {
			return (
				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
			);
		}
		const displayText = formatTimestampToReadableDate(
			metricHighlights.lastReceived,
		);
		return (
			<Typography.Text className="metric-details-grid-value">
				<Tooltip title={displayText}>{displayText}</Tooltip>
			</Typography.Text>
		);
	}, [metricHighlights, isErrorMetricHighlights]);

	if (isLoadingMetricHighlights) {
		return (
			<div className="metric-details-content-grid">
				<Skeleton title={false} paragraph={{ rows: 2 }} active />
			</div>
		);
	}

	return (
		<div className="metric-details-content-grid">
			<div className="labels-row">
				<Typography.Text type="secondary" className="metric-details-grid-label">
					SAMPLES
				</Typography.Text>
				<Typography.Text type="secondary" className="metric-details-grid-label">
					TIME SERIES
				</Typography.Text>
				<Typography.Text type="secondary" className="metric-details-grid-label">
					LAST RECEIVED
				</Typography.Text>
			</div>
			<div className="values-row">
				{dataPoints}
				{timeSeries}
				{lastReceived}
			</div>
		</div>
	);
}

export default Highlights;
```
Metadata.tsx (modified)

```diff
@@ -1,19 +1,19 @@
-import { Button, Collapse, Input, Select, Typography } from 'antd';
+import { Button, Collapse, Input, Select, Skeleton, Typography } from 'antd';
 import { ColumnsType } from 'antd/es/table';
 import logEvent from 'api/common/logEvent';
 import { Temporality } from 'api/metricsExplorer/getMetricDetails';
 import { MetricType } from 'api/metricsExplorer/getMetricsList';
-import { UpdateMetricMetadataProps } from 'api/metricsExplorer/updateMetricMetadata';
 import { ResizeTable } from 'components/ResizeTable';
 import YAxisUnitSelector from 'components/YAxisUnitSelector';
 import { YAxisSource } from 'components/YAxisUnitSelector/types';
 import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
+import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import FieldRenderer from 'container/LogDetailedView/FieldRenderer';
 import { DataType } from 'container/LogDetailedView/TableView';
-import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
+import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/v2/useUpdateMetricMetadata';
 import { useNotifications } from 'hooks/useNotifications';
 import { Edit2, Save, X } from 'lucide-react';
-import { useCallback, useMemo, useState } from 'react';
+import { useCallback, useEffect, useMemo, useState } from 'react';
 import { useQueryClient } from 'react-query';
 
 import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
@@ -23,23 +23,22 @@ import {
 } from '../Summary/constants';
 import { MetricTypeRenderer } from '../Summary/utils';
 import { METRIC_METADATA_KEYS } from './constants';
-import { MetadataProps } from './types';
-import { determineIsMonotonic } from './utils';
+import { MetadataProps, MetricMetadataState, TableFields } from './types';
+import { transformUpdateMetricMetadataRequest } from './utils';
 
 function Metadata({
 	metricName,
 	metadata,
-	refetchMetricDetails,
+	isErrorMetricMetadata,
+	isLoadingMetricMetadata,
 }: MetadataProps): JSX.Element {
 	const [isEditing, setIsEditing] = useState(false);
-	const [
-		metricMetadata,
-		setMetricMetadata,
-	] = useState<UpdateMetricMetadataProps>({
-		metricType: metadata?.metric_type || MetricType.SUM,
-		description: metadata?.description || '',
-		temporality: metadata?.temporality,
-		unit: metadata?.unit,
+
+	const [metricMetadata, setMetricMetadata] = useState<MetricMetadataState>({
+		metricType: MetricType.SUM,
+		description: '',
+		temporality: undefined,
+		unit: undefined,
 	});
 	const { notifications } = useNotifications();
 	const {
@@ -51,6 +50,18 @@
 	);
 	const queryClient = useQueryClient();
 
+	// Initialize state from metadata api data
+	useEffect(() => {
+		if (metadata) {
+			setMetricMetadata({
+				metricType: metadata.metricType,
+				description: metadata.description,
+				temporality: metadata.temporality,
+				unit: metadata.unit,
+			});
+		}
+	}, [metadata]);
+
 	const tableData = useMemo(
 		() =>
 			metadata
@@ -59,7 +70,7 @@
 					temporality: metadata?.temporality,
 				})
 					// Filter out monotonic as user input is not required
-					.filter((key) => key !== 'monotonic')
+					.filter((key) => key !== TableFields.IS_MONOTONIC)
 					.map((key) => ({
 						key,
 						value: {
@@ -72,30 +83,37 @@
 	);
 
 	// Render un-editable field value
-	const renderUneditableField = useCallback((key: string, value: string) => {
-		if (key === 'metric_type') {
-			return <MetricTypeRenderer type={value as MetricType} />;
-		}
-		let fieldValue = value;
-		if (key === 'unit') {
-			fieldValue = getUniversalNameFromMetricUnit(value);
-		}
-		return <FieldRenderer field={fieldValue || '-'} />;
-	}, []);
+	const renderUneditableField = useCallback(
+		(key: keyof MetricMetadataState, value: string) => {
+			if (isErrorMetricMetadata) {
+				return <FieldRenderer field="-" />;
+			}
+			if (key === TableFields.METRIC_TYPE) {
+				return <MetricTypeRenderer type={value as MetricType} />;
+			}
+			let fieldValue = value;
+			if (key === TableFields.UNIT) {
+				fieldValue = getUniversalNameFromMetricUnit(value);
+			}
+			return <FieldRenderer field={fieldValue || '-'} />;
+		},
+		[isErrorMetricMetadata],
+	);
 
 	const renderColumnValue = useCallback(
-		(field: { value: string; key: string }): JSX.Element => {
+		(field: { value: string; key: keyof MetricMetadataState }): JSX.Element => {
 			if (!isEditing) {
 				return renderUneditableField(field.key, field.value);
 			}
 
 			// Don't allow editing of unit if it's already set
-			const metricUnitAlreadySet = field.key === 'unit' && Boolean(metadata?.unit);
+			const metricUnitAlreadySet =
+				field.key === TableFields.UNIT && Boolean(metadata?.unit);
 			if (metricUnitAlreadySet) {
 				return renderUneditableField(field.key, field.value);
 			}
 
-			if (field.key === 'metric_type') {
+			if (field.key === TableFields.METRIC_TYPE) {
 				return (
 					<Select
 						data-testid="metric-type-select"
@@ -113,7 +131,7 @@
 					/>
 				);
 			}
-			if (field.key === 'unit') {
+			if (field.key === TableFields.UNIT) {
 				return (
 					<YAxisUnitSelector
 						value={metricMetadata.unit}
@@ -125,7 +143,7 @@
 					/>
 				);
 			}
-			if (field.key === 'temporality') {
+			if (field.key === TableFields.Temporality) {
 				return (
 					<Select
 						data-testid="temporality-select"
@@ -143,16 +161,12 @@
 					/>
 				);
 			}
-			if (field.key === 'description') {
+			if (field.key === TableFields.DESCRIPTION) {
 				return (
 					<Input
 						data-testid="description-input"
 						name={field.key}
-						defaultValue={
-							metricMetadata[
-								field.key as Exclude<keyof UpdateMetricMetadataProps, 'isMonotonic'>
-							]
-						}
+						defaultValue={metricMetadata.description}
 						onChange={(e): void => {
 							setMetricMetadata((prev) => ({
 								...prev,
@@ -202,17 +216,11 @@
 		updateMetricMetadata(
 			{
 				metricName,
-				payload: {
-					...metricMetadata,
-					isMonotonic: determineIsMonotonic(
-						metricMetadata.metricType,
-						metricMetadata.temporality,
-					),
-				},
+				payload: transformUpdateMetricMetadataRequest(metricMetadata),
 			},
 			{
 				onSuccess: (response): void => {
-					if (response?.statusCode === 200) {
+					if (response?.httpStatusCode === 200) {
 						logEvent(MetricsExplorerEvents.MetricMetadataUpdated, {
 							[MetricsExplorerEventKeys.MetricName]: metricName,
 							[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -221,9 +229,12 @@
 						notifications.success({
 							message: 'Metadata updated successfully',
 						});
-						refetchMetricDetails();
 						setIsEditing(false);
-						queryClient.invalidateQueries(['metricsList']);
+						queryClient.invalidateQueries([REACT_QUERY_KEY.GET_METRICS_LIST]);
+						queryClient.invalidateQueries([
+							REACT_QUERY_KEY.GET_METRIC_METADATA,
+							metricName,
+						]);
 					} else {
 						notifications.error({
 							message:
@@ -243,21 +254,36 @@
 		metricName,
 		metricMetadata,
 		notifications,
-		refetchMetricDetails,
 		queryClient,
 	]);
 
+	const cancelEdit = useCallback(
+		(e: React.MouseEvent<HTMLElement, MouseEvent>): void => {
+			e.stopPropagation();
+			if (metadata) {
+				setMetricMetadata({
+					metricType: metadata.metricType,
+					description: metadata.description,
+					temporality: metadata.temporality,
+					unit: metadata.unit,
+				});
+			}
+			setIsEditing(false);
+		},
+		[metadata],
+	);
+
 	const actionButton = useMemo(() => {
+		if (isLoadingMetricMetadata) {
+			return null;
+		}
 		if (isEditing) {
 			return (
 				<div className="action-menu">
 					<Button
 						className="action-button"
 						type="text"
-						onClick={(e): void => {
-							e.stopPropagation();
-							setIsEditing(false);
-						}}
+						onClick={cancelEdit}
 						disabled={isUpdatingMetricsMetadata}
 					>
 						<X size={14} />
@@ -294,10 +320,35 @@
 			</Button>
 		</div>
 		);
-	}, [handleSave, isEditing, isUpdatingMetricsMetadata]);
+	}, [
+		isLoadingMetricMetadata,
+		isEditing,
+		isUpdatingMetricsMetadata,
+		cancelEdit,
+		handleSave,
+	]);
 
-	const items = useMemo(
-		() => [
+	const items = useMemo(() => {
+		let children;
+		if (isLoadingMetricMetadata) {
+			children = (
+				<div className="metrics-metadata-skeleton-container">
+					<Skeleton active title={false} paragraph={{ rows: 8 }} />
+				</div>
+			);
+		} else {
+			children = (
+				<ResizeTable
+					columns={columns}
+					tableLayout="fixed"
+					dataSource={tableData}
+					pagination={false}
+					showHeader={false}
+					className="metrics-accordion-content metrics-metadata-container"
+				/>
+			);
+		}
+		return [
 			{
 				label: (
 					<div className="metrics-accordion-header metrics-metadata-header">
@@ -306,20 +357,10 @@
 					</div>
 				),
 				key: 'metric-metadata',
-				children: (
-					<ResizeTable
-						columns={columns}
-						tableLayout="fixed"
-						dataSource={tableData}
-						pagination={false}
-						showHeader={false}
-						className="metrics-accordion-content metrics-metadata-container"
-					/>
-				),
+				children,
 			},
-		],
-		[actionButton, columns, tableData],
-	);
+	];
+	}, [actionButton, columns, isLoadingMetricMetadata, tableData]);
 
 	return (
 		<Collapse
```
MetricDetails.styles.scss (modified)

```diff
@@ -39,6 +39,7 @@
 	gap: 12px;
 
 	.metric-details-content-grid {
+		height: 50px;
 		.labels-row,
 		.values-row {
 			display: grid;
@@ -72,6 +73,7 @@
 	.dashboards-and-alerts-popover-container {
 		display: flex;
 		gap: 16px;
+		height: 32px;
 
 		.dashboards-and-alerts-popover {
 			border-radius: 20px;
@@ -148,7 +150,6 @@
 
 		.all-attributes-search-input {
 			width: 300px;
-			border: 1px solid var(--bg-slate-300);
 		}
 	}
 
@@ -161,6 +162,7 @@
 		.ant-typography:first-child {
 			font-family: 'Geist Mono';
 			color: var(--bg-robin-400);
+			background-color: transparent;
 		}
 	}
 	.all-attributes-contribution {
@@ -237,6 +239,7 @@
 	}
 
 	.metric-metadata-value {
+		height: 67px;
 		background: rgba(22, 25, 34, 0.4);
 		overflow-x: scroll;
 		.field-renderer-container {
@@ -266,6 +269,33 @@
 			border-top-width: 0.5px !important;
 		}
 	}
+
+	.metrics-metadata-accordion {
+		.ant-collapse-item {
+			.ant-collapse-content {
+				height: 268px;
+
+				.ant-collapse-content-box {
+					.metrics-metadata-skeleton-container {
+						margin: 12px;
+					}
+				}
+			}
+		}
+	}
+
+	.metrics-all-attributes-accordion {
+		.ant-collapse-item {
+			.ant-collapse-content {
+				height: 600px;
+				.ant-collapse-content-box {
+					.all-attributes-skeleton-container {
+						margin: 12px;
+					}
+				}
+			}
+		}
+	}
 
 	.ant-select {
@@ -330,18 +360,26 @@
 .metric-details-content {
 	.metrics-accordion {
 		.metrics-accordion-header {
-			.action-button {
-				.ant-typography {
-					color: var(--bg-slate-400);
+			.action-menu {
+				.action-button {
+					.ant-typography {
+						color: var(--bg-slate-400);
+					}
 				}
 			}
 		}
 
 		.metrics-accordion-content {
 			.metric-metadata-key {
 				.field-renderer-container {
 					.label {
 						color: var(--bg-slate-300);
 					}
 				}
 
 			.all-attributes-key {
 				.ant-typography:last-child {
-					color: var(--bg-slate-400);
+					color: var(--bg-vanilla-200);
+					background-color: var(--bg-robin-300);
 				}
 			}
```
MetricDetails.tsx (modified)

```diff
@@ -2,17 +2,9 @@ import './MetricDetails.styles.scss';
 import '../Summary/Summary.styles.scss';
 
 import { Color } from '@signozhq/design-tokens';
-import {
-	Button,
-	Divider,
-	Drawer,
-	Empty,
-	Skeleton,
-	Tooltip,
-	Typography,
-} from 'antd';
+import { Button, Divider, Drawer, Empty, Typography } from 'antd';
 import logEvent from 'api/common/logEvent';
-import { useGetMetricDetails } from 'hooks/metricsExplorer/useGetMetricDetails';
+import { useGetMetricMetadata } from 'hooks/metricsExplorer/v2/useGetMetricMetadata';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { Compass, Crosshair, X } from 'lucide-react';
 import { useCallback, useEffect, useMemo } from 'react';
@@ -22,16 +14,12 @@ import ROUTES from '../../../constants/routes';
 import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
 import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
 import { isInspectEnabled } from '../Inspect/utils';
-import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
 import AllAttributes from './AllAttributes';
 import DashboardsAndAlertsPopover from './DashboardsAndAlertsPopover';
+import Highlights from './Highlights';
 import Metadata from './Metadata';
 import { MetricDetailsProps } from './types';
-import {
-	formatNumberToCompactFormat,
-	formatTimestampToReadableDate,
-	getMetricDetailsQuery,
-} from './utils';
+import { getMetricDetailsQuery, transformMetricMetadata } from './utils';
 
 function MetricDetails({
 	onClose,
@@ -43,50 +31,25 @@ function MetricDetails({
 	const { handleExplorerTabChange } = useHandleExplorerTabChange();
 
 	const {
-		data,
-		isLoading,
-		isFetching,
-		error: metricDetailsError,
-		refetch: refetchMetricDetails,
-	} = useGetMetricDetails(metricName ?? '', {
+		data: metricMetadataResponse,
+		isLoading: isLoadingMetricMetadata,
+		isError: isErrorMetricMetadata,
+	} = useGetMetricMetadata(metricName ?? '', {
 		enabled: !!metricName,
 	});
 
-	const metric = data?.payload?.data;
-
-	const lastReceived = useMemo(() => {
-		if (!metric) return null;
-		return formatTimestampToReadableDate(metric.lastReceived);
-	}, [metric]);
+	const metadata = transformMetricMetadata(metricMetadataResponse);
 
 	const showInspectFeature = useMemo(
-		() => isInspectEnabled(metric?.metadata?.metric_type),
-		[metric],
+		() => isInspectEnabled(metadata?.metricType),
+		[metadata],
 	);
 
-	const isMetricDetailsLoading = isLoading || isFetching;
-
-	const timeSeries = useMemo(() => {
-		if (!metric) return null;
-		const timeSeriesActive = formatNumberToCompactFormat(metric.timeSeriesActive);
-		const timeSeriesTotal = formatNumberToCompactFormat(metric.timeSeriesTotal);
-
-		return (
-			<Tooltip
-				title="Active time series are those that have received data points in the last 1
-			hour."
-				placement="top"
-			>
-				<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
-			</Tooltip>
-		);
-	}, [metric]);
-
 	const goToMetricsExplorerwithSelectedMetric = useCallback(() => {
 		if (metricName) {
 			const compositeQuery = getMetricDetailsQuery(
 				metricName,
-				metric?.metadata?.metric_type,
+				metadata?.metricType,
 			);
 			handleExplorerTabChange(
 				PANEL_TYPES.TIME_SERIES,
@@ -103,9 +66,7 @@
 			[MetricsExplorerEventKeys.Modal]: 'metric-details',
 			});
 		}
-	}, [metricName, handleExplorerTabChange, metric?.metadata?.metric_type]);
-
-	const isMetricDetailsError = metricDetailsError || !metric;
+	}, [metricName, handleExplorerTabChange, metadata?.metricType]);
 
 	useEffect(() => {
 		logEvent(MetricsExplorerEvents.ModalOpened, {
@@ -113,6 +74,10 @@
 		});
 	}, []);
 
+	if (!metricName) {
+		return <Empty description="Metric not found" />;
+	}
+
 	return (
 		<Drawer
 			width="60%"
@@ -120,7 +85,7 @@
 			<div className="metric-details-header">
 				<div className="metric-details-title">
 					<Divider type="vertical" />
-					<Typography.Text>{metric?.name}</Typography.Text>
+					<Typography.Text>{metricName}</Typography.Text>
 				</div>
 				<div className="metric-details-header-buttons">
 					<Button
@@ -138,8 +103,8 @@
 							aria-label="Inspect Metric"
 							icon={<Crosshair size={18} />}
 							onClick={(): void => {
-								if (metric?.name) {
-									openInspectModal(metric.name);
+								if (metricName) {
+									openInspectModal(metricName);
 								}
 							}}
 							data-testid="inspect-metric-button"
@@ -159,60 +124,17 @@
 			destroyOnClose
 			closeIcon={<X size={16} />}
 		>
-			{isMetricDetailsLoading && (
-				<div data-testid="metric-details-skeleton">
-					<Skeleton active />
-				</div>
-			)}
-			{isMetricDetailsError && !isMetricDetailsLoading && (
-				<Empty description="Error fetching metric details" />
-			)}
-			{!isMetricDetailsLoading && !isMetricDetailsError && (
-				<div className="metric-details-content">
-					<div className="metric-details-content-grid">
-						<div className="labels-row">
-							<Typography.Text type="secondary" className="metric-details-grid-label">
-								SAMPLES
-							</Typography.Text>
-							<Typography.Text type="secondary" className="metric-details-grid-label">
-								TIME SERIES
-							</Typography.Text>
-							<Typography.Text type="secondary" className="metric-details-grid-label">
-								LAST RECEIVED
-							</Typography.Text>
-						</div>
-						<div className="values-row">
-							<Typography.Text className="metric-details-grid-value">
-								<Tooltip title={metric?.samples.toLocaleString()}>
-									{formatNumberIntoHumanReadableFormat(metric?.samples)}
-								</Tooltip>
-							</Typography.Text>
-							<Typography.Text className="metric-details-grid-value">
-								<Tooltip title={timeSeries}>{timeSeries}</Tooltip>
-							</Typography.Text>
-							<Typography.Text className="metric-details-grid-value">
-								<Tooltip title={lastReceived}>{lastReceived}</Tooltip>
-							</Typography.Text>
-						</div>
-					</div>
-					<DashboardsAndAlertsPopover
-						dashboards={metric.dashboards}
-						alerts={metric.alerts}
-					/>
-					<Metadata
-						metricName={metric?.name}
-						metadata={metric.metadata}
-						refetchMetricDetails={refetchMetricDetails}
-					/>
-					{metric.attributes && (
-						<AllAttributes
-							metricName={metric?.name}
-							attributes={metric.attributes}
-							metricType={metric?.metadata?.metric_type}
-						/>
-					)}
-				</div>
-			)}
+			<div className="metric-details-content">
+				<Highlights metricName={metricName} />
+				<DashboardsAndAlertsPopover metricName={metricName} />
+				<Metadata
+					metricName={metricName}
+					metadata={metadata}
+					isErrorMetricMetadata={isErrorMetricMetadata}
+					isLoadingMetricMetadata={isLoadingMetricMetadata}
+				/>
+				<AllAttributes metricName={metricName} metricType={metadata?.metricType} />
+			</div>
 		</Drawer>
 	);
 }
```
constants.ts (modified)

```diff
@@ -1,6 +1,6 @@
 export const METRIC_METADATA_KEYS = {
 	description: 'Description',
 	unit: 'Unit',
-	metric_type: 'Metric Type',
+	metricType: 'Metric Type',
 	temporality: 'Temporality',
 };
```
types.ts (modified)

```diff
@@ -1,9 +1,4 @@
-import {
-	MetricDetails,
-	MetricDetailsAlert,
-	MetricDetailsAttribute,
-	MetricDetailsDashboard,
-} from 'api/metricsExplorer/getMetricDetails';
+import { Temporality } from 'api/metricsExplorer/getMetricDetails';
 import { MetricType } from 'api/metricsExplorer/getMetricsList';
 
 export interface MetricDetailsProps {
@@ -14,19 +9,21 @@ export interface MetricDetailsProps {
 	openInspectModal: (metricName: string) => void;
 }
 
+export interface HighlightsProps {
+	metricName: string;
+}
+
 export interface DashboardsAndAlertsPopoverProps {
-	dashboards: MetricDetailsDashboard[] | null;
-	alerts: MetricDetailsAlert[] | null;
+	metricName: string;
 }
 
 export interface MetadataProps {
 	metricName: string;
-	metadata: MetricDetails['metadata'] | undefined;
-	refetchMetricDetails: () => void;
+	metadata: MetricMetadata | null;
+	isErrorMetricMetadata: boolean;
+	isLoadingMetricMetadata: boolean;
 }
 
 export interface AllAttributesProps {
-	attributes: MetricDetailsAttribute[];
 	metricName: string;
 	metricType: MetricType | undefined;
 }
@@ -36,3 +33,51 @@ export interface AllAttributesValueProps {
 	filterValue: string[];
 	goToMetricsExploreWithAppliedAttribute: (key: string, value: string) => void;
 }
+
+export interface MetricHighlight {
+	dataPoints: number;
+	lastReceived: number;
+	totalTimeSeries: number;
+	activeTimeSeries: number;
+}
+
+export interface MetricAlert {
+	alertName: string;
+	alertId: string;
+}
+
+export interface MetricDashboard {
+	dashboardName: string;
+	dashboardId: string;
+	widgetId: string;
+	widgetName: string;
+}
+
+export interface MetricMetadata {
+	metricType: MetricType;
+	description: string;
+	unit: string;
+	temporality: Temporality;
+	isMonotonic: boolean;
+}
+
+export interface MetricMetadataState {
+	metricType: MetricType;
+	description: string;
+	temporality: Temporality | undefined;
+	unit: string | undefined;
+}
+
+export interface MetricAttribute {
+	key: string;
+	values: string[];
+	valueCount: number;
+}
+
+export enum TableFields {
+	DESCRIPTION = 'description',
+	UNIT = 'unit',
+	METRIC_TYPE = 'metricType',
+	Temporality = 'temporality',
+	IS_MONOTONIC = 'isMonotonic',
+}
```
utils.ts (modified)

```diff
@@ -2,11 +2,29 @@ import { Temporality } from 'api/metricsExplorer/getMetricDetails';
 import { MetricType } from 'api/metricsExplorer/getMetricsList';
 import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
 import { initialQueriesMap } from 'constants/queryBuilder';
+import { SuccessResponseV2 } from 'types/api';
+import {
+	GetMetricAlertsResponse,
+	GetMetricAttributesResponse,
+	GetMetricDashboardsResponse,
+	GetMetricHighlightsResponse,
+	GetMetricMetadataResponse,
+	UpdateMetricMetadataRequest,
+} from 'types/api/metricsExplorer/v2';
 import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { Query } from 'types/api/queryBuilder/queryBuilderData';
 import { DataSource } from 'types/common/queryBuilder';
 
-export function formatTimestampToReadableDate(timestamp: string): string {
+import {
+	MetricAlert,
+	MetricAttribute,
+	MetricDashboard,
+	MetricHighlight,
+	MetricMetadata,
+	MetricMetadataState,
+} from './types';
+
+export function formatTimestampToReadableDate(timestamp: number): string {
 	const date = new Date(timestamp);
 	const now = new Date();
 	const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000);
@@ -154,3 +172,149 @@ export function getMetricDetailsQuery(
 		},
 	};
 }
+
+export function transformMetricHighlights(
+	apiData: SuccessResponseV2<GetMetricHighlightsResponse> | undefined,
+): MetricHighlight | null {
+	if (!apiData || !apiData.data || !apiData.data.data) {
+		return null;
+	}
+
+	const {
+		dataPoints,
+		lastReceived,
+		totalTimeSeries,
+		activeTimeSeries,
+	} = apiData.data.data;
+
+	return {
+		dataPoints,
+		lastReceived,
+		totalTimeSeries,
+		activeTimeSeries,
+	};
+}
+
+export function transformMetricAlerts(
+	apiData: SuccessResponseV2<GetMetricAlertsResponse> | undefined,
+): MetricAlert[] {
+	if (
+		!apiData ||
+		!apiData.data ||
+		!apiData.data.data ||
+		!apiData.data.data.alerts
+	) {
+		return [];
+	}
+	return apiData.data.data.alerts.map((alert) => ({
+		alertName: alert.alertName,
+		alertId: alert.alertId,
+	}));
+}
+
+export function transformMetricDashboards(
+	apiData: SuccessResponseV2<GetMetricDashboardsResponse> | undefined,
+): MetricDashboard[] {
+	if (
+		!apiData ||
+		!apiData.data ||
+		!apiData.data.data ||
+		!apiData.data.data.dashboards
+	) {
+		return [];
+	}
+	const dashboards = apiData.data.data.dashboards.map((dashboard) => ({
+		dashboardName: dashboard.dashboardName,
+		dashboardId: dashboard.dashboardId,
+		widgetId: dashboard.widgetId,
+		widgetName: dashboard.widgetName,
+	}));
+	// Remove duplicate dashboards
+	return dashboards.filter(
+		(dashboard, index, self) =>
+			index === self.findIndex((t) => t.dashboardId === dashboard.dashboardId),
+	);
+}
+
+export function transformTemporality(temporality: string): Temporality {
+	switch (temporality) {
+		case 'delta':
+			return Temporality.DELTA;
+		case 'cumulative':
+			return Temporality.CUMULATIVE;
+		default:
+			return Temporality.DELTA;
+	}
+}
+
+export function transformMetricType(type: string): MetricType {
+	switch (type) {
+		case 'sum':
+			return MetricType.SUM;
+		case 'gauge':
+			return MetricType.GAUGE;
+		case 'summary':
+			return MetricType.SUMMARY;
+		case 'histogram':
+			return MetricType.HISTOGRAM;
+		case 'exponential_histogram':
+			return MetricType.EXPONENTIAL_HISTOGRAM;
+		default:
+			return MetricType.SUM;
+	}
+}
+
+export function transformMetricMetadata(
+	apiData: SuccessResponseV2<GetMetricMetadataResponse> | undefined,
+): MetricMetadata | null {
+	if (!apiData || !apiData.data || !apiData.data.data) {
+		return null;
+	}
+	const {
+		type,
+		description,
+		unit,
+		temporality,
+		isMonotonic,
+	} = apiData.data.data;
+
+	return {
+		metricType: transformMetricType(type),
+		description,
+		unit,
+		temporality: transformTemporality(temporality),
+		isMonotonic,
+	};
+}
+
+export function transformUpdateMetricMetadataRequest(
+	metricMetadata: MetricMetadataState,
+): UpdateMetricMetadataRequest {
+	return {
+		type: metricMetadata.metricType,
+		description: metricMetadata.description,
+		unit: metricMetadata.unit || '',
+		temporality: metricMetadata.temporality || '',
+		isMonotonic: determineIsMonotonic(
+			metricMetadata.metricType,
+			metricMetadata.temporality,
+		),
+	};
+}
+
+export function transformMetricAttributes(
+	apiData: SuccessResponseV2<GetMetricAttributesResponse> | undefined,
+): { attributes: MetricAttribute[]; totalKeys: number } {
+	if (!apiData || !apiData.data || !apiData.data.data) {
+		return { attributes: [], totalKeys: 0 };
+	}
+	const { attributes, totalKeys } = apiData.data.data;
+	return {
+		attributes: attributes.map((attribute) => ({
+			key: attribute.key,
+			values: attribute.values,
+			valueCount: attribute.valueCount,
+		})),
+		totalKeys,
+	};
}
```
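To make the transforms concrete, here is an illustrative round trip through `transformMetricAlerts` with a hand-built envelope (all values made up); the shape follows `SuccessResponseV2<GetMetricAlertsResponse>` as used above:

```ts
const sampleEnvelope = {
	httpStatusCode: 200,
	data: {
		status: 'success',
		data: { alerts: [{ alertName: 'High CPU usage', alertId: 'rule-42' }] },
	},
};

// transformMetricAlerts(sampleEnvelope)
//   → [{ alertName: 'High CPU usage', alertId: 'rule-42' }]
// transformMetricAlerts(undefined)
//   → [] (every transform degrades to an empty or null value on missing data)
```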
frontend/src/hooks/metricsExplorer/v2/useGetMetricAlerts.ts (new file, 22 lines)

```ts
import { getMetricAlerts } from 'api/metricsExplorer/v2/getMetricAlerts';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricAlertsResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricAlerts = (
	metricName: string,
	options?: UseQueryOptions<SuccessResponseV2<GetMetricAlertsResponse>, Error>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricAlertsResponse>, Error>;

export const useGetMetricAlerts: UseGetMetricAlerts = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricAlertsResponse>, Error>({
		queryFn: ({ signal }) => getMetricAlerts(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_ALERTS, metricName],
	});
```
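A minimal consumer sketch (assumes a react-query `QueryClientProvider` higher in the tree, as the app's other hooks do; the component is hypothetical). Note that because `...options` is spread before `queryKey`, a caller-supplied key is always overridden by `[GET_METRIC_ALERTS, metricName]`:

```tsx
import { useGetMetricAlerts } from 'hooks/metricsExplorer/v2/useGetMetricAlerts';

// Hypothetical consumer component.
function AlertRuleCount({ metricName }: { metricName: string }): JSX.Element {
	const { data, isLoading } = useGetMetricAlerts(metricName, {
		enabled: !!metricName,
	});
	if (isLoading) return <span>Loading…</span>;
	return <span>{data?.data.data.alerts.length ?? 0}</span>;
}
```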
frontend/src/hooks/metricsExplorer/v2/useGetMetricAttributes.ts (new file, 36 lines)

```ts
import { getMetricAttributes } from 'api/metricsExplorer/v2/getMetricAttributes';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
	GetMetricAttributesRequest,
	GetMetricAttributesResponse,
} from 'types/api/metricsExplorer/v2';

type UseGetMetricAttributes = (
	requestData: GetMetricAttributesRequest,
	options?: UseQueryOptions<
		SuccessResponseV2<GetMetricAttributesResponse>,
		Error
	>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricAttributesResponse>, Error>;

export const useGetMetricAttributes: UseGetMetricAttributes = (
	requestData,
	options,
	headers,
) => {
	const queryKey = [
		REACT_QUERY_KEY.GET_METRIC_ATTRIBUTES,
		requestData.metricName,
		requestData.start,
		requestData.end,
	];

	return useQuery<SuccessResponseV2<GetMetricAttributesResponse>, Error>({
		queryFn: ({ signal }) => getMetricAttributes(requestData, signal, headers),
		...options,
		queryKey,
	});
};
```
frontend/src/hooks/metricsExplorer/v2/useGetMetricDashboards.ts (new file, 25 lines)

```ts
import { getMetricDashboards } from 'api/metricsExplorer/v2/getMetricDashboards';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricDashboardsResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricDashboards = (
	metricName: string,
	options?: UseQueryOptions<
		SuccessResponseV2<GetMetricDashboardsResponse>,
		Error
	>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricDashboardsResponse>, Error>;

export const useGetMetricDashboards: UseGetMetricDashboards = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricDashboardsResponse>, Error>({
		queryFn: ({ signal }) => getMetricDashboards(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_DASHBOARDS, metricName],
	});
```
frontend/src/hooks/metricsExplorer/v2/useGetMetricHighlights.ts (new file, 25 lines)

```ts
import { getMetricHighlights } from 'api/metricsExplorer/v2/getMetricHighlights';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricHighlightsResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricHighlights = (
	metricName: string,
	options?: UseQueryOptions<
		SuccessResponseV2<GetMetricHighlightsResponse>,
		Error
	>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricHighlightsResponse>, Error>;

export const useGetMetricHighlights: UseGetMetricHighlights = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricHighlightsResponse>, Error>({
		queryFn: ({ signal }) => getMetricHighlights(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_HIGHLIGHTS, metricName],
	});
```
frontend/src/hooks/metricsExplorer/v2/useGetMetricMetadata.ts (new file, 22 lines)

```ts
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricMetadataResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricMetadata = (
	metricName: string,
	options?: UseQueryOptions<SuccessResponseV2<GetMetricMetadataResponse>, Error>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricMetadataResponse>, Error>;

export const useGetMetricMetadata: UseGetMetricMetadata = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricMetadataResponse>, Error>({
		queryFn: ({ signal }) => getMetricMetadata(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
	});
```
@@ -0,0 +1,22 @@
import updateMetricMetadata from 'api/metricsExplorer/v2/updateMetricMetadata';
import { useMutation, UseMutationResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
	UpdateMetricMetadataResponse,
	UseUpdateMetricMetadataProps,
} from 'types/api/metricsExplorer/v2';

export function useUpdateMetricMetadata(): UseMutationResult<
	SuccessResponseV2<UpdateMetricMetadataResponse>,
	Error,
	UseUpdateMetricMetadataProps
> {
	return useMutation<
		SuccessResponseV2<UpdateMetricMetadataResponse>,
		Error,
		UseUpdateMetricMetadataProps
	>({
		mutationFn: ({ metricName, payload }) =>
			updateMetricMetadata(metricName, payload),
	});
}
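A short sketch of invoking this mutation; the metric name and field values below are placeholders:

const { mutate: updateMetadata } = useUpdateMetricMetadata();

// Payload shape follows UpdateMetricMetadataRequest from types/api/metricsExplorer/v2.
updateMetadata({
	metricName: 'http_server_duration', // placeholder metric
	payload: {
		type: 'Histogram',
		description: 'Duration of inbound HTTP requests',
		temporality: 'Delta',
		unit: 'ms',
		isMonotonic: false,
	},
});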
77
frontend/src/types/api/metricsExplorer/v2.ts
Normal file
@@ -0,0 +1,77 @@
export interface GetMetricMetadataResponse {
	status: string;
	data: {
		description: string;
		type: string;
		unit: string;
		temporality: string;
		isMonotonic: boolean;
	};
}

export interface GetMetricHighlightsResponse {
	status: string;
	data: {
		dataPoints: number;
		lastReceived: number;
		totalTimeSeries: number;
		activeTimeSeries: number;
	};
}

export interface GetMetricAttributesRequest {
	metricName: string;
	start?: number;
	end?: number;
}

export interface GetMetricAttributesResponse {
	status: string;
	data: {
		attributes: {
			key: string;
			values: string[];
			valueCount: number;
		}[];
		totalKeys: number;
	};
}

export interface GetMetricAlertsResponse {
	status: string;
	data: {
		alerts: {
			alertName: string;
			alertId: string;
		}[];
	};
}

export interface GetMetricDashboardsResponse {
	status: string;
	data: {
		dashboards: {
			dashboardName: string;
			dashboardId: string;
			widgetId: string;
			widgetName: string;
		}[];
	};
}

export interface UpdateMetricMetadataRequest {
	type: string;
	description: string;
	temporality: string;
	unit: string;
	isMonotonic: boolean;
}

export interface UpdateMetricMetadataResponse {
	status: string;
}

export interface UseUpdateMetricMetadataProps {
	metricName: string;
	payload: UpdateMetricMetadataRequest;
}
10
frontend/src/utils/pluralize.ts
Normal file
@@ -0,0 +1,10 @@
export function pluralize(
	count: number,
	singular: string,
	plural: string,
): string {
	if (count === 1) {
		return `${count} ${singular}`;
	}
	return `${count} ${plural}`;
}
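For example (return values shown in comments):

pluralize(1, 'dashboard', 'dashboards'); // "1 dashboard"
pluralize(5, 'dashboard', 'dashboards'); // "5 dashboards"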
@@ -100,10 +100,8 @@ func newProvider(
		traceAggExprRewriter,
	)

	logsKeyEvolutionMetadata := telemetrylogs.NewKeyEvolutionMetadata(telemetryStore, cache, settings.Logger)

	// Create log statement builder
	logFieldMapper := telemetrylogs.NewFieldMapper(logsKeyEvolutionMetadata)
	logFieldMapper := telemetrylogs.NewFieldMapper()
	logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
	logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
		settings,
@@ -51,8 +51,6 @@ func NewAggExprRewriter(
// and the args if the parametric aggregation function is used.
func (r *aggExprRewriter) Rewrite(
	ctx context.Context,
	startNs uint64,
	endNs uint64,
	expr string,
	rateInterval uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -79,12 +77,7 @@ func (r *aggExprRewriter) Rewrite(
		return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
	}

	visitor := newExprVisitor(
		ctx,
		startNs,
		endNs,
		r.logger,
		keys,
	visitor := newExprVisitor(r.logger, keys,
		r.fullTextColumn,
		r.fieldMapper,
		r.conditionBuilder,
@@ -105,8 +98,6 @@ func (r *aggExprRewriter) Rewrite(
// RewriteMulti rewrites a slice of expressions.
func (r *aggExprRewriter) RewriteMulti(
	ctx context.Context,
	startNs uint64,
	endNs uint64,
	exprs []string,
	rateInterval uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -115,7 +106,7 @@ func (r *aggExprRewriter) RewriteMulti(
	var errs []error
	var chArgsList [][]any
	for i, e := range exprs {
		w, chArgs, err := r.Rewrite(ctx, startNs, endNs, e, rateInterval, keys)
		w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
		if err != nil {
			errs = append(errs, err)
			out[i] = e
@@ -132,9 +123,6 @@ func (r *aggExprRewriter) RewriteMulti(

// exprVisitor walks FunctionExpr nodes and applies the mappers.
type exprVisitor struct {
	ctx     context.Context
	startNs uint64
	endNs   uint64
	chparser.DefaultASTVisitor
	logger    *slog.Logger
	fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
@@ -149,9 +137,6 @@ type exprVisitor struct {
}

func newExprVisitor(
	ctx context.Context,
	startNs uint64,
	endNs uint64,
	logger *slog.Logger,
	fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
	fullTextColumn *telemetrytypes.TelemetryFieldKey,
@@ -161,9 +146,6 @@ func newExprVisitor(
	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
	return &exprVisitor{
		ctx:            ctx,
		startNs:        startNs,
		endNs:          endNs,
		logger:         logger,
		fieldKeys:      fieldKeys,
		fullTextColumn: fullTextColumn,
@@ -208,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
	if aggFunc.FuncCombinator {
		// Map the predicate (last argument)
		origPred := args[len(args)-1].String()
		whereClause, err := PrepareWhereClause(
		whereClause, err := PrepareWhereClause(
			origPred,
			FilterExprVisitorOpts{
				Logger: v.logger,
@@ -217,7 +199,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
				ConditionBuilder: v.conditionBuilder,
				FullTextColumn:   v.fullTextColumn,
				JsonKeyToKey:     v.jsonKeyToKey,
			}, v.startNs, v.endNs,
			}, 0, 0,
		)
		if err != nil {
			return err
@@ -237,7 +219,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
		for i := 0; i < len(args)-1; i++ {
			origVal := args[i].String()
			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
			expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
			if err != nil {
				return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
			}
@@ -255,7 +237,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
		for i, arg := range args {
			orig := arg.String()
			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
			expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
			if err != nil {
				return err
			}
@@ -19,8 +19,6 @@ import (

func CollisionHandledFinalExpr(
	ctx context.Context,
	startNs uint64,
	endNs uint64,
	field *telemetrytypes.TelemetryFieldKey,
	fm qbtypes.FieldMapper,
	cb qbtypes.ConditionBuilder,
@@ -47,7 +45,7 @@ func CollisionHandledFinalExpr(

	addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
		sb := sqlbuilder.NewSelectBuilder()
		condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, startNs, endNs)
		condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
		if err != nil {
			return err
		}
@@ -60,7 +58,7 @@ func CollisionHandledFinalExpr(
		return nil
	}

	colName, err := fm.FieldFor(ctx, startNs, endNs, field)
	colName, err := fm.FieldFor(ctx, field)
	if errors.Is(err, qbtypes.ErrColumnNotFound) {
		// the key didn't have the right context to be added to the query
		// we try to use the context we know of
@@ -95,7 +93,7 @@ func CollisionHandledFinalExpr(
		if err != nil {
			return "", nil, err
		}
		colName, _ = fm.FieldFor(ctx, startNs, endNs, key)
		colName, _ = fm.FieldFor(ctx, key)
		colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
		stmts = append(stmts, colName)
	}
@@ -48,8 +48,8 @@ func (b *defaultConditionBuilder) ConditionFor(
	op qbtypes.FilterOperator,
	value any,
	sb *sqlbuilder.SelectBuilder,
	startNs uint64,
	endNs uint64,
	_ uint64,
	_ uint64,
) (string, error) {

	if key.FieldContext != telemetrytypes.FieldContextResource {
@@ -68,7 +68,7 @@ func (b *defaultConditionBuilder) ConditionFor(
	keyIdxFilter := sb.Like(column.Name, keyIndexFilter(key))
	valueForIndexFilter := valueForIndexFilter(op, key, value)

	fieldName, err := b.fm.FieldFor(ctx, startNs, endNs, key)
	fieldName, err := b.fm.FieldFor(ctx, key)
	if err != nil {
		return "", err
	}
@@ -47,7 +47,6 @@ func (m *defaultFieldMapper) ColumnFor(

func (m *defaultFieldMapper) FieldFor(
	ctx context.Context,
	tsStart, tsEnd uint64,
	key *telemetrytypes.TelemetryFieldKey,
) (string, error) {
	column, err := m.getColumn(ctx, key)
@@ -62,11 +61,10 @@ func (m *defaultFieldMapper) FieldFor(

func (m *defaultFieldMapper) ColumnExpressionFor(
	ctx context.Context,
	tsStart, tsEnd uint64,
	key *telemetrytypes.TelemetryFieldKey,
	_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
	colName, err := m.FieldFor(ctx, tsStart, tsEnd, key)
	colName, err := m.FieldFor(ctx, key)
	if err != nil {
		return "", err
	}
@@ -25,7 +25,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

func (c *conditionBuilder) conditionFor(
	ctx context.Context,
	startNs, endNs uint64,
	key *telemetrytypes.TelemetryFieldKey,
	operator qbtypes.FilterOperator,
	value any,
@@ -47,7 +46,7 @@ func (c *conditionBuilder) conditionFor(
		return "", err
	}

	tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
	tblFieldName, err := c.fm.FieldFor(ctx, key)
	if err != nil {
		return "", err
	}
@@ -240,10 +239,10 @@ func (c *conditionBuilder) ConditionFor(
	operator qbtypes.FilterOperator,
	value any,
	sb *sqlbuilder.SelectBuilder,
	startNs uint64,
	endNs uint64,
	_ uint64,
	_ uint64,
) (string, error) {
	condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
	condition, err := c.conditionFor(ctx, key, operator, value, sb)
	if err != nil {
		return "", err
	}
@@ -251,12 +250,12 @@ func (c *conditionBuilder) ConditionFor(
	if operator.AddDefaultExistsFilter() {
		// skip adding exists filter for intrinsic fields
		// with an exception for body json search
		field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
		field, _ := c.fm.FieldFor(ctx, key)
		if slices.Contains(maps.Keys(IntrinsicFields), field) && key.FieldContext != telemetrytypes.FieldContextBody {
			return condition, nil
		}

		existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
		existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
		if err != nil {
			return "", err
		}
@@ -6,7 +6,6 @@ import (

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
@@ -373,8 +372,7 @@ func TestConditionFor(t *testing.T) {
		},
	}

	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	conditionBuilder := NewConditionBuilder(fm)

	for _, tc := range testCases {
@@ -427,8 +425,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
		},
	}

	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	conditionBuilder := NewConditionBuilder(fm)

	for _, tc := range testCases {
@@ -687,8 +684,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
		},
	}

	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	conditionBuilder := NewConditionBuilder(fm)

	for _, tc := range testCases {
@@ -4,14 +4,11 @@ import (
	"context"
	"fmt"
	"strings"
	"time"

	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/huandu/go-sqlbuilder"

	"golang.org/x/exp/maps"
@@ -58,13 +55,10 @@ var (
)

type fieldMapper struct {
	evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore
}

func NewFieldMapper(evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore) qbtypes.FieldMapper {
	return &fieldMapper{
		evolutionMetadataStore: evolutionMetadataStore,
	}
func NewFieldMapper() qbtypes.FieldMapper {
	return &fieldMapper{}
}

func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
@@ -106,7 +100,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
	return nil, qbtypes.ErrColumnNotFound
}

func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
	column, err := m.getColumn(ctx, key)
	if err != nil {
		return "", err
@@ -118,34 +112,17 @@ func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *
		if key.FieldContext != telemetrytypes.FieldContextResource {
			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
		}
		oldColumn := logsV2Columns["resources_string"]
		oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)

		baseColumn := logsV2Columns["resources_string"]
		tsStartTime := time.Unix(0, int64(tsStart))

		// Extract orgId from context
		var orgID valuer.UUID
		if claims, err := authtypes.ClaimsFromContext(ctx); err == nil {
			orgID, err = valuer.NewUUID(claims.OrgID)
			if err != nil {
				return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid orgId %s", claims.OrgID)
			}
		}

		// get all evolution for the column
		evolutions := m.evolutionMetadataStore.Get(ctx, orgID, baseColumn.Name)

		// restricting now to just one entry where we know we changed from map to json
		if len(evolutions) > 0 && evolutions[0].ReleaseTime.Before(tsStartTime) {
			return fmt.Sprintf("%s.`%s`::String", evolutions[0].NewColumn, key.Name), nil
		}

		// have to add ::String as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
		// once the ClickHouse dependency is updated, we need to check if we can remove it.
		if key.Materialized {
			oldKeyName := telemetrytypes.FieldKeyToMaterializedColumnName(key)
			oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
			oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
		} else {
			attrVal := fmt.Sprintf("%s['%s']", baseColumn.Name, key.Name)
			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, baseColumn.Name, key.Name, attrVal), nil
			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
		}
	case schema.ColumnTypeEnumLowCardinality:
		switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
@@ -184,12 +161,11 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet

func (m *fieldMapper) ColumnExpressionFor(
	ctx context.Context,
	tsStart, tsEnd uint64,
	field *telemetrytypes.TelemetryFieldKey,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

	colName, err := m.FieldFor(ctx, tsStart, tsEnd, field)
	colName, err := m.FieldFor(ctx, field)
	if errors.Is(err, qbtypes.ErrColumnNotFound) {
		// the key didn't have the right context to be added to the query
		// we try to use the context we know of
@@ -199,7 +175,7 @@ func (m *fieldMapper) ColumnExpressionFor(
		if _, ok := logsV2Columns[field.Name]; ok {
			// if it is, attach the column name directly
			field.FieldContext = telemetrytypes.FieldContextLog
			colName, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
			colName, _ = m.FieldFor(ctx, field)
		} else {
			// - the context is not provided
			// - there are no keys for the field
@@ -217,12 +193,12 @@ func (m *fieldMapper) ColumnExpressionFor(
		}
	} else if len(keysForField) == 1 {
		// we have a single key for the field, use it
		colName, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
		colName, _ = m.FieldFor(ctx, keysForField[0])
	} else {
		// select any non-empty value from the keys
		args := []string{}
		for _, key := range keysForField {
			colName, _ = m.FieldFor(ctx, tsStart, tsEnd, key)
			colName, _ = m.FieldFor(ctx, key)
			args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
		}
		colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
@@ -3,14 +3,10 @@ package telemetrylogs

import (
	"context"
	"testing"
	"time"

	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)
@@ -168,8 +164,7 @@ func TestGetColumn(t *testing.T) {
		},
	}

	mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(mockStore)
	fm := NewFieldMapper()

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
@@ -234,7 +229,7 @@ func TestGetFieldKeyName(t *testing.T) {
			expectedError: nil,
		},
		{
			name: "Map column type - resource attribute - json",
			name: "Map column type - resource attribute",
			key: telemetrytypes.TelemetryFieldKey{
				Name:         "service.name",
				FieldContext: telemetrytypes.FieldContextResource,
@@ -243,7 +238,7 @@ func TestGetFieldKeyName(t *testing.T) {
			expectedError: nil,
		},
		{
			name: "Map column type - resource attribute - Materialized - json",
			name: "Map column type - resource attribute - Materialized",
			key: telemetrytypes.TelemetryFieldKey{
				Name:         "service.name",
				FieldContext: telemetrytypes.FieldContextResource,
@@ -266,96 +261,8 @@ func TestGetFieldKeyName(t *testing.T) {

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
			fm := NewFieldMapper(mockStore)
			result, err := fm.FieldFor(ctx, 0, 0, &tc.key)

			if tc.expectedError != nil {
				assert.Equal(t, tc.expectedError, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, tc.expectedResult, result)
			}
		})
	}
}

func TestFieldForWithEvolutionMetadata(t *testing.T) {
	ctx := context.Background()
	orgId := valuer.GenerateUUID()
	ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
		OrgID: orgId.String(),
	})

	// Create a test release time
	releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	releaseTimeNano := uint64(releaseTime.UnixNano())

	// Common test key
	serviceNameKey := telemetrytypes.TelemetryFieldKey{
		Name:         "service.name",
		FieldContext: telemetrytypes.FieldContextResource,
	}

	// Common expected results
	jsonOnlyResult := "resource.`service.name`::String"
	fallbackResult := "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)"

	// Set up stores once
	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(mockKeyEvolutionMetadata(ctx, orgId, releaseTime))
	storeWithoutMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)

	testCases := []struct {
		name           string
		tsStart        uint64
		tsEnd          uint64
		key            telemetrytypes.TelemetryFieldKey
		mockStore      *telemetrytypestest.MockKeyEvolutionMetadataStore
		expectedResult string
		expectedError  error
	}{
		{
			name:           "Resource attribute - tsStart before release time (use fallback with multiIf)",
			tsStart:        releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
			tsEnd:          releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
			key:            serviceNameKey,
			mockStore:      storeWithMetadata,
			expectedResult: fallbackResult,
			expectedError:  nil,
		},
		{
			name:           "Resource attribute - tsStart after release time (use new json column)",
			tsStart:        releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
			tsEnd:          releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
			key:            serviceNameKey,
			mockStore:      storeWithMetadata,
			expectedResult: jsonOnlyResult,
			expectedError:  nil,
		},
		{
			name:           "Resource attribute - no evolution metadata (use fallback with multiIf)",
			tsStart:        releaseTimeNano,
			tsEnd:          releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
			key:            serviceNameKey,
			mockStore:      storeWithoutMetadata,
			expectedResult: fallbackResult,
			expectedError:  nil,
		},
		{
			name:           "Resource attribute - tsStart exactly at release time (use fallback with multiIf)",
			tsStart:        releaseTimeNano,
			tsEnd:          releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
			key:            serviceNameKey,
			mockStore:      storeWithMetadata,
			expectedResult: fallbackResult,
			expectedError:  nil,
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			fm := NewFieldMapper(tc.mockStore)
			result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
			fm := NewFieldMapper()
			result, err := fm.FieldFor(ctx, &tc.key)

			if tc.expectedError != nil {
				assert.Equal(t, tc.expectedError, err)
@@ -5,14 +5,12 @@ import (

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
	"github.com/stretchr/testify/require"
)

// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	keys := buildCompleteFieldKeyMap()
@@ -35,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

	for _, expr := range tests {
		t.Run(expr, func(t *testing.T) {
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			require.NoError(t, err)
			require.NotNil(t, clause)
@@ -48,8 +46,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

// TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	keys := buildCompleteFieldKeyMap()
@@ -72,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {

	for _, expr := range tests {
		t.Run(expr, func(t *testing.T) {
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			require.NoError(t, err)
			require.NotNil(t, clause)
@@ -7,15 +7,13 @@ import (

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/require"
)

// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	// Define a comprehensive set of field keys to support all test cases
@@ -9,15 +9,13 @@ import (

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/require"
)

// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	// Define a comprehensive set of field keys to support all test cases
@@ -2424,8 +2422,7 @@ func TestFilterExprLogs(t *testing.T) {

// TestFilterExprLogsConflictNegation tests a comprehensive set of query patterns for logs search
func TestFilterExprLogsConflictNegation(t *testing.T) {
	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
	fm := NewFieldMapper(storeWithMetadata)
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	// Define a comprehensive set of field keys to support all test cases
@@ -1,158 +0,0 @@
package telemetrylogs

import (
	"context"
	"encoding/json"
	"fmt"
	"log/slog"
	"time"

	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/cachetypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/huandu/go-sqlbuilder"
)

const (
	// KeyEvolutionMetadataTableName is the table name for key evolution metadata
	KeyEvolutionMetadataTableName = "distributed_column_key_evolution_metadata"
	// KeyEvolutionMetadataDBName is the database name for key evolution metadata
	KeyEvolutionMetadataDBName = "signoz_logs"
	// KeyEvolutionMetadataCacheKeyPrefix is the prefix for cache keys
	KeyEvolutionMetadataCacheKeyPrefix = "key_evolution_metadata:"

	base_column      = "base_column"
	base_column_type = "base_column_type"
	new_column       = "new_column"
	new_column_type  = "new_column_type"
	release_time     = "release_time"
)

// CachedKeyEvolutionMetadata is a cacheable type for storing key evolution metadata
type CachedKeyEvolutionMetadata struct {
	Keys []*telemetrytypes.KeyEvolutionMetadataKey `json:"keys"`
}

var _ cachetypes.Cacheable = (*CachedKeyEvolutionMetadata)(nil)

func (c *CachedKeyEvolutionMetadata) MarshalBinary() ([]byte, error) {
	return json.Marshal(c)
}

func (c *CachedKeyEvolutionMetadata) UnmarshalBinary(data []byte) error {
	return json.Unmarshal(data, c)
}

// Each key can have multiple evolution entries, allowing for multiple column transitions over time.
// The cache is organized by orgId, then by key name.
type KeyEvolutionMetadata struct {
	cache          cache.Cache
	telemetryStore telemetrystore.TelemetryStore
	logger         *slog.Logger
}

var _ telemetrytypes.KeyEvolutionMetadataStore = (*KeyEvolutionMetadata)(nil)

func NewKeyEvolutionMetadata(telemetryStore telemetrystore.TelemetryStore, cache cache.Cache, logger *slog.Logger) *KeyEvolutionMetadata {
	return &KeyEvolutionMetadata{
		cache:          cache,
		telemetryStore: telemetryStore,
		logger:         logger,
	}
}

func (k *KeyEvolutionMetadata) fetchFromClickHouse(ctx context.Context, orgID valuer.UUID, key string) []*telemetrytypes.KeyEvolutionMetadataKey {
	store := k.telemetryStore
	logger := k.logger

	ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()

	// Build query to fetch all key evolution metadata
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select(
		base_column,
		base_column_type,
		new_column,
		new_column_type,
		release_time,
	)
	sb.From(fmt.Sprintf("%s.%s", KeyEvolutionMetadataDBName, KeyEvolutionMetadataTableName))
	sb.OrderBy(base_column, release_time)
	sb.Where(sb.E(base_column, key))

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

	rows, err := store.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		logger.WarnContext(ctx, "Failed to fetch key evolution metadata from ClickHouse", "error", err)
		return nil
	}
	defer rows.Close()

	// Group metadata by base_column
	metadataByKey := make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)

	for rows.Next() {
		var (
			baseColumn     string
			baseColumnType string
			newColumn      string
			newColumnType  string
			releaseTime    uint64
		)

		if err := rows.Scan(&baseColumn, &baseColumnType, &newColumn, &newColumnType, &releaseTime); err != nil {
			logger.WarnContext(ctx, "Failed to scan key evolution metadata row", "error", err)
			continue
		}

		key := &telemetrytypes.KeyEvolutionMetadataKey{
			BaseColumn:     baseColumn,
			BaseColumnType: baseColumnType,
			NewColumn:      newColumn,
			NewColumnType:  newColumnType,
			ReleaseTime:    time.Unix(0, int64(releaseTime)),
		}

		metadataByKey[baseColumn] = append(metadataByKey[baseColumn], key)
	}

	if err := rows.Err(); err != nil {
		logger.WarnContext(ctx, "Error iterating key evolution metadata rows", "error", err)
		return nil
	}

	return metadataByKey[key]
}

// Get retrieves all metadata keys for the given key name and orgId from cache.
// Returns an empty slice if the key is not found in cache.
func (k *KeyEvolutionMetadata) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
	cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
	cachedData := &CachedKeyEvolutionMetadata{}
	if err := k.cache.Get(ctx, orgId, cacheKey, cachedData); err != nil {
		if !errors.Ast(err, errors.TypeNotFound) {
			k.logger.ErrorContext(ctx, "Failed to get key evolution metadata from cache", "error", err)
			return nil
		}

		// Cache miss - fetch from ClickHouse and try again
		metadata := k.fetchFromClickHouse(ctx, orgId, keyName)

		if metadata != nil {
			cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
			cachedData = &CachedKeyEvolutionMetadata{Keys: metadata}
			if err := k.cache.Set(ctx, orgId, cacheKey, cachedData, 24*time.Hour); err != nil {
				k.logger.WarnContext(ctx, "Failed to set key evolution metadata in cache", "key", keyName, "error", err)
			}
		}
	}
	return cachedData.Keys
}
@@ -1,213 +0,0 @@
package telemetrylogs

import (
	"context"
	"log/slog"
	"testing"
	"time"

	"github.com/DATA-DOG/go-sqlmock"
	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/cache/cachetest"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	cmock "github.com/srikanthccv/ClickHouse-go-mock"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

var (
	clickHouseQueryPattern = "SELECT.*base_column.*FROM.*distributed_column_key_evolution_metadata.*WHERE.*base_column.*=.*"
	clickHouseColumns      = []cmock.ColumnType{
		{Name: base_column, Type: "String"},
		{Name: base_column_type, Type: "String"},
		{Name: new_column, Type: "String"},
		{Name: new_column_type, Type: "String"},
		{Name: release_time, Type: "UInt64"},
	}
)

func newTestCache(t *testing.T) cache.Cache {
	t.Helper()
	testCache, err := cachetest.New(cache.Config{
		Provider: "memory",
		Memory: cache.Memory{
			NumCounters: 10 * 1000,
			MaxCost:     1 << 26,
		},
	})
	require.NoError(t, err)
	return testCache
}

func newTestTelemetryStore() *telemetrystoretest.Provider {
	return telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherRegexp)
}

func newKeyEvolutionMetadata(telemetryStore telemetrystore.TelemetryStore, cache cache.Cache) *KeyEvolutionMetadata {
	return NewKeyEvolutionMetadata(telemetryStore, cache, slog.Default())
}

func createMockRows(values [][]any) *cmock.Rows {
	return cmock.NewRows(clickHouseColumns, values)
}

func assertMetadataEqual(t *testing.T, expected, actual *telemetrytypes.KeyEvolutionMetadataKey) {
	t.Helper()
	assert.Equal(t, expected.BaseColumn, actual.BaseColumn)
	assert.Equal(t, expected.BaseColumnType, actual.BaseColumnType)
	assert.Equal(t, expected.NewColumn, actual.NewColumn)
	assert.Equal(t, expected.NewColumnType, actual.NewColumnType)
	assert.Equal(t, expected.ReleaseTime, actual.ReleaseTime)
}

func TestKeyEvolutionMetadata_Get_CacheHit(t *testing.T) {
	ctx := context.Background()
	orgId := valuer.GenerateUUID()
	keyName := "service.name"

	testCache := newTestCache(t)
	telemetryStore := newTestTelemetryStore()
	kem := newKeyEvolutionMetadata(telemetryStore, testCache)

	releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	expectedMetadata := []*telemetrytypes.KeyEvolutionMetadataKey{
		{
			BaseColumn:     "resources_string",
			BaseColumnType: "Map(LowCardinality(String), String)",
			NewColumn:      "resource",
			NewColumnType:  "JSON(max_dynamic_paths=100)",
			ReleaseTime:    releaseTime,
		},
	}

	cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
	cachedData := &CachedKeyEvolutionMetadata{Keys: expectedMetadata}
	err := testCache.Set(ctx, orgId, cacheKey, cachedData, 24*time.Hour)
	require.NoError(t, err)

	result := kem.Get(ctx, orgId, keyName)

	require.Len(t, result, 1)
	assertMetadataEqual(t, expectedMetadata[0], result[0])
}

func TestKeyEvolutionMetadata_Get_CacheMiss_FetchFromClickHouse(t *testing.T) {
	ctx := context.Background()
	orgId := valuer.GenerateUUID()
	keyName := "resources_string"

	testCache := newTestCache(t)
	telemetryStore := newTestTelemetryStore()

	releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	values := [][]any{
		{
			"resources_string",
			"Map(LowCardinality(String), String)",
			"resource",
			"JSON(max_dynamic_paths=100)",
			uint64(releaseTime.UnixNano()),
		},
	}

	rows := createMockRows(values)
	telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnRows(rows)

	kem := newKeyEvolutionMetadata(telemetryStore, testCache)
	result := kem.Get(ctx, orgId, keyName)

	require.Len(t, result, 1)
	assert.Equal(t, "resources_string", result[0].BaseColumn)
	assert.Equal(t, "Map(LowCardinality(String), String)", result[0].BaseColumnType)
	assert.Equal(t, "resource", result[0].NewColumn)
	assert.Equal(t, "JSON(max_dynamic_paths=100)", result[0].NewColumnType)
	assert.Equal(t, releaseTime.UnixNano(), result[0].ReleaseTime.UnixNano())

	// Verify data was cached
	var cachedData CachedKeyEvolutionMetadata
	cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
	err := testCache.Get(ctx, orgId, cacheKey, &cachedData)
	require.NoError(t, err)
	require.Len(t, cachedData.Keys, 1)
	assert.Equal(t, result[0].BaseColumn, cachedData.Keys[0].BaseColumn)
}

func TestKeyEvolutionMetadata_Get_MultipleMetadataEntries(t *testing.T) {
	ctx := context.Background()
	orgId := valuer.GenerateUUID()
	keyName := "resources_string"

	testCache := newTestCache(t)
	telemetryStore := newTestTelemetryStore()

	releaseTime1 := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	releaseTime2 := time.Date(2024, 2, 15, 10, 0, 0, 0, time.UTC)

	values := [][]any{
		{
			"resources_string",
			"Map(LowCardinality(String), String)",
			"resource",
			"JSON(max_dynamic_paths=100)",
			uint64(releaseTime1.UnixNano()),
		},
		{
			"resources_string",
			"Map(LowCardinality(String), String)",
			"resource_v2",
			"JSON(max_dynamic_paths=100, max_dynamic_path_depth=10)",
			uint64(releaseTime2.UnixNano()),
		},
	}

	rows := createMockRows(values)
	telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnRows(rows)

	kem := newKeyEvolutionMetadata(telemetryStore, testCache)
	result := kem.Get(ctx, orgId, keyName)

	require.Len(t, result, 2)
	assert.Equal(t, "resources_string", result[0].BaseColumn)
	assert.Equal(t, "resource", result[0].NewColumn)
	assert.Equal(t, "JSON(max_dynamic_paths=100)", result[0].NewColumnType)
	assert.Equal(t, releaseTime1.UnixNano(), result[0].ReleaseTime.UnixNano())
	assert.Equal(t, "resource_v2", result[1].NewColumn)
	assert.Equal(t, "JSON(max_dynamic_paths=100, max_dynamic_path_depth=10)", result[1].NewColumnType)
	assert.Equal(t, releaseTime2.UnixNano(), result[1].ReleaseTime.UnixNano())
}

func TestKeyEvolutionMetadata_Get_EmptyResultFromClickHouse(t *testing.T) {
	ctx := context.Background()
	orgId := valuer.GenerateUUID()
	keyName := "resources_string"

	testCache := newTestCache(t)
	telemetryStore := newTestTelemetryStore()

	rows := createMockRows([][]any{})
	telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnRows(rows)

	kem := newKeyEvolutionMetadata(telemetryStore, testCache)
	result := kem.Get(ctx, orgId, keyName)

	assert.Empty(t, result)
}

func TestKeyEvolutionMetadata_Get_ClickHouseQueryError(t *testing.T) {
	ctx := context.Background()
	orgId := valuer.GenerateUUID()
	keyName := "resources_string"

	testCache := newTestCache(t)
	telemetryStore := newTestTelemetryStore()

	telemetryStore.Mock().ExpectQuery(clickHouseQueryPattern).WithArgs(keyName).WillReturnError(assert.AnError)

	kem := newKeyEvolutionMetadata(telemetryStore, testCache)
	result := kem.Get(ctx, orgId, keyName)

	assert.Empty(t, result)
}
@@ -247,7 +247,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
			continue
		}
		// get column expression for the field - use array index directly to avoid pointer to loop variable
		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
		colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
		if err != nil {
			return nil, err
		}
@@ -267,7 +267,7 @@ func (b *logQueryStatementBuilder) buildListQuery(

	// Add order by
	for _, orderBy := range query.Order {
		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
		colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
		if err != nil {
			return nil, err
		}
@@ -333,7 +333,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	// Keep original column expressions so we can build the tuple
	fieldNames := make([]string, 0, len(query.GroupBy))
	for _, gb := range query.GroupBy {
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
		if err != nil {
			return nil, err
		}
@@ -347,7 +347,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	allAggChArgs := make([]any, 0)
	for i, agg := range query.Aggregations {
		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
			ctx, start, end, agg.Expression,
			ctx, agg.Expression,
			uint64(query.StepInterval.Seconds()),
			keys,
		)
@@ -479,7 +479,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
	var allGroupByArgs []any

	for _, gb := range query.GroupBy {
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
		if err != nil {
			return nil, err
		}
@@ -496,7 +496,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
	for idx := range query.Aggregations {
		aggExpr := query.Aggregations[idx]
		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
			ctx, start, end, aggExpr.Expression,
			ctx, aggExpr.Expression,
			rateInterval,
			keys,
		)
@@ -8,11 +8,9 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
@@ -40,14 +38,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
|
||||
}
|
||||
|
||||
func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
|
||||
// Create a test release time
|
||||
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
|
||||
releaseTimeNano := uint64(releaseTime.UnixNano())
|
||||
|
||||
cases := []struct {
|
||||
startTs uint64
|
||||
endTs uint64
|
||||
name string
|
||||
requestType qbtypes.RequestType
|
||||
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
|
||||
@@ -55,16 +46,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
expectedErr error
|
||||
}{
|
||||
{
|
||||
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
name: "Time series with limit and count distinct on service.name",
|
||||
name: "Time series with limit",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
Aggregations: []qbtypes.LogAggregation{
|
||||
{
|
||||
Expression: "count_distinct(service.name)",
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
Filter: &qbtypes.Filter{
|
||||
@@ -80,22 +69,20 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
startTs: releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
|
||||
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
name: "Time series with OR b/w resource attr and attribute filter and count distinct on service.name",
|
||||
name: "Time series with OR b/w resource attr and attribute filter",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
Aggregations: []qbtypes.LogAggregation{
|
||||
{
|
||||
Expression: "count_distinct(service.name)",
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
Filter: &qbtypes.Filter{
|
||||
@@ -111,14 +98,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1705224600), uint64(1705485600), "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600), 10, "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with limit + custom order by",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -152,14 +137,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with group by on materialized column",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -186,12 +169,10 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `materialized.key.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`materialized.key.name`) GLOBAL IN (SELECT `materialized.key.name` FROM __limit_cte) GROUP BY ts, `materialized.key.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with materialised column using or with regex operator",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -209,20 +190,13 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{uint64(1705397400), uint64(1705485600), "redis.*", true, "memcached", true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
Args: []any{uint64(1747945619), uint64(1747983448), "redis.*", true, "memcached", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
}

ctx := context.Background()
orgId := valuer.GenerateUUID()
ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
OrgID: orgId.String(),
})
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(mockKeyEvolutionMetadata(ctx, orgId, releaseTime))

fm := NewFieldMapper(storeWithMetadata)
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -246,7 +220,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

q, err := statementBuilder.Build(ctx, c.startTs, c.endTs, c.requestType, c.query, nil)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErr != nil {
require.Error(t, err)
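For orientation, a minimal standalone sketch of the fixed window now hardcoded into the Build call above; the two constants are the epoch-millisecond bounds from the diff, everything else is illustrative:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Fixed bounds passed to statementBuilder.Build above, in epoch milliseconds.
	start := time.UnixMilli(1747947419000).UTC()
	end := time.UnixMilli(1747983448000).UTC()
	fmt.Println("window:", start, "->", end, "spanning", end.Sub(start))
}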
@@ -343,8 +317,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
},
}

storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -453,8 +426,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
},
}

storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -528,8 +500,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
},
}

storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -625,8 +596,7 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
},
}

storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore(nil)
fm := NewFieldMapper(storeWithMetadata)
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()

@@ -1,12 +1,9 @@
package telemetrylogs

import (
"context"
"strings"
"time"

"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

// Helper function to limit string length for display
@@ -954,23 +951,3 @@ func buildCompleteFieldKeyMapCollision() map[string][]*telemetrytypes.TelemetryF
}
return keysMap
}

// mockKeyEvolutionMetadata builds a mock org-scoped key evolution metadata map for a column evolution.
func mockKeyEvolutionMetadata(ctx context.Context, orgId valuer.UUID, releaseTime time.Time) map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey {
metadata := make(map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey)

// Make sure to initialize the inner map before assignment to prevent 'assignment to entry in nil map'
orgIdStr := orgId.String()
if _, exists := metadata[orgIdStr]; !exists {
metadata[orgIdStr] = make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
}
newKeyEvolutionMetadataEntry := telemetrytypes.KeyEvolutionMetadataKey{
BaseColumn: "resources_string",
BaseColumnType: "Map(LowCardinality(String), String)",
NewColumn: "resource",
NewColumnType: "JSON(max_dynamic_paths=100)",
ReleaseTime: releaseTime,
}
metadata[orgIdStr]["resources_string"] = []*telemetrytypes.KeyEvolutionMetadataKey{&newKeyEvolutionMetadataEntry}
return metadata
}

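For context, a short fragment (not a standalone program) showing how the old side of this diff consumed the now-removed store; the variables come from the test setup above, and the Get signature is the one from the deleted KeyEvolutionMetadataStore interface further down this page:

// Old-side usage sketch: look up how a base column evolved for this org.
entries := storeWithMetadata.Get(ctx, orgId, "resources_string")
for _, e := range entries {
	// Each entry records that the "resources_string" Map column evolved into
	// the "resource" JSON column at e.ReleaseTime, so statements spanning the
	// release had to read both columns.
	fmt.Println(e.BaseColumn, "->", e.NewColumn, "since", e.ReleaseTime)
}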
@@ -25,7 +25,8 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
tsStart, tsEnd uint64,
_ uint64,
_ uint64,
) (string, error) {

switch operator {
@@ -44,7 +45,7 @@ func (c *conditionBuilder) ConditionFor(
return "", nil
}

tblFieldName, err := c.fm.FieldFor(ctx, tsStart, tsEnd, key)
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
// if we don't have a table field name, we can't build a condition for related values
return "", nil

@@ -53,7 +53,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)

if tc.expectedError != nil {

@@ -51,7 +51,7 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
return column, nil
}

func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err
@@ -69,12 +69,11 @@ func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *

func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

colName, err := m.FieldFor(ctx, startNs, endNs, field)
colName, err := m.FieldFor(ctx, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -84,7 +83,7 @@ func (m *fieldMapper) ColumnExpressionFor(
if _, ok := attributeMetadataColumns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextSpan
colName, _ = m.FieldFor(ctx, startNs, endNs, field)
colName, _ = m.FieldFor(ctx, field)
} else {
// - the context is not provided
// - there are not keys for the field
@@ -102,12 +101,12 @@ func (m *fieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
colName, _ = m.FieldFor(ctx, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
colName, _ = m.FieldFor(ctx, startNs, endNs, key)
colName, _ = m.FieldFor(ctx, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))

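To make the multi-key fallback concrete, a standalone sketch of the expression the loop above assembles; the column names are illustrative, not taken from the actual schema:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Two candidate columns for one logical field, e.g. a key that exists in
	// both attribute and resource contexts (names illustrative).
	cols := []string{"attributes_string['code.function']", "resources_string['code.function']"}
	args := []string{}
	for _, col := range cols {
		args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", col, col))
	}
	// Same shape as the builder above: pick the first non-empty value, else NULL.
	fmt.Printf("multiIf(%s, NULL)\n", strings.Join(args, ", "))
}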
@@ -145,8 +145,6 @@ func TestGetFieldKeyName(t *testing.T) {

testCases := []struct {
name string
tsStart uint64
tsEnd uint64
key telemetrytypes.TelemetryFieldKey
expectedResult string
expectedError error
@@ -205,7 +203,7 @@ func TestGetFieldKeyName(t *testing.T) {

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
result, err := fm.FieldFor(ctx, &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

@@ -942,18 +942,18 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldDataType: fieldValueSelector.FieldDataType,
}

selectColumn, err := t.fm.FieldFor(ctx, 0, 0, key)
selectColumn, err := t.fm.FieldFor(ctx, key)

if err != nil {
// we don't have a explicit column to select from the related metadata table
// so we will select either from resource_attributes or attributes table
// in that order
resourceColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
resourceColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
attributeColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
attributeColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
@@ -978,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {
@@ -1002,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel

// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}

// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}

@@ -120,7 +120,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
stepSec,
))
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", []any{}, err
}
@@ -148,7 +148,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", []any{}, err
}
@@ -200,7 +200,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
))

for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}
@@ -231,7 +231,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
}
@@ -270,7 +270,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
stepSec,
))
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}
@@ -295,7 +295,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
}

@@ -23,8 +23,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

func (c *conditionBuilder) conditionFor(
ctx context.Context,
startNs uint64,
endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
@@ -41,7 +39,7 @@ func (c *conditionBuilder) conditionFor(
value = querybuilder.FormatValueForContains(value)
}

tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
return "", err
}
@@ -141,10 +139,10 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
startNs uint64,
endNs uint64,
_ uint64,
_ uint64,
) (string, error) {
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {
return "", err
}

@@ -65,7 +65,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
return nil, qbtypes.ErrColumnNotFound
}

func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err
@@ -92,12 +92,11 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet

func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

colName, err := m.FieldFor(ctx, startNs, endNs, field)
colName, err := m.FieldFor(ctx, field)
if err != nil {
return "", err
}

@@ -207,7 +207,7 @@ func TestGetFieldKeyName(t *testing.T) {

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
result, err := fm.FieldFor(ctx, &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

@@ -359,7 +359,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(

sb.Select("fingerprint")
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}

@@ -29,8 +29,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

func (c *conditionBuilder) conditionFor(
ctx context.Context,
startNs uint64,
endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
@@ -54,7 +52,7 @@ func (c *conditionBuilder) conditionFor(
}

// then ask the mapper for the actual SQL reference
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
return "", err
}
@@ -241,20 +239,20 @@ func (c *conditionBuilder) ConditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
startNs uint64,
endNs uint64,
_ uint64,
) (string, error) {
if c.isSpanScopeField(key.Name) {
return c.buildSpanScopeCondition(key, operator, value, startNs)
}

condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {
return "", err
}

if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(maps.Keys(IntrinsicFields), field) ||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
slices.Contains(maps.Keys(CalculatedFields), field) ||
@@ -262,7 +260,7 @@ func (c *conditionBuilder) ConditionFor(
return condition, nil
}

existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
if err != nil {
return "", err
}

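As a concrete illustration of the default-exists behaviour, the expected statements earlier on this page pair an equality check with a mapContains probe. A standalone sketch of that composed shape follows; the combining AND happens outside the hunk shown above, so treat the exact composition as an assumption:

package main

import "fmt"

func main() {
	// Condition shapes taken from the expected SQL in the logs test file above.
	cond := "attributes_string['http.method'] = ?"
	exists := "mapContains(attributes_string, 'http.method') = ?"
	fmt.Printf("(%s AND %s)\n", cond, exists)
}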
@@ -225,7 +225,6 @@ func (m *defaultFieldMapper) ColumnFor(
// otherwise it returns qbtypes.ErrColumnNotFound
func (m *defaultFieldMapper) FieldFor(
ctx context.Context,
startNs, endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
) (string, error) {
// Special handling for span scope fields
@@ -298,12 +297,11 @@ func (m *defaultFieldMapper) FieldFor(
// if it exists otherwise it returns qbtypes.ErrColumnNotFound
func (m *defaultFieldMapper) ColumnExpressionFor(
ctx context.Context,
startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

colName, err := m.FieldFor(ctx, startNs, endNs, field)
colName, err := m.FieldFor(ctx, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -313,7 +311,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
if _, ok := indexV3Columns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextSpan
colName, _ = m.FieldFor(ctx, startNs, endNs, field)
colName, _ = m.FieldFor(ctx, field)
} else {
// - the context is not provided
// - there are not keys for the field
@@ -331,12 +329,12 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
colName, _ = m.FieldFor(ctx, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
colName, _ = m.FieldFor(ctx, startNs, endNs, key)
colName, _ = m.FieldFor(ctx, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))

@@ -92,7 +92,7 @@ func TestGetFieldKeyName(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fm := NewFieldMapper()
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
result, err := fm.FieldFor(ctx, &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

@@ -293,7 +293,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(

// TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs?
for _, field := range selectedFields {
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &field, keys)
colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
if err != nil {
return nil, err
}
@@ -311,7 +311,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(

// Add order by
for _, orderBy := range query.Order {
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -509,7 +509,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
allAggChArgs := make([]any, 0)
for i, agg := range query.Aggregations {
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, start, end, agg.Expression,
ctx, agg.Expression,
uint64(query.StepInterval.Seconds()),
keys,
)
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(

var allGroupByArgs []any
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -654,7 +654,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
for idx := range query.Aggregations {
aggExpr := query.Aggregations[idx]
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, start, end, aggExpr.Expression,
ctx, aggExpr.Expression,
rateInterval,
keys,
)
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
}, start, end)

if err != nil {
return nil, err

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
ConditionBuilder: b.stmtBuilder.cb,
FieldKeys: keys,
SkipResourceFilter: true,
}, b.start, b.end,
)
if err != nil {
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -450,7 +450,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
if selectedFields[field.Name] {
continue
}
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &field, keys)
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &field, keys)
if err != nil {
b.stmtBuilder.logger.WarnContext(ctx, "failed to map select field",
"field", field.Name, "error", err)
@@ -465,7 +465,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
// Add order by support using ColumnExpressionFor
orderApplied := false
for _, orderBy := range b.operator.Order {
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &orderBy.Key.TelemetryFieldKey, keys)
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
@@ -547,8 +547,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
b.start,
b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,
@@ -574,8 +572,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
b.start,
b.end,
agg.Expression,
uint64(b.operator.StepInterval.Seconds()),
keys,
@@ -661,8 +657,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
b.start,
b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,
@@ -690,8 +684,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
b.start,
b.end,
agg.Expression,
rateInterval,
keys,
@@ -805,8 +797,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
b.start,
b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,
@@ -832,8 +822,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
b.start,
b.end,
agg.Expression,
uint64((b.end-b.start)/querybuilder.NsToSeconds),
keys,

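The scalar path above derives its rate interval from the whole window; a standalone sketch of that arithmetic, assuming querybuilder.NsToSeconds is the usual nanoseconds-per-second constant (an assumption, not confirmed by this diff):

package main

import "fmt"

const nsToSeconds = 1_000_000_000 // assumed value of querybuilder.NsToSeconds

func main() {
	// Window bounds from the tests above (epoch ms), converted to ns.
	start := uint64(1747947419000) * 1_000_000
	end := uint64(1747983448000) * 1_000_000
	fmt.Println("rate interval (s):", (end-start)/nsToSeconds)
}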
@@ -21,11 +21,11 @@ type JsonKeyToFieldFunc func(context.Context, *telemetrytypes.TelemetryFieldKey,
// FieldMapper maps the telemetry field key to the table field name.
type FieldMapper interface {
// FieldFor returns the field name for the given key.
FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error)
FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error)
// ColumnFor returns the column for the given key.
ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error)
// ColumnExpressionFor returns the column expression for the given key.
ColumnExpressionFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
ColumnExpressionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
}

// ConditionBuilder builds the condition for the filter.
@@ -37,8 +37,8 @@ type ConditionBuilder interface {

type AggExprRewriter interface {
// Rewrite rewrites the aggregation expression to be used in the query.
Rewrite(ctx context.Context, startNs, endNs uint64, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
RewriteMulti(ctx context.Context, startNs, endNs uint64, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
}

type Statement struct {

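To summarize the interface change in one place, a sketch of a no-op implementation of the new, window-free FieldMapper (a fragment assumed to live outside the qbtypes package and import it; the stub bodies are placeholders, not SigNoz code):

type noopFieldMapper struct{}

func (noopFieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
	return "", qbtypes.ErrColumnNotFound // placeholder
}

func (noopFieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
	return nil, qbtypes.ErrColumnNotFound // placeholder
}

func (noopFieldMapper) ColumnExpressionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error) {
	return "", qbtypes.ErrColumnNotFound // placeholder
}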
@@ -1,20 +0,0 @@
package telemetrytypes

import (
"context"
"time"

"github.com/SigNoz/signoz/pkg/valuer"
)

type KeyEvolutionMetadataKey struct {
BaseColumn string
BaseColumnType string
NewColumn string
NewColumnType string
ReleaseTime time.Time
}

type KeyEvolutionMetadataStore interface {
Get(ctx context.Context, orgId valuer.UUID, keyName string) []*KeyEvolutionMetadataKey
}
@@ -1,40 +0,0 @@
package telemetrytypestest

import (
"context"

"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

// MockKeyEvolutionMetadataStore implements the KeyEvolutionMetadataStore interface for testing purposes
type MockKeyEvolutionMetadataStore struct {
metadata map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey // orgId -> keyName -> metadata
}

// NewMockKeyEvolutionMetadataStore creates a new instance of MockKeyEvolutionMetadataStore with initialized maps
func NewMockKeyEvolutionMetadataStore(metadata map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey) *MockKeyEvolutionMetadataStore {
return &MockKeyEvolutionMetadataStore{
metadata: metadata,
}
}

// Get retrieves all metadata keys for the given key name and orgId.
// Returns an empty slice if the key is not found.
func (m *MockKeyEvolutionMetadataStore) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
if m.metadata == nil {
return nil
}
orgMetadata, orgExists := m.metadata[orgId.String()]
if !orgExists {
return nil
}
keys, exists := orgMetadata[keyName]
if !exists {
return nil
}
// Return a copy to prevent external modification
result := make([]*telemetrytypes.KeyEvolutionMetadataKey, len(keys))
copy(result, keys)
return result
}
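A quick standalone sketch of the two-level lookup the deleted mock implemented (plain strings stand in for the removed entry type; a nil map and a missing org or key all fall through to nil, matching the code above):

package main

import "fmt"

func main() {
	// orgId -> keyName -> entries, mirroring the deleted mock's layout.
	metadata := map[string]map[string][]string{
		"org-1": {"resources_string": {"resources_string -> resource"}},
	}
	get := func(org, key string) []string {
		orgMeta, ok := metadata[org]
		if !ok {
			return nil
		}
		return orgMeta[key] // nil when the key is absent, as in the mock
	}
	fmt.Println(get("org-1", "resources_string")) // [resources_string -> resource]
	fmt.Println(get("org-2", "resources_string")) // []
}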