Compare commits
15 Commits
issue_3017
...
feat/handl
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bd300adbc6 | ||
|
|
675728acf5 | ||
|
|
b63d1b0d1f | ||
|
|
f47b5cc4d6 | ||
|
|
f362200b22 | ||
|
|
07bb88e0ec | ||
|
|
6786767158 | ||
|
|
67082e9ff8 | ||
|
|
2040903fe5 | ||
|
|
b4dd5cb245 | ||
|
|
ee84efa73d | ||
|
|
ac11393491 | ||
|
|
9ad0ac694a | ||
|
|
e27b50c0fa | ||
|
|
4e4942f646 |
@@ -0,0 +1,80 @@
|
||||
// Container for the inline badge group rendered next to a field name.
.field-variant-badges-container {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  flex-shrink: 0;
}

.field-badge {
  // Pill showing the field's data type (e.g. "string", "number").
  &.data-type {
    display: flex;
    height: 20px;
    padding: 4px 8px;
    justify-content: center;
    align-items: center;
    gap: 4px;
    border-radius: 20px;
    // 8% tint of the theme foreground over a transparent base.
    background: color-mix(in srgb, var(--bg-vanilla-100) 8%, transparent);
    white-space: nowrap;
    font-family: Inter;
    font-size: 12px;
    font-style: normal;
    font-weight: 400;
    line-height: 18px;
    letter-spacing: -0.06px;
  }

  // Pill showing the field's context ("attribute" / "resource"),
  // rendered as a colored dot plus capitalized label.
  &.type-tag {
    display: flex;
    align-items: center;
    height: 20px;
    padding: 0px 6px;
    justify-content: center;
    gap: 4px;
    border-radius: 50px;
    text-transform: capitalize;
    white-space: nowrap;

    // Small circular indicator; color set per context modifier below.
    .dot {
      width: 6px;
      height: 6px;
      border-radius: 50%;
      flex-shrink: 0;
    }

    .text {
      font-family: Inter;
      font-size: 12px;
      font-style: normal;
      font-weight: 400;
      line-height: 18px;
      letter-spacing: -0.06px;
    }

    // Context-specific color schemes: sienna for attributes...
    &.attribute {
      background: color-mix(in srgb, var(--bg-sienna-400) 10%, transparent);
      color: var(--bg-sienna-400);

      .dot {
        background-color: var(--bg-sienna-400);
      }

      .text {
        color: var(--bg-sienna-400);
      }
    }

    // ...and aqua for resources.
    &.resource {
      background: color-mix(in srgb, var(--bg-aqua-400) 10%, transparent);
      color: var(--bg-aqua-400);

      .dot {
        background-color: var(--bg-aqua-400);
      }

      .text {
        color: var(--bg-aqua-400);
      }
    }
  }
}
|
||||
@@ -0,0 +1,69 @@
|
||||
import './FieldVariantBadges.styles.scss';
|
||||
|
||||
import cx from 'classnames';
|
||||
|
||||
/**
|
||||
* Field contexts that should display badges
|
||||
*/
|
||||
export enum AllowedFieldContext {
|
||||
Attribute = 'attribute',
|
||||
Resource = 'resource',
|
||||
}
|
||||
|
||||
const ALLOWED_FIELD_CONTEXTS = new Set<string>([
|
||||
AllowedFieldContext.Attribute,
|
||||
AllowedFieldContext.Resource,
|
||||
]);
|
||||
|
||||
interface FieldVariantBadgesProps {
|
||||
fieldDataType?: string;
|
||||
fieldContext?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if a fieldContext badge should be displayed
|
||||
* Only shows badges for contexts in ALLOWED_FIELD_CONTEXTS
|
||||
*/
|
||||
const shouldShowFieldContextBadge = (
|
||||
fieldContext: string | undefined | null,
|
||||
): boolean => {
|
||||
if (!fieldContext) {
|
||||
return false;
|
||||
}
|
||||
return ALLOWED_FIELD_CONTEXTS.has(fieldContext);
|
||||
};
|
||||
|
||||
/**
 * Renders inline badges describing a telemetry field:
 * - an optional data-type pill (e.g. "string", "number"), and
 * - an optional context pill, shown only for allowed contexts
 *   (attribute / resource — see shouldShowFieldContextBadge).
 *
 * Returns null when there is nothing to show.
 *
 * @param fieldDataType - data type label to display, if any
 * @param fieldContext  - field context; badge shown only for allowed values
 */
function FieldVariantBadges({
	fieldDataType,
	fieldContext,
}: FieldVariantBadgesProps): JSX.Element | null {
	// If neither value exists, don't render anything
	if (!fieldDataType && !fieldContext) {
		return null;
	}

	// Check if fieldContext should be displayed
	const showFieldContext =
		fieldContext && shouldShowFieldContextBadge(fieldContext);

	return (
		<span className="field-variant-badges-container">
			{fieldDataType && (
				<span className="field-badge data-type">{fieldDataType}</span>
			)}
			{/* Use <span> elements (not <section>/<div>) here: the container is a
			    <span>, and block-level flow content inside phrasing content is
			    invalid HTML that browsers may reparent. Class names are unchanged,
			    so existing styles and tests still match. */}
			{showFieldContext && (
				<span className={cx('field-badge type-tag', fieldContext)}>
					<span className="dot" />
					<span className="text">{fieldContext}</span>
				</span>
			)}
		</span>
	);
}

FieldVariantBadges.defaultProps = {
	fieldDataType: undefined,
	fieldContext: undefined,
};

export default FieldVariantBadges;
|
||||
@@ -0,0 +1,170 @@
|
||||
import { renderHook, RenderHookResult } from '@testing-library/react';
|
||||
import { ColumnType } from 'antd/es/table';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import {
|
||||
mockAllAvailableKeys,
|
||||
mockConflictingFieldsByContext,
|
||||
mockConflictingFieldsByDatatype,
|
||||
} from 'container/OptionsMenu/__tests__/mockData';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import { renderColumnHeader } from 'tests/columnHeaderHelpers';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
import { useTableView } from '../useTableView';
|
||||
|
||||
const COLUMN_UNDEFINED_ERROR = 'statusCodeColumn is undefined';
|
||||
const SERVICE_NAME_COLUMN_UNDEFINED_ERROR = 'serviceNameColumn is undefined';
|
||||
|
||||
// Mock useTimezone hook
|
||||
jest.mock('providers/Timezone', () => ({
|
||||
useTimezone: (): {
|
||||
formatTimezoneAdjustedTimestamp: (input: string | number) => string;
|
||||
} => ({
|
||||
formatTimezoneAdjustedTimestamp: jest.fn((input: string | number): string => {
|
||||
if (typeof input === 'string') {
|
||||
return new Date(input).toISOString();
|
||||
}
|
||||
return new Date(input / 1e6).toISOString();
|
||||
}),
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock useIsDarkMode hook
|
||||
jest.mock('hooks/useDarkMode', () => ({
|
||||
useIsDarkMode: (): boolean => false,
|
||||
}));
|
||||
|
||||
describe('useTableView - Column Headers', () => {
|
||||
const HTTP_STATUS_CODE = 'http.status_code';
|
||||
|
||||
const mockLogs: ILog[] = [
|
||||
({
|
||||
id: '1',
|
||||
body: 'Test log',
|
||||
timestamp: '2024-01-01T00:00:00Z',
|
||||
[HTTP_STATUS_CODE]: '200',
|
||||
} as unknown) as ILog,
|
||||
];
|
||||
|
||||
const renderUseTableView = (
|
||||
fields: TelemetryFieldKey[],
|
||||
allAvailableKeys = mockAllAvailableKeys,
|
||||
): RenderHookResult<ReturnType<typeof useTableView>, unknown> =>
|
||||
renderHook(() =>
|
||||
useTableView({
|
||||
logs: mockLogs,
|
||||
fields: fields as IField[],
|
||||
linesPerRow: 1,
|
||||
fontSize: FontSize.SMALL,
|
||||
allAvailableKeys,
|
||||
}),
|
||||
);
|
||||
|
||||
it('shows datatype in column header for conflicting columns', () => {
|
||||
const fields: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // string variant
|
||||
];
|
||||
|
||||
const { result } = renderUseTableView(fields);
|
||||
const { columns } = result.current;
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col): col is ColumnType<Record<string, unknown>> =>
|
||||
'dataIndex' in col && col.dataIndex === HTTP_STATUS_CODE,
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
expect(statusCodeColumn?.title).toBeDefined();
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
expect(container.textContent).toContain('http.status_code (string)');
|
||||
expect(container.textContent).toContain('string');
|
||||
});
|
||||
|
||||
it('shows tooltip icon when unselected conflicting variant exists', () => {
|
||||
const fields: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // Only string variant selected
|
||||
];
|
||||
|
||||
const { result } = renderUseTableView(fields, mockAllAvailableKeys); // Contains number variant
|
||||
const { columns } = result.current;
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col): col is ColumnType<Record<string, unknown>> =>
|
||||
'dataIndex' in col && col.dataIndex === HTTP_STATUS_CODE,
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is set correctly
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBe(true);
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('hides tooltip icon when all conflicting variants are selected', () => {
|
||||
const fields: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByDatatype, // Both variants selected
|
||||
];
|
||||
|
||||
const { result } = renderUseTableView(fields);
|
||||
const { columns } = result.current;
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col): col is ColumnType<Record<string, unknown>> =>
|
||||
'dataIndex' in col && col.dataIndex === HTTP_STATUS_CODE,
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is NOT set when all variants are selected
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBeUndefined();
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows context in header for attribute/resource conflicting fields', () => {
|
||||
// When same datatype but different contexts, it shows context
|
||||
const fields: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByContext[0], // resource variant
|
||||
mockConflictingFieldsByContext[1], // attribute variant - both have same datatype
|
||||
];
|
||||
|
||||
const { result } = renderUseTableView(fields);
|
||||
const { columns } = result.current;
|
||||
|
||||
const serviceNameColumn = columns.find(
|
||||
(col): col is ColumnType<Record<string, unknown>> =>
|
||||
'dataIndex' in col && col.dataIndex === 'service.name',
|
||||
);
|
||||
|
||||
expect(serviceNameColumn).toBeDefined();
|
||||
|
||||
if (!serviceNameColumn) {
|
||||
throw new Error(SERVICE_NAME_COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(serviceNameColumn);
|
||||
expect(container.textContent).toContain('service.name (resource)');
|
||||
expect(container.textContent).toContain('resource');
|
||||
});
|
||||
});
|
||||
@@ -1,4 +1,5 @@
|
||||
import { ColumnsType, ColumnType } from 'antd/es/table';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
@@ -28,6 +29,7 @@ export type UseTableViewProps = {
|
||||
activeLogIndex?: number;
|
||||
activeContextLog?: ILog | null;
|
||||
isListViewPanel?: boolean;
|
||||
allAvailableKeys?: TelemetryFieldKey[];
|
||||
} & LogsTableViewProps;
|
||||
|
||||
export type ActionsColumnProps = {
|
||||
|
||||
@@ -5,6 +5,12 @@ import { ColumnsType } from 'antd/es/table';
|
||||
import cx from 'classnames';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
|
||||
import {
|
||||
getColumnTitleWithTooltip,
|
||||
getFieldVariantsByName,
|
||||
getUniqueColumnKey,
|
||||
hasMultipleVariants,
|
||||
} from 'container/OptionsMenu/utils';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
@@ -31,6 +37,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
||||
fontSize,
|
||||
appendTo = 'center',
|
||||
isListViewPanel,
|
||||
allAvailableKeys,
|
||||
} = props;
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
@@ -50,30 +57,50 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
||||
);
|
||||
|
||||
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
||||
// Group fields by name to analyze variants
|
||||
const fieldVariantsByName = getFieldVariantsByName(fields);
|
||||
|
||||
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
|
||||
.filter((e) => !['id', 'body', 'timestamp'].includes(e.name))
|
||||
.map(({ name }) => ({
|
||||
title: name,
|
||||
dataIndex: name,
|
||||
accessorKey: name,
|
||||
id: name.toLowerCase().replace(/\./g, '_'),
|
||||
key: name,
|
||||
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: isListViewPanel
|
||||
? defaultListViewPanelStyle
|
||||
: getDefaultCellStyle(isDarkMode),
|
||||
},
|
||||
children: (
|
||||
<Typography.Paragraph
|
||||
ellipsis={{ rows: linesPerRow }}
|
||||
className={cx('paragraph', fontSize)}
|
||||
>
|
||||
{field}
|
||||
</Typography.Paragraph>
|
||||
),
|
||||
}),
|
||||
}));
|
||||
.map((field) => {
|
||||
const hasVariants = hasMultipleVariants(
|
||||
field.name || '',
|
||||
fields,
|
||||
allAvailableKeys,
|
||||
);
|
||||
const variants = fieldVariantsByName[field.name] || [];
|
||||
const { title, hasUnselectedConflict } = getColumnTitleWithTooltip(
|
||||
field,
|
||||
hasVariants,
|
||||
variants,
|
||||
fields,
|
||||
allAvailableKeys,
|
||||
);
|
||||
return {
|
||||
title,
|
||||
dataIndex: field.name,
|
||||
accessorKey: field.name,
|
||||
id: getUniqueColumnKey(field),
|
||||
key: getUniqueColumnKey(field),
|
||||
// Store metadata for header enhancement (will be rendered via custom header component)
|
||||
...(hasUnselectedConflict && { _hasUnselectedConflict: true }),
|
||||
render: (cellField): ColumnTypeRender<Record<string, unknown>> => ({
|
||||
props: {
|
||||
style: isListViewPanel
|
||||
? defaultListViewPanelStyle
|
||||
: getDefaultCellStyle(isDarkMode),
|
||||
},
|
||||
children: (
|
||||
<Typography.Paragraph
|
||||
ellipsis={{ rows: linesPerRow }}
|
||||
className={cx('paragraph', fontSize)}
|
||||
>
|
||||
{cellField}
|
||||
</Typography.Paragraph>
|
||||
),
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
if (isListViewPanel) {
|
||||
return [...fieldColumns];
|
||||
@@ -177,6 +204,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
||||
fontSize,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
bodyColumnStyle,
|
||||
allAvailableKeys,
|
||||
]);
|
||||
|
||||
return { columns, dataSource: flattenLogData };
|
||||
|
||||
@@ -314,6 +314,23 @@
|
||||
background-color: var(--bg-ink-200);
|
||||
cursor: pointer;
|
||||
}
|
||||
.name-wrapper {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
width: 100%;
|
||||
min-width: 0;
|
||||
.name {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
@@ -402,12 +419,20 @@
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.name {
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
.name-wrapper {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: calc(100% - 26px);
|
||||
gap: 8px;
|
||||
min-width: 0;
|
||||
.name {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
@@ -6,8 +6,14 @@ import './LogsFormatOptionsMenu.styles.scss';
|
||||
import { Button, Input, InputNumber, Popover, Tooltip, Typography } from 'antd';
|
||||
import { DefaultOptionType } from 'antd/es/select';
|
||||
import cx from 'classnames';
|
||||
import FieldVariantBadges from 'components/FieldVariantBadges/FieldVariantBadges';
|
||||
import { LogViewMode } from 'container/LogsTable';
|
||||
import { FontSize, OptionsMenuConfig } from 'container/OptionsMenu/types';
|
||||
import {
|
||||
getNamesWithVariants,
|
||||
getUniqueColumnKey,
|
||||
hasMultipleVariants,
|
||||
} from 'container/OptionsMenu/utils';
|
||||
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||
import {
|
||||
Check,
|
||||
@@ -26,6 +32,7 @@ interface LogsFormatOptionsMenuProps {
|
||||
config: OptionsMenuConfig;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function OptionsMenu({
|
||||
items,
|
||||
selectedOptionFormat,
|
||||
@@ -50,6 +57,11 @@ function OptionsMenu({
|
||||
const listRef = useRef<HTMLDivElement>(null);
|
||||
const initialMouseEnterRef = useRef<boolean>(false);
|
||||
|
||||
// Detect which column names have multiple variants in dropdown options
|
||||
const namesWithVariantsInOptions = getNamesWithVariants(
|
||||
addColumn?.options || [],
|
||||
);
|
||||
|
||||
const onChange = useCallback(
|
||||
(key: LogViewMode) => {
|
||||
if (!format) return;
|
||||
@@ -301,33 +313,46 @@ function OptionsMenu({
|
||||
)}
|
||||
|
||||
<div className="column-format-new-options" ref={listRef}>
|
||||
{addColumn?.options?.map(({ label, value }, index) => (
|
||||
<div
|
||||
className={cx('column-name', value === selectedValue && 'selected')}
|
||||
key={value}
|
||||
onMouseEnter={(): void => {
|
||||
if (!initialMouseEnterRef.current) {
|
||||
setSelectedValue(value as string | null);
|
||||
}
|
||||
{addColumn?.options?.map((option, index) => {
|
||||
const { label, value, fieldDataType, fieldContext } = option;
|
||||
return (
|
||||
<div
|
||||
className={cx('column-name', value === selectedValue && 'selected')}
|
||||
key={value}
|
||||
onMouseEnter={(): void => {
|
||||
if (!initialMouseEnterRef.current) {
|
||||
setSelectedValue(value as string | null);
|
||||
}
|
||||
|
||||
initialMouseEnterRef.current = true;
|
||||
}}
|
||||
onMouseMove={(): void => {
|
||||
// this is added to handle the mouse move explicit event and not the re-rendered on mouse enter event
|
||||
setSelectedValue(value as string | null);
|
||||
}}
|
||||
onClick={(eve): void => {
|
||||
eve.stopPropagation();
|
||||
handleColumnSelection(index, addColumn?.options || []);
|
||||
}}
|
||||
>
|
||||
<div className="name">
|
||||
<Tooltip placement="left" title={label}>
|
||||
{label}
|
||||
</Tooltip>
|
||||
initialMouseEnterRef.current = true;
|
||||
}}
|
||||
onMouseMove={(): void => {
|
||||
// this is added to handle the mouse move explicit event and not the re-rendered on mouse enter event
|
||||
setSelectedValue(value as string | null);
|
||||
}}
|
||||
onClick={(eve): void => {
|
||||
eve.stopPropagation();
|
||||
handleColumnSelection(index, addColumn?.options || []);
|
||||
}}
|
||||
>
|
||||
<div className="name-wrapper">
|
||||
<Tooltip placement="left" title={label}>
|
||||
<span className="name">{label}</span>
|
||||
</Tooltip>
|
||||
{fieldDataType &&
|
||||
typeof label === 'string' &&
|
||||
namesWithVariantsInOptions.has(label) && (
|
||||
<span className="field-variant-badges">
|
||||
<FieldVariantBadges
|
||||
fieldDataType={fieldDataType}
|
||||
fieldContext={fieldContext}
|
||||
/>
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -416,22 +441,38 @@ function OptionsMenu({
|
||||
)}
|
||||
|
||||
<div className="column-format">
|
||||
{addColumn?.value?.map(({ name }) => (
|
||||
<div className="column-name" key={name}>
|
||||
<div className="name">
|
||||
<Tooltip placement="left" title={name}>
|
||||
{name}
|
||||
{addColumn?.value?.map((column) => {
|
||||
const uniqueKey = getUniqueColumnKey(column);
|
||||
const showBadge = hasMultipleVariants(
|
||||
column.name || '',
|
||||
addColumn?.value || [],
|
||||
addColumn?.allAvailableKeys,
|
||||
);
|
||||
return (
|
||||
<div className="column-name" key={uniqueKey}>
|
||||
<Tooltip placement="left" title={column.name}>
|
||||
<div className="name-wrapper">
|
||||
<span className="name">{column.name}</span>
|
||||
{showBadge && (
|
||||
<span className="field-variant-badges">
|
||||
<FieldVariantBadges
|
||||
fieldDataType={column.fieldDataType}
|
||||
fieldContext={column.fieldContext}
|
||||
/>
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</Tooltip>
|
||||
{addColumn?.value?.length > 1 && (
|
||||
<X
|
||||
className="delete-btn"
|
||||
size={14}
|
||||
onClick={(): void => addColumn.onRemove(uniqueKey)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
{addColumn?.value?.length > 1 && (
|
||||
<X
|
||||
className="delete-btn"
|
||||
size={14}
|
||||
onClick={(): void => addColumn.onRemove(name)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
);
|
||||
})}
|
||||
{addColumn && addColumn?.value?.length === 0 && (
|
||||
<div className="column-name no-columns-selected">
|
||||
No columns selected
|
||||
|
||||
@@ -0,0 +1,198 @@
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import {
|
||||
mockAllAvailableKeys,
|
||||
mockConflictingFieldsByContext,
|
||||
mockConflictingFieldsByDatatype,
|
||||
} from 'container/OptionsMenu/__tests__/mockData';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import { getOptionsFromKeys } from 'container/OptionsMenu/utils';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
|
||||
import LogsFormatOptionsMenu from '../LogsFormatOptionsMenu';
|
||||
|
||||
const mockUpdateFormatting = jest.fn();
|
||||
const mockUpdateColumns = jest.fn();
|
||||
|
||||
jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
|
||||
usePreferenceSync: (): any => ({
|
||||
preferences: {
|
||||
columns: [],
|
||||
formatting: {
|
||||
maxLines: 2,
|
||||
format: 'table',
|
||||
fontSize: 'small',
|
||||
version: 1,
|
||||
},
|
||||
},
|
||||
loading: false,
|
||||
error: null,
|
||||
updateColumns: mockUpdateColumns,
|
||||
updateFormatting: mockUpdateFormatting,
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('LogsFormatOptionsMenu - Badge Display', () => {
|
||||
const FORMAT_BUTTON_TEST_ID = 'periscope-btn-format-options';
|
||||
const HTTP_STATUS_CODE = 'http.status_code';
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
function setup(configOverrides = {}): any {
|
||||
const items = [
|
||||
{ key: 'raw', label: 'Raw', data: { title: 'max lines per row' } },
|
||||
{ key: 'list', label: 'Default' },
|
||||
{ key: 'table', label: 'Column', data: { title: 'columns' } },
|
||||
];
|
||||
|
||||
const formatOnChange = jest.fn();
|
||||
const maxLinesOnChange = jest.fn();
|
||||
const fontSizeOnChange = jest.fn();
|
||||
const onSelect = jest.fn();
|
||||
const onRemove = jest.fn();
|
||||
const onSearch = jest.fn();
|
||||
const onFocus = jest.fn();
|
||||
const onBlur = jest.fn();
|
||||
|
||||
const defaultConfig = {
|
||||
format: { value: 'table', onChange: formatOnChange },
|
||||
maxLines: { value: 2, onChange: maxLinesOnChange },
|
||||
fontSize: { value: FontSize.SMALL, onChange: fontSizeOnChange },
|
||||
addColumn: {
|
||||
isFetching: false,
|
||||
value: [],
|
||||
options: [],
|
||||
onFocus,
|
||||
onBlur,
|
||||
onSearch,
|
||||
onSelect,
|
||||
onRemove,
|
||||
allAvailableKeys: mockAllAvailableKeys,
|
||||
...configOverrides,
|
||||
},
|
||||
};
|
||||
|
||||
const { getByTestId } = render(
|
||||
<LogsFormatOptionsMenu
|
||||
items={items}
|
||||
selectedOptionFormat="table"
|
||||
config={defaultConfig}
|
||||
/>,
|
||||
);
|
||||
|
||||
return {
|
||||
getByTestId,
|
||||
formatOnChange,
|
||||
maxLinesOnChange,
|
||||
fontSizeOnChange,
|
||||
onSelect,
|
||||
onRemove,
|
||||
onSearch,
|
||||
onFocus,
|
||||
onBlur,
|
||||
};
|
||||
}
|
||||
|
||||
it('shows badges in dropdown options when searching for conflicting attributes', () => {
|
||||
const options = getOptionsFromKeys(mockConflictingFieldsByDatatype, []);
|
||||
|
||||
expect(options).toBeDefined();
|
||||
expect(options).toHaveLength(2);
|
||||
expect(options?.[0]?.hasMultipleVariants).toBe(true);
|
||||
expect(options?.[1]?.hasMultipleVariants).toBe(true);
|
||||
expect(options?.[0]?.fieldDataType).toBe('string');
|
||||
expect(options?.[1]?.fieldDataType).toBe('number');
|
||||
});
|
||||
|
||||
it('shows badges in selected columns list after selecting conflicting attribute', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // Only string variant selected
|
||||
];
|
||||
|
||||
const { getByTestId } = setup({
|
||||
value: selectedColumns,
|
||||
});
|
||||
|
||||
// Open the popover menu
|
||||
const formatButton = getByTestId(FORMAT_BUTTON_TEST_ID);
|
||||
await user.click(formatButton);
|
||||
|
||||
// Wait for selected columns section to appear
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(HTTP_STATUS_CODE)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Badge should appear even though only one variant is selected
|
||||
// because allAvailableKeys contains the conflicting variant
|
||||
const datatypeBadge = screen.queryByText('string');
|
||||
expect(datatypeBadge).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows context badge only for attribute/resource conflicting fields', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByContext[0], // resource variant
|
||||
];
|
||||
|
||||
const { getByTestId } = setup({
|
||||
value: selectedColumns,
|
||||
});
|
||||
|
||||
// Open the popover menu
|
||||
const formatButton = getByTestId(FORMAT_BUTTON_TEST_ID);
|
||||
await user.click(formatButton);
|
||||
|
||||
// Wait for selected columns section
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('service.name')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Context badge should appear for resource
|
||||
const contextBadge = screen.queryByText('resource');
|
||||
expect(contextBadge).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows datatype badge for conflicting fields', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
{
|
||||
name: HTTP_STATUS_CODE,
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'span', // span context
|
||||
signal: 'traces',
|
||||
},
|
||||
];
|
||||
|
||||
const { getByTestId } = setup({
|
||||
value: selectedColumns,
|
||||
allAvailableKeys: [
|
||||
...mockAllAvailableKeys,
|
||||
{
|
||||
name: HTTP_STATUS_CODE,
|
||||
fieldDataType: 'number',
|
||||
fieldContext: 'span',
|
||||
signal: 'traces',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Open the popover menu
|
||||
const formatButton = getByTestId(FORMAT_BUTTON_TEST_ID);
|
||||
await user.click(formatButton);
|
||||
|
||||
// Wait for selected columns section
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(HTTP_STATUS_CODE)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Datatype badge should appear
|
||||
const datatypeBadge = screen.queryByText('string');
|
||||
expect(datatypeBadge).toBeInTheDocument();
|
||||
|
||||
// Context badge should NOT appear for span context
|
||||
const contextBadge = screen.queryByText('span');
|
||||
expect(contextBadge).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -1,13 +1,18 @@
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
|
||||
import { Table } from 'antd';
|
||||
import { InfoCircleOutlined } from '@ant-design/icons';
|
||||
import { Table, Tooltip } from 'antd';
|
||||
import { ColumnsType } from 'antd/lib/table';
|
||||
import cx from 'classnames';
|
||||
import {
|
||||
ColumnTitleIcon,
|
||||
ColumnTitleWrapper,
|
||||
} from 'container/OptionsMenu/styles';
|
||||
import { dragColumnParams } from 'hooks/useDragColumns/configs';
|
||||
import { getColumnWidth, RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { debounce, set } from 'lodash-es';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import {
|
||||
import React, {
|
||||
SyntheticEvent,
|
||||
useCallback,
|
||||
useEffect,
|
||||
@@ -71,20 +76,48 @@ function ResizeTable({
|
||||
|
||||
const mergedColumns = useMemo(
|
||||
() =>
|
||||
columnsData.map((col, index) => ({
|
||||
...col,
|
||||
...(onDragColumn && {
|
||||
title: (
|
||||
<DragSpanStyle className="dragHandler">
|
||||
{col?.title?.toString() || ''}
|
||||
</DragSpanStyle>
|
||||
),
|
||||
}),
|
||||
onHeaderCell: (column: ColumnsType<unknown>[number]): unknown => ({
|
||||
width: column.width,
|
||||
onResize: handleResize(index),
|
||||
}),
|
||||
})) as ColumnsType<any>,
|
||||
columnsData.map((col, index) => {
|
||||
const columnRecord = col as Record<string, unknown>;
|
||||
const hasUnselectedConflict = columnRecord._hasUnselectedConflict === true;
|
||||
const titleText = col?.title?.toString();
|
||||
|
||||
// Render tooltip icon when there's a conflict, regardless of drag functionality
|
||||
// Only wrap in DragSpanStyle when drag is enabled
|
||||
const tooltipIcon = hasUnselectedConflict ? (
|
||||
<Tooltip title="The same column with a different type or context exists">
|
||||
<ColumnTitleIcon>
|
||||
<InfoCircleOutlined />
|
||||
</ColumnTitleIcon>
|
||||
</Tooltip>
|
||||
) : null;
|
||||
|
||||
const titleWithWrapper = (
|
||||
<ColumnTitleWrapper>
|
||||
{titleText}
|
||||
{tooltipIcon}
|
||||
</ColumnTitleWrapper>
|
||||
);
|
||||
|
||||
let titleElement: React.ReactNode = titleText;
|
||||
if (hasUnselectedConflict || onDragColumn) {
|
||||
if (onDragColumn) {
|
||||
titleElement = (
|
||||
<DragSpanStyle className="dragHandler">{titleWithWrapper}</DragSpanStyle>
|
||||
);
|
||||
} else {
|
||||
titleElement = titleWithWrapper;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...col,
|
||||
title: titleElement,
|
||||
onHeaderCell: (column: ColumnsType<unknown>[number]): unknown => ({
|
||||
width: column.width,
|
||||
onResize: handleResize(index),
|
||||
}),
|
||||
};
|
||||
}) as ColumnsType<RowData>,
|
||||
[columnsData, onDragColumn, handleResize],
|
||||
);
|
||||
|
||||
|
||||
@@ -1,8 +1,14 @@
|
||||
import { InfoCircleOutlined } from '@ant-design/icons';
|
||||
import { Tooltip } from 'antd';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import { VIEW_TYPES } from 'components/LogDetail/constants';
|
||||
import { getLogIndicatorType } from 'components/Logs/LogStateIndicator/utils';
|
||||
import { useTableView } from 'components/Logs/TableView/useTableView';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import {
|
||||
ColumnTitleIcon,
|
||||
ColumnTitleWrapper,
|
||||
} from 'container/OptionsMenu/styles';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
@@ -127,6 +133,12 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
.filter((column) => column.key)
|
||||
.map((column) => {
|
||||
const isDragColumn = column.key !== 'expand';
|
||||
const columnRecord = column as Record<string, unknown>;
|
||||
const hasUnselectedConflict =
|
||||
columnRecord._hasUnselectedConflict === true;
|
||||
const titleText = (column.title as string).replace(/^\w/, (c) =>
|
||||
c.toUpperCase(),
|
||||
);
|
||||
|
||||
return (
|
||||
<TableHeaderCellStyled
|
||||
@@ -139,7 +151,16 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
{...(isDragColumn && { className: `dragHandler ${column.key}` })}
|
||||
columnKey={column.key as string}
|
||||
>
|
||||
{(column.title as string).replace(/^\w/, (c) => c.toUpperCase())}
|
||||
<ColumnTitleWrapper>
|
||||
{titleText}
|
||||
{hasUnselectedConflict && (
|
||||
<Tooltip title="The same column with a different type or context exists">
|
||||
<ColumnTitleIcon>
|
||||
<InfoCircleOutlined />
|
||||
</ColumnTitleIcon>
|
||||
</Tooltip>
|
||||
)}
|
||||
</ColumnTitleWrapper>
|
||||
</TableHeaderCellStyled>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -60,7 +60,7 @@ function LogsExplorerList({
|
||||
onSetActiveLog,
|
||||
} = useActiveLog();
|
||||
|
||||
const { options } = useOptionsMenu({
|
||||
const { options, config } = useOptionsMenu({
|
||||
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator:
|
||||
@@ -147,6 +147,7 @@ function LogsExplorerList({
|
||||
fontSize: options.fontSize,
|
||||
appendTo: 'end',
|
||||
activeLogIndex,
|
||||
allAvailableKeys: config.addColumn?.allAvailableKeys,
|
||||
}}
|
||||
infitiyTableProps={{ onEndReached }}
|
||||
/>
|
||||
@@ -195,6 +196,7 @@ function LogsExplorerList({
|
||||
onEndReached,
|
||||
getItemContent,
|
||||
selectedFields,
|
||||
config.addColumn?.allAvailableKeys,
|
||||
]);
|
||||
|
||||
const isTraceToLogsNavigation = useMemo(() => {
|
||||
|
||||
@@ -7,9 +7,11 @@ import { ResizeTable } from 'components/ResizeTable';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import Controls from 'container/Controls';
|
||||
import { extractTelemetryFieldKeys } from 'container/OptionsMenu/utils';
|
||||
import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs';
|
||||
import { tableStyles } from 'container/TracesExplorer/ListView/styles';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKeySuggestions';
|
||||
import { useLogsData } from 'hooks/useLogsData';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
@@ -27,6 +29,7 @@ import { UseQueryResult } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataSource, LogsAggregatorOperator } from 'types/common/queryBuilder';
|
||||
|
||||
import { getLogPanelColumnsList } from './utils';
|
||||
|
||||
@@ -59,14 +62,31 @@ function LogsPanelComponent({
|
||||
|
||||
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
||||
|
||||
// Fetch available keys to detect variants
|
||||
|
||||
const { data: keysData } = useGetQueryKeySuggestions(
|
||||
{
|
||||
searchText: '',
|
||||
signal: DataSource.LOGS,
|
||||
},
|
||||
{
|
||||
queryKey: [DataSource.LOGS, LogsAggregatorOperator.NOOP, ''],
|
||||
},
|
||||
);
|
||||
|
||||
// Extract all available keys from API response
|
||||
const allAvailableKeys = useMemo(() => extractTelemetryFieldKeys(keysData), [
|
||||
keysData,
|
||||
]);
|
||||
|
||||
const columns = useMemo(
|
||||
() =>
|
||||
getLogPanelColumnsList(
|
||||
widget.selectedLogFields,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
allAvailableKeys,
|
||||
),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[widget.selectedLogFields],
|
||||
[widget.selectedLogFields, formatTimezoneAdjustedTimestamp, allAvailableKeys],
|
||||
);
|
||||
|
||||
const dataLength =
|
||||
|
||||
@@ -0,0 +1,107 @@
|
||||
import { mockAllAvailableKeys } from 'container/OptionsMenu/__tests__/mockData';
|
||||
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
|
||||
import { renderColumnHeader } from 'tests/columnHeaderHelpers';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
|
||||
import { getLogPanelColumnsList } from '../utils';
|
||||
|
||||
const COLUMN_UNDEFINED_ERROR = 'statusCodeColumn is undefined';
|
||||
|
||||
// Mock the timezone formatter
|
||||
const mockFormatTimezoneAdjustedTimestamp = jest.fn(
|
||||
(input: TimestampInput): string => {
|
||||
if (typeof input === 'string') {
|
||||
return new Date(input).toISOString();
|
||||
}
|
||||
if (typeof input === 'number') {
|
||||
return new Date(input / 1e6).toISOString();
|
||||
}
|
||||
return new Date(input).toISOString();
|
||||
},
|
||||
);
|
||||
|
||||
describe('getLogPanelColumnsList - Column Headers', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('shows tooltip icon when conflicting variant exists in allAvailableKeys', () => {
|
||||
// Even with single variant selected, tooltip should appear if conflicting variant exists
|
||||
const selectedLogFields: IField[] = [
|
||||
{
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
name: 'http.status_code',
|
||||
dataType: 'string',
|
||||
type: 'attribute',
|
||||
} as IField,
|
||||
];
|
||||
|
||||
const columns = getLogPanelColumnsList(
|
||||
selectedLogFields,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys, // Contains number variant
|
||||
);
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col) => 'dataIndex' in col && col.dataIndex === 'http.status_code',
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
expect(statusCodeColumn?.title).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is set correctly
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBe(true);
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
expect(container.textContent).toContain('http.status_code (string)');
|
||||
|
||||
// Tooltip icon should appear
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('hides tooltip icon when all conflicting variants are selected', () => {
|
||||
const selectedLogFields: IField[] = [
|
||||
{
|
||||
name: 'http.status_code',
|
||||
dataType: 'string',
|
||||
type: 'attribute',
|
||||
} as IField,
|
||||
{
|
||||
name: 'http.status_code',
|
||||
dataType: 'number',
|
||||
type: 'attribute',
|
||||
} as IField,
|
||||
];
|
||||
|
||||
const columns = getLogPanelColumnsList(
|
||||
selectedLogFields,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col) => 'dataIndex' in col && col.dataIndex === 'http.status_code',
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is NOT set when all variants are selected
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBeUndefined();
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,12 @@
|
||||
import { ColumnsType } from 'antd/es/table';
|
||||
import { Typography } from 'antd/lib';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import {
|
||||
getColumnTitleWithTooltip,
|
||||
getFieldVariantsByName,
|
||||
getUniqueColumnKey,
|
||||
hasMultipleVariants,
|
||||
} from 'container/OptionsMenu/utils';
|
||||
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
|
||||
// import Typography from 'antd/es/typography/Typography';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
@@ -13,17 +20,35 @@ export const getLogPanelColumnsList = (
|
||||
input: TimestampInput,
|
||||
format?: string,
|
||||
) => string,
|
||||
allAvailableKeys?: TelemetryFieldKey[],
|
||||
): ColumnsType<RowData> => {
|
||||
const initialColumns: ColumnsType<RowData> = [];
|
||||
|
||||
// Group fields by name to analyze variants
|
||||
const fieldVariantsByName = getFieldVariantsByName(selectedLogFields || []);
|
||||
|
||||
const columns: ColumnsType<RowData> =
|
||||
selectedLogFields?.map((field: IField) => {
|
||||
const { name } = field;
|
||||
const hasVariants = hasMultipleVariants(
|
||||
name,
|
||||
selectedLogFields || [],
|
||||
allAvailableKeys,
|
||||
);
|
||||
const variants = fieldVariantsByName[name] || [];
|
||||
const { title, hasUnselectedConflict } = getColumnTitleWithTooltip(
|
||||
field,
|
||||
hasVariants,
|
||||
variants,
|
||||
selectedLogFields || [],
|
||||
allAvailableKeys,
|
||||
);
|
||||
|
||||
return {
|
||||
title: name,
|
||||
title,
|
||||
dataIndex: name,
|
||||
key: name,
|
||||
key: getUniqueColumnKey(field),
|
||||
...(hasUnselectedConflict && { _hasUnselectedConflict: true }),
|
||||
width: name === 'body' ? 350 : 100,
|
||||
render: (value: ReactNode): JSX.Element => {
|
||||
if (name === 'timestamp') {
|
||||
|
||||
@@ -1,16 +1,27 @@
|
||||
import { Checkbox, Empty } from 'antd';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import FieldVariantBadges from 'components/FieldVariantBadges/FieldVariantBadges';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { EXCLUDED_COLUMNS } from 'container/OptionsMenu/constants';
|
||||
import { QueryKeySuggestionsResponseProps } from 'types/api/querySuggestions/types';
|
||||
import {
|
||||
getUniqueColumnKey,
|
||||
getVariantCounts,
|
||||
} from 'container/OptionsMenu/utils';
|
||||
import {
|
||||
QueryKeyDataSuggestionsProps,
|
||||
QueryKeySuggestionsResponseProps,
|
||||
} from 'types/api/querySuggestions/types';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
type ExplorerAttributeColumnsProps = {
|
||||
isLoading: boolean;
|
||||
data: AxiosResponse<QueryKeySuggestionsResponseProps> | undefined;
|
||||
searchText: string;
|
||||
isAttributeKeySelected: (key: string) => boolean;
|
||||
handleCheckboxChange: (key: string) => void;
|
||||
isAttributeKeySelected: (
|
||||
attributeKey: QueryKeyDataSuggestionsProps,
|
||||
) => boolean;
|
||||
handleCheckboxChange: (attributeKey: QueryKeyDataSuggestionsProps) => void;
|
||||
dataSource: DataSource;
|
||||
};
|
||||
|
||||
@@ -38,6 +49,12 @@ function ExplorerAttributeColumns({
|
||||
attributeKey.name.toLowerCase().includes(searchText.toLowerCase()) &&
|
||||
!EXCLUDED_COLUMNS[dataSource].includes(attributeKey.name),
|
||||
) || [];
|
||||
|
||||
// Detect which column names have multiple variants
|
||||
const nameCounts = getVariantCounts(
|
||||
filteredAttributeKeys as TelemetryFieldKey[],
|
||||
);
|
||||
|
||||
if (filteredAttributeKeys.length === 0) {
|
||||
return (
|
||||
<div className="attribute-columns">
|
||||
@@ -48,16 +65,26 @@ function ExplorerAttributeColumns({
|
||||
|
||||
return (
|
||||
<div className="attribute-columns">
|
||||
{filteredAttributeKeys.map((attributeKey: any) => (
|
||||
<Checkbox
|
||||
checked={isAttributeKeySelected(attributeKey.name)}
|
||||
onChange={(): void => handleCheckboxChange(attributeKey.name)}
|
||||
style={{ padding: 0 }}
|
||||
key={attributeKey.name}
|
||||
>
|
||||
{attributeKey.name}
|
||||
</Checkbox>
|
||||
))}
|
||||
{filteredAttributeKeys.map((attributeKey) => {
|
||||
const hasVariants = nameCounts[attributeKey.name] > 1;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={isAttributeKeySelected(attributeKey)}
|
||||
onChange={(): void => handleCheckboxChange(attributeKey)}
|
||||
key={getUniqueColumnKey(attributeKey)}
|
||||
>
|
||||
<span className="attribute-column-label-wrapper">
|
||||
<span>{attributeKey.name}</span>
|
||||
{hasVariants && (
|
||||
<FieldVariantBadges
|
||||
fieldDataType={attributeKey.fieldDataType}
|
||||
fieldContext={attributeKey.fieldContext}
|
||||
/>
|
||||
)}
|
||||
</span>
|
||||
</Checkbox>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -60,6 +60,13 @@
|
||||
font-family: Inter;
|
||||
font-size: 12px;
|
||||
cursor: grab;
|
||||
|
||||
.column-name-wrapper,
|
||||
.badges-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
}
|
||||
|
||||
.lucide-trash2 {
|
||||
@@ -114,6 +121,16 @@
|
||||
flex-direction: column;
|
||||
height: 160px;
|
||||
overflow: scroll;
|
||||
|
||||
.ant-checkbox-wrapper {
|
||||
padding: 0 !important;
|
||||
|
||||
.attribute-column-label-wrapper {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.attribute-columns::-webkit-scrollbar {
|
||||
|
||||
@@ -6,8 +6,13 @@ import './ExplorerColumnsRenderer.styles.scss';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button, Divider, Dropdown, Input, Tooltip, Typography } from 'antd';
|
||||
import { MenuProps } from 'antd/lib';
|
||||
import { FieldDataType } from 'api/v5/v5';
|
||||
import { FieldDataType, TelemetryFieldKey } from 'api/v5/v5';
|
||||
import FieldVariantBadges from 'components/FieldVariantBadges/FieldVariantBadges';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import {
|
||||
getUniqueColumnKey,
|
||||
getVariantCounts,
|
||||
} from 'container/OptionsMenu/utils';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKeySuggestions';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
@@ -26,6 +31,7 @@ import {
|
||||
Droppable,
|
||||
DropResult,
|
||||
} from 'react-beautiful-dnd';
|
||||
import { IField } from 'types/api/logs/fields';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import { WidgetGraphProps } from '../types';
|
||||
@@ -82,64 +88,87 @@ function ExplorerColumnsRenderer({
|
||||
},
|
||||
);
|
||||
|
||||
const isAttributeKeySelected = (key: string): boolean => {
|
||||
const isAttributeKeySelected = (attribute: any): boolean => {
|
||||
const uniqueKey = getUniqueColumnKey(attribute);
|
||||
|
||||
if (initialDataSource === DataSource.LOGS && selectedLogFields) {
|
||||
return selectedLogFields.some((field) => field.name === key);
|
||||
return selectedLogFields.some(
|
||||
(field) => getUniqueColumnKey(field) === uniqueKey,
|
||||
);
|
||||
}
|
||||
if (initialDataSource === DataSource.TRACES && selectedTracesFields) {
|
||||
return selectedTracesFields.some((field) => field.name === key);
|
||||
return selectedTracesFields.some(
|
||||
(field) => getUniqueColumnKey(field) === uniqueKey,
|
||||
);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const handleCheckboxChange = (key: string): void => {
|
||||
const handleCheckboxChange = (attribute: any): void => {
|
||||
const uniqueKey = getUniqueColumnKey(attribute);
|
||||
|
||||
if (
|
||||
initialDataSource === DataSource.LOGS &&
|
||||
setSelectedLogFields !== undefined
|
||||
) {
|
||||
if (selectedLogFields) {
|
||||
if (isAttributeKeySelected(key)) {
|
||||
if (isAttributeKeySelected(attribute)) {
|
||||
setSelectedLogFields(
|
||||
selectedLogFields.filter((field) => field.name !== key),
|
||||
selectedLogFields.filter(
|
||||
(field) => getUniqueColumnKey(field) !== uniqueKey,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
setSelectedLogFields([
|
||||
...selectedLogFields,
|
||||
{ dataType: 'string', name: key, type: '' },
|
||||
{
|
||||
name: attribute.name,
|
||||
dataType: attribute.fieldDataType || 'string',
|
||||
type: attribute.fieldContext || '',
|
||||
fieldDataType: attribute.fieldDataType || 'string',
|
||||
fieldContext: attribute.fieldContext || '',
|
||||
} as IField & { fieldDataType: string; fieldContext: string },
|
||||
]);
|
||||
}
|
||||
} else {
|
||||
setSelectedLogFields([{ dataType: 'string', name: key, type: '' }]);
|
||||
setSelectedLogFields([
|
||||
{
|
||||
name: attribute.name,
|
||||
dataType: attribute.fieldDataType || 'string',
|
||||
type: attribute.fieldContext || '',
|
||||
fieldDataType: attribute.fieldDataType || 'string',
|
||||
fieldContext: attribute.fieldContext || '',
|
||||
} as IField & { fieldDataType: string; fieldContext: string },
|
||||
]);
|
||||
}
|
||||
} else if (
|
||||
initialDataSource === DataSource.TRACES &&
|
||||
setSelectedTracesFields !== undefined
|
||||
) {
|
||||
const selectedField = Object.values(data?.data?.data?.keys || {})
|
||||
?.flat()
|
||||
?.find((attributeKey) => attributeKey.name === key);
|
||||
|
||||
if (selectedTracesFields) {
|
||||
if (isAttributeKeySelected(key)) {
|
||||
if (isAttributeKeySelected(attribute)) {
|
||||
setSelectedTracesFields(
|
||||
selectedTracesFields.filter((field) => field.name !== key),
|
||||
selectedTracesFields.filter(
|
||||
(field) => getUniqueColumnKey(field) !== uniqueKey,
|
||||
),
|
||||
);
|
||||
} else if (selectedField) {
|
||||
} else {
|
||||
setSelectedTracesFields([
|
||||
...selectedTracesFields,
|
||||
{
|
||||
...selectedField,
|
||||
fieldDataType: selectedField.fieldDataType as FieldDataType,
|
||||
...attribute,
|
||||
fieldDataType: attribute.fieldDataType as FieldDataType,
|
||||
},
|
||||
]);
|
||||
}
|
||||
} else if (selectedField)
|
||||
} else {
|
||||
setSelectedTracesFields([
|
||||
{
|
||||
...selectedField,
|
||||
fieldDataType: selectedField.fieldDataType as FieldDataType,
|
||||
...attribute,
|
||||
fieldDataType: attribute.fieldDataType as FieldDataType,
|
||||
},
|
||||
]);
|
||||
}
|
||||
}
|
||||
setOpen(false);
|
||||
};
|
||||
@@ -189,14 +218,18 @@ function ExplorerColumnsRenderer({
|
||||
},
|
||||
];
|
||||
|
||||
const removeSelectedLogField = (name: string): void => {
|
||||
const removeSelectedLogField = (field: any): void => {
|
||||
const uniqueKey = getUniqueColumnKey(field);
|
||||
|
||||
if (
|
||||
initialDataSource === DataSource.LOGS &&
|
||||
setSelectedLogFields &&
|
||||
selectedLogFields
|
||||
) {
|
||||
setSelectedLogFields(
|
||||
selectedLogFields.filter((field) => field.name !== name),
|
||||
selectedLogFields.filter(
|
||||
(field) => getUniqueColumnKey(field) !== uniqueKey,
|
||||
),
|
||||
);
|
||||
}
|
||||
if (
|
||||
@@ -205,7 +238,9 @@ function ExplorerColumnsRenderer({
|
||||
selectedTracesFields
|
||||
) {
|
||||
setSelectedTracesFields(
|
||||
selectedTracesFields.filter((field) => field.name !== name),
|
||||
selectedTracesFields.filter(
|
||||
(field) => getUniqueColumnKey(field) !== uniqueKey,
|
||||
),
|
||||
);
|
||||
}
|
||||
};
|
||||
@@ -248,6 +283,11 @@ function ExplorerColumnsRenderer({
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
// Detect which column names have multiple variants from API data
|
||||
const allAttributeKeys =
|
||||
Object.values(data?.data?.data?.keys || {})?.flat() || [];
|
||||
const nameCounts = getVariantCounts(allAttributeKeys as TelemetryFieldKey[]);
|
||||
|
||||
return (
|
||||
<div className="explorer-columns-renderer">
|
||||
<div className="title">
|
||||
@@ -271,7 +311,7 @@ function ExplorerColumnsRenderer({
|
||||
>
|
||||
{initialDataSource === DataSource.LOGS &&
|
||||
selectedLogFields &&
|
||||
selectedLogFields.map((field, index) => (
|
||||
selectedLogFields.map((field: TelemetryFieldKey, index) => (
|
||||
// eslint-disable-next-line react/no-array-index-key
|
||||
<Draggable key={index} draggableId={index.toString()} index={index}>
|
||||
{(dragProvided): JSX.Element => (
|
||||
@@ -283,12 +323,22 @@ function ExplorerColumnsRenderer({
|
||||
>
|
||||
<div className="explorer-column-title">
|
||||
<GripVertical size={12} color="#5A5A5A" />
|
||||
{field.name}
|
||||
<span className="column-name-wrapper">
|
||||
{field.name}
|
||||
{nameCounts[field.name] > 1 && (
|
||||
<span className="badges-container">
|
||||
<FieldVariantBadges
|
||||
fieldDataType={field.fieldDataType}
|
||||
fieldContext={field.fieldContext}
|
||||
/>
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
<Trash2
|
||||
size={12}
|
||||
color="red"
|
||||
onClick={(): void => removeSelectedLogField(field.name)}
|
||||
onClick={(): void => removeSelectedLogField(field)}
|
||||
data-testid="trash-icon"
|
||||
/>
|
||||
</div>
|
||||
@@ -309,14 +359,22 @@ function ExplorerColumnsRenderer({
|
||||
>
|
||||
<div className="explorer-column-title">
|
||||
<GripVertical size={12} color="#5A5A5A" />
|
||||
{field?.name || (field as any)?.key}
|
||||
<span className="column-name-wrapper">
|
||||
{field?.name || field?.key}
|
||||
{nameCounts[field?.name || ''] > 1 && (
|
||||
<span className="badges-container">
|
||||
<FieldVariantBadges
|
||||
fieldDataType={field.fieldDataType}
|
||||
fieldContext={field.fieldContext}
|
||||
/>
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
<Trash2
|
||||
size={12}
|
||||
color="red"
|
||||
onClick={(): void =>
|
||||
removeSelectedLogField(field?.name || (field as any)?.key)
|
||||
}
|
||||
onClick={(): void => removeSelectedLogField(field)}
|
||||
data-testid="trash-icon"
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -222,7 +222,13 @@ describe('ExplorerColumnsRenderer', () => {
|
||||
await userEvent.click(checkbox);
|
||||
|
||||
expect(mockSetSelectedLogFields).toHaveBeenCalledWith([
|
||||
{ dataType: 'string', name: 'attribute1', type: '' },
|
||||
{
|
||||
dataType: 'string',
|
||||
fieldContext: '',
|
||||
fieldDataType: 'string',
|
||||
name: 'attribute1',
|
||||
type: '',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
@@ -326,9 +332,21 @@ describe('ExplorerColumnsRenderer', () => {
|
||||
data: {
|
||||
data: {
|
||||
keys: {
|
||||
attributeKeys: [
|
||||
{ name: 'trace_attribute1', dataType: 'string', type: 'tag' },
|
||||
{ name: 'trace_attribute2', dataType: 'string', type: 'tag' },
|
||||
trace_attribute1: [
|
||||
{
|
||||
name: 'trace_attribute1',
|
||||
fieldDataType: DataTypes.String,
|
||||
fieldContext: '',
|
||||
signal: 'traces',
|
||||
},
|
||||
],
|
||||
trace_attribute2: [
|
||||
{
|
||||
name: 'trace_attribute2',
|
||||
fieldDataType: DataTypes.String,
|
||||
fieldContext: '',
|
||||
signal: 'traces',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -356,7 +374,12 @@ describe('ExplorerColumnsRenderer', () => {
|
||||
await userEvent.click(checkbox);
|
||||
|
||||
expect(mockSetSelectedTracesFields).toHaveBeenCalledWith([
|
||||
{ name: 'trace_attribute1', dataType: 'string', type: 'tag' },
|
||||
{
|
||||
name: 'trace_attribute1',
|
||||
fieldDataType: DataTypes.String,
|
||||
fieldContext: '',
|
||||
signal: 'traces',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
@@ -0,0 +1,139 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
|
||||
import {
|
||||
mockAllAvailableKeys,
|
||||
mockConflictingFieldsByContext,
|
||||
mockConflictingFieldsByDatatype,
|
||||
mockNonConflictingField,
|
||||
} from '../../__tests__/mockData';
|
||||
import AddColumnField from '../index';
|
||||
|
||||
describe('AddColumnField - Badge Display', () => {
|
||||
const defaultConfig = {
|
||||
isFetching: false,
|
||||
options: [],
|
||||
value: [],
|
||||
onSelect: jest.fn(),
|
||||
onFocus: jest.fn(),
|
||||
onBlur: jest.fn(),
|
||||
onSearch: jest.fn(),
|
||||
onRemove: jest.fn(),
|
||||
allAvailableKeys: mockAllAvailableKeys,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('shows badge for single selected conflicting field (different datatype)', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // Only string variant selected
|
||||
];
|
||||
|
||||
render(
|
||||
<AddColumnField
|
||||
config={{
|
||||
...defaultConfig,
|
||||
value: selectedColumns,
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Badge should appear even though only one variant is selected
|
||||
// because allAvailableKeys contains the conflicting variant
|
||||
const badgeContainer = screen.queryByText('http.status_code')?.closest('div');
|
||||
expect(badgeContainer).toBeInTheDocument();
|
||||
|
||||
// Check for datatype badge
|
||||
const datatypeBadge = screen.queryByText('string');
|
||||
expect(datatypeBadge).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows badges for multiple conflicting fields selected', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByDatatype, // Both string and number variants
|
||||
];
|
||||
|
||||
render(
|
||||
<AddColumnField
|
||||
config={{
|
||||
...defaultConfig,
|
||||
value: selectedColumns,
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Both variants should show badges
|
||||
const stringBadge = screen.getByText('string');
|
||||
const numberBadge = screen.getByText('number');
|
||||
expect(stringBadge).toBeInTheDocument();
|
||||
expect(numberBadge).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows badges when all conflicting variants are selected', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByDatatype, // All variants selected
|
||||
];
|
||||
|
||||
render(
|
||||
<AddColumnField
|
||||
config={{
|
||||
...defaultConfig,
|
||||
value: selectedColumns,
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Both variants should appear as separate items in the list
|
||||
const fieldNames = screen.getAllByText('http.status_code');
|
||||
expect(fieldNames).toHaveLength(2); // One for each variant
|
||||
|
||||
// Badges should still be visible when all variants are selected
|
||||
const stringBadge = screen.getByText('string');
|
||||
const numberBadge = screen.getByText('number');
|
||||
expect(stringBadge).toBeInTheDocument();
|
||||
expect(numberBadge).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not show badge for non-conflicting field', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [...mockNonConflictingField];
|
||||
|
||||
render(
|
||||
<AddColumnField
|
||||
config={{
|
||||
...defaultConfig,
|
||||
value: selectedColumns,
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Field name should be visible
|
||||
expect(screen.getByText('trace_id')).toBeInTheDocument();
|
||||
|
||||
// But no badge should appear (no conflicting variants)
|
||||
const badgeContainer = document.querySelector(
|
||||
'.field-variant-badges-container',
|
||||
);
|
||||
expect(badgeContainer).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows context badge for attribute/resource conflicting fields', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByContext[0], // resource variant
|
||||
];
|
||||
|
||||
render(
|
||||
<AddColumnField
|
||||
config={{
|
||||
...defaultConfig,
|
||||
value: selectedColumns,
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Context badge should appear for resource
|
||||
const contextBadge = screen.queryByText('resource');
|
||||
expect(contextBadge).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -1,18 +1,39 @@
|
||||
import { SearchOutlined } from '@ant-design/icons';
|
||||
import { Input, Spin, Typography } from 'antd';
|
||||
import { Input, Spin } from 'antd';
|
||||
import { BaseOptionType } from 'antd/es/select';
|
||||
import FieldVariantBadges from 'components/FieldVariantBadges/FieldVariantBadges';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import { FieldTitle } from '../styles';
|
||||
import { OptionsMenuConfig } from '../types';
|
||||
import { getUniqueColumnKey, hasMultipleVariants } from '../utils';
|
||||
import {
|
||||
AddColumnItem,
|
||||
AddColumnSelect,
|
||||
AddColumnWrapper,
|
||||
DeleteOutlinedIcon,
|
||||
Name,
|
||||
NameWrapper,
|
||||
OptionContent,
|
||||
SearchIconWrapper,
|
||||
} from './styles';
|
||||
|
||||
function OptionRenderer(option: BaseOptionType): JSX.Element {
|
||||
const { label, data } = option;
|
||||
return (
|
||||
<OptionContent>
|
||||
<span className="option-label">{label}</span>
|
||||
{data?.hasMultipleVariants && (
|
||||
<FieldVariantBadges
|
||||
fieldDataType={data?.fieldDataType}
|
||||
fieldContext={data?.fieldContext}
|
||||
/>
|
||||
)}
|
||||
</OptionContent>
|
||||
);
|
||||
}
|
||||
|
||||
function AddColumnField({ config }: AddColumnFieldProps): JSX.Element | null {
|
||||
const { t } = useTranslation(['trace']);
|
||||
const isDarkMode = useIsDarkMode();
|
||||
@@ -36,18 +57,35 @@ function AddColumnField({ config }: AddColumnFieldProps): JSX.Element | null {
|
||||
onFocus={config.onFocus}
|
||||
onBlur={config.onBlur}
|
||||
notFoundContent={config.isFetching ? <Spin size="small" /> : null}
|
||||
optionRender={OptionRenderer}
|
||||
/>
|
||||
<SearchIconWrapper $isDarkMode={isDarkMode}>
|
||||
<SearchOutlined />
|
||||
</SearchIconWrapper>
|
||||
</Input.Group>
|
||||
|
||||
{config.value?.map(({ name }) => (
|
||||
<AddColumnItem direction="horizontal" key={name}>
|
||||
<Typography>{name}</Typography>
|
||||
<DeleteOutlinedIcon onClick={(): void => config.onRemove(name)} />
|
||||
</AddColumnItem>
|
||||
))}
|
||||
{config.value?.map((column) => {
|
||||
const uniqueKey = getUniqueColumnKey(column);
|
||||
const showBadge = hasMultipleVariants(
|
||||
column.name || '',
|
||||
config.value || [],
|
||||
config.allAvailableKeys,
|
||||
);
|
||||
return (
|
||||
<AddColumnItem key={uniqueKey}>
|
||||
<NameWrapper>
|
||||
<Name>{column.name}</Name>
|
||||
{showBadge && (
|
||||
<FieldVariantBadges
|
||||
fieldDataType={column.fieldDataType}
|
||||
fieldContext={column.fieldContext}
|
||||
/>
|
||||
)}
|
||||
</NameWrapper>
|
||||
<DeleteOutlinedIcon onClick={(): void => config.onRemove(uniqueKey)} />
|
||||
</AddColumnItem>
|
||||
);
|
||||
})}
|
||||
</AddColumnWrapper>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -28,7 +28,7 @@ export const AddColumnWrapper = styled(Space)`
|
||||
width: 100%;
|
||||
`;
|
||||
|
||||
export const AddColumnItem = styled(Space)`
|
||||
export const AddColumnItem = styled.div`
|
||||
width: 100%;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
@@ -37,3 +37,35 @@ export const AddColumnItem = styled(Space)`
|
||||
export const DeleteOutlinedIcon = styled(DeleteOutlined)`
|
||||
color: red;
|
||||
`;
|
||||
|
||||
export const OptionContent = styled.div`
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
gap: 8px;
|
||||
min-width: 0;
|
||||
|
||||
.option-label {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
`;
|
||||
export const NameWrapper = styled.span`
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: calc(100% - 26px);
|
||||
gap: 8px;
|
||||
min-width: 0;
|
||||
`;
|
||||
export const Name = styled.span`
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
`;
|
||||
|
||||
111
frontend/src/container/OptionsMenu/__tests__/mockData.ts
Normal file
111
frontend/src/container/OptionsMenu/__tests__/mockData.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import { QUERY_BUILDER_KEY_TYPES } from 'constants/antlrQueryConstants';
|
||||
import { QueryKeySuggestionsResponseProps } from 'types/api/querySuggestions/types';
|
||||
|
||||
const HTTP_STATUS_CODE = 'http.status_code';
|
||||
const SERVICE_NAME = 'service.name';
|
||||
|
||||
// Conflicting fields: same name, different datatype
|
||||
export const mockConflictingFieldsByDatatype: TelemetryFieldKey[] = [
|
||||
{
|
||||
name: HTTP_STATUS_CODE,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.STRING,
|
||||
fieldContext: 'attribute',
|
||||
signal: 'traces',
|
||||
},
|
||||
{
|
||||
name: HTTP_STATUS_CODE,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.NUMBER,
|
||||
fieldContext: 'attribute',
|
||||
signal: 'traces',
|
||||
},
|
||||
];
|
||||
|
||||
// Conflicting fields: same name, different context
|
||||
export const mockConflictingFieldsByContext: TelemetryFieldKey[] = [
|
||||
{
|
||||
name: SERVICE_NAME,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.STRING,
|
||||
fieldContext: 'resource',
|
||||
signal: 'traces',
|
||||
},
|
||||
{
|
||||
name: SERVICE_NAME,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.STRING,
|
||||
fieldContext: 'attribute',
|
||||
signal: 'traces',
|
||||
},
|
||||
];
|
||||
|
||||
// Non-conflicting field (single variant)
|
||||
export const mockNonConflictingField: TelemetryFieldKey[] = [
|
||||
{
|
||||
name: 'trace_id',
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.STRING,
|
||||
fieldContext: 'attribute',
|
||||
signal: 'traces',
|
||||
},
|
||||
];
|
||||
|
||||
// Mock API response structure for conflicting fields by datatype
|
||||
export const mockQueryKeySuggestionsResponseByDatatype: QueryKeySuggestionsResponseProps = {
|
||||
status: 'success',
|
||||
data: {
|
||||
complete: true,
|
||||
keys: {
|
||||
[HTTP_STATUS_CODE]: [
|
||||
{
|
||||
name: HTTP_STATUS_CODE,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.STRING,
|
||||
fieldContext: 'attribute',
|
||||
signal: 'traces',
|
||||
label: HTTP_STATUS_CODE,
|
||||
type: 'attribute',
|
||||
},
|
||||
{
|
||||
name: HTTP_STATUS_CODE,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.NUMBER,
|
||||
fieldContext: 'attribute',
|
||||
signal: 'traces',
|
||||
label: HTTP_STATUS_CODE,
|
||||
type: 'attribute',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Mock API response structure for conflicting fields by context
|
||||
export const mockQueryKeySuggestionsResponseByContext: QueryKeySuggestionsResponseProps = {
|
||||
status: 'success',
|
||||
data: {
|
||||
complete: true,
|
||||
keys: {
|
||||
[SERVICE_NAME]: [
|
||||
{
|
||||
name: SERVICE_NAME,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.STRING,
|
||||
fieldContext: 'resource',
|
||||
signal: 'traces',
|
||||
label: SERVICE_NAME,
|
||||
type: 'resource',
|
||||
},
|
||||
{
|
||||
name: SERVICE_NAME,
|
||||
fieldDataType: QUERY_BUILDER_KEY_TYPES.STRING,
|
||||
fieldContext: 'attribute',
|
||||
signal: 'traces',
|
||||
label: SERVICE_NAME,
|
||||
type: 'attribute',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// All available keys (for allAvailableKeys prop)
|
||||
export const mockAllAvailableKeys: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByDatatype,
|
||||
...mockConflictingFieldsByContext,
|
||||
...mockNonConflictingField,
|
||||
];
|
||||
@@ -10,10 +10,22 @@ export const OptionsContainer = styled(Card)`
|
||||
`;
|
||||
|
||||
export const OptionsContentWrapper = styled(Space)`
|
||||
min-width: 11rem;
|
||||
width: 21rem;
|
||||
padding: 0.25rem 0.5rem;
|
||||
`;
|
||||
|
||||
export const FieldTitle = styled(Typography.Text)`
|
||||
font-size: 0.75rem;
|
||||
`;
|
||||
|
||||
export const ColumnTitleWrapper = styled.span`
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
word-break: break-word;
|
||||
`;
|
||||
|
||||
export const ColumnTitleIcon = styled.span`
|
||||
font-size: 12px;
|
||||
color: var(--bg-vanilla-400);
|
||||
`;
|
||||
|
||||
@@ -38,5 +38,6 @@ export type OptionsMenuConfig = {
|
||||
isFetching: boolean;
|
||||
value: TelemetryFieldKey[];
|
||||
onRemove: (key: string) => void;
|
||||
allAvailableKeys?: TelemetryFieldKey[];
|
||||
};
|
||||
};
|
||||
|
||||
@@ -36,7 +36,7 @@ import {
|
||||
OptionsMenuConfig,
|
||||
OptionsQuery,
|
||||
} from './types';
|
||||
import { getOptionsFromKeys } from './utils';
|
||||
import { getOptionsFromKeys, getUniqueColumnKey } from './utils';
|
||||
|
||||
interface UseOptionsMenuProps {
|
||||
storageKey?: string;
|
||||
@@ -170,7 +170,7 @@ const useOptionsMenu = ({
|
||||
...initialQueryParamsV5,
|
||||
searchText: debouncedSearchText,
|
||||
},
|
||||
{ queryKey: [debouncedSearchText, isFocused], enabled: isFocused },
|
||||
{ queryKey: [debouncedSearchText, isFocused] },
|
||||
);
|
||||
|
||||
// const {
|
||||
@@ -186,7 +186,7 @@ const useOptionsMenu = ({
|
||||
|
||||
const searchedAttributeKeys: TelemetryFieldKey[] = useMemo(() => {
|
||||
const searchedAttributesDataList = Object.values(
|
||||
searchedAttributesDataV5?.data.data.keys || {},
|
||||
searchedAttributesDataV5?.data?.data?.keys || {},
|
||||
).flat();
|
||||
if (searchedAttributesDataList.length) {
|
||||
if (dataSource === DataSource.LOGS) {
|
||||
@@ -230,7 +230,7 @@ const useOptionsMenu = ({
|
||||
}
|
||||
|
||||
return [];
|
||||
}, [dataSource, searchedAttributesDataV5?.data.data.keys]);
|
||||
}, [dataSource, searchedAttributesDataV5?.data?.data?.keys]);
|
||||
|
||||
const initialOptionsQuery: OptionsQuery = useMemo(() => {
|
||||
let defaultColumns: TelemetryFieldKey[] = defaultOptionsQuery.selectColumns;
|
||||
@@ -262,7 +262,7 @@ const useOptionsMenu = ({
|
||||
}, [dataSource, initialOptions, initialSelectedColumns]);
|
||||
|
||||
const selectedColumnKeys = useMemo(
|
||||
() => preferences?.columns?.map(({ name }) => name) || [],
|
||||
() => preferences?.columns?.map((col) => getUniqueColumnKey(col)) || [],
|
||||
[preferences?.columns],
|
||||
);
|
||||
|
||||
@@ -287,16 +287,14 @@ const useOptionsMenu = ({
|
||||
|
||||
const handleSelectColumns = useCallback(
|
||||
(value: string) => {
|
||||
const newSelectedColumnKeys = [...new Set([...selectedColumnKeys, value])];
|
||||
const newSelectedColumns = newSelectedColumnKeys.reduce((acc, key) => {
|
||||
const column = [
|
||||
...searchedAttributeKeys,
|
||||
...(preferences?.columns || []),
|
||||
].find(({ name }) => name === key);
|
||||
// value is now the unique key (name::dataType::context)
|
||||
const column = searchedAttributeKeys.find(
|
||||
(key) => getUniqueColumnKey(key) === value,
|
||||
);
|
||||
|
||||
if (!column) return acc;
|
||||
return [...acc, column];
|
||||
}, [] as TelemetryFieldKey[]);
|
||||
if (!column) return;
|
||||
|
||||
const newSelectedColumns = [...(preferences?.columns || []), column];
|
||||
|
||||
const optionsData: OptionsQuery = {
|
||||
...defaultOptionsQuery,
|
||||
@@ -311,7 +309,6 @@ const useOptionsMenu = ({
|
||||
},
|
||||
[
|
||||
searchedAttributeKeys,
|
||||
selectedColumnKeys,
|
||||
preferences,
|
||||
handleRedirectWithOptionsData,
|
||||
updateColumns,
|
||||
@@ -320,8 +317,9 @@ const useOptionsMenu = ({
|
||||
|
||||
const handleRemoveSelectedColumn = useCallback(
|
||||
(columnKey: string) => {
|
||||
// columnKey is now the unique key (name::dataType::context)
|
||||
const newSelectedColumns = preferences?.columns?.filter(
|
||||
({ name }) => name !== columnKey,
|
||||
(col) => getUniqueColumnKey(col) !== columnKey,
|
||||
);
|
||||
|
||||
if (!newSelectedColumns?.length && dataSource !== DataSource.LOGS) {
|
||||
@@ -432,6 +430,7 @@ const useOptionsMenu = ({
|
||||
preferences?.columns.filter((item) => has(item, 'name')) ||
|
||||
defaultOptionsQuery.selectColumns.filter((item) => has(item, 'name')),
|
||||
options: optionsFromAttributeKeys || [],
|
||||
allAvailableKeys: searchedAttributeKeys,
|
||||
onFocus: handleFocus,
|
||||
onBlur: handleBlur,
|
||||
onSelect: handleSelectColumns,
|
||||
@@ -455,6 +454,7 @@ const useOptionsMenu = ({
|
||||
isSearchedAttributesFetchingV5,
|
||||
preferences,
|
||||
optionsFromAttributeKeys,
|
||||
searchedAttributeKeys,
|
||||
handleSelectColumns,
|
||||
handleRemoveSelectedColumn,
|
||||
handleSearchAttribute,
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
import { SelectProps } from 'antd';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
|
||||
export const getOptionsFromKeys = (
|
||||
keys: TelemetryFieldKey[],
|
||||
selectedKeys: (string | undefined)[],
|
||||
): SelectProps['options'] => {
|
||||
const options = keys.map(({ name }) => ({
|
||||
label: name,
|
||||
value: name,
|
||||
}));
|
||||
|
||||
return options.filter(
|
||||
({ value }) => !selectedKeys.find((key) => key === value),
|
||||
);
|
||||
};
|
||||
294
frontend/src/container/OptionsMenu/utils.tsx
Normal file
294
frontend/src/container/OptionsMenu/utils.tsx
Normal file
@@ -0,0 +1,294 @@
|
||||
import { SelectProps } from 'antd';
|
||||
import { DefaultOptionType } from 'antd/es/select';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import {
|
||||
QueryKeyDataSuggestionsProps,
|
||||
QueryKeySuggestionsResponseProps,
|
||||
} from 'types/api/querySuggestions/types';
|
||||
|
||||
/**
|
||||
* Extracts all available keys from API response and transforms them into TelemetryFieldKey format
|
||||
* @param keysData - The response data from useGetQueryKeySuggestions hook
|
||||
* @returns Array of TelemetryFieldKey objects
|
||||
*/
|
||||
export const extractTelemetryFieldKeys = (
|
||||
keysData?: AxiosResponse<QueryKeySuggestionsResponseProps>,
|
||||
): TelemetryFieldKey[] => {
|
||||
const keysList = Object.values(keysData?.data?.data?.keys || {})?.flat() || [];
|
||||
return keysList.map((key) => ({
|
||||
name: key.name,
|
||||
fieldDataType: key.fieldDataType,
|
||||
fieldContext: key.fieldContext,
|
||||
signal: key.signal,
|
||||
})) as TelemetryFieldKey[];
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a unique key for a column by combining context, name, and dataType
|
||||
* Format: fieldContext::name::fieldDataType
|
||||
* Example: "attribute::http.status_code::number"
|
||||
*/
|
||||
export const getUniqueColumnKey = (
|
||||
column: TelemetryFieldKey | QueryKeyDataSuggestionsProps,
|
||||
): string => {
|
||||
const name = column.name || '';
|
||||
const dataType =
|
||||
('fieldDataType' in column && column.fieldDataType) ||
|
||||
('dataType' in column && column.dataType) ||
|
||||
'string';
|
||||
const context =
|
||||
column.fieldContext || ('type' in column && column.type) || 'attribute';
|
||||
return `${context}::${name}::${dataType}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Parses a unique column key back into its components
|
||||
* Format: fieldContext::name::fieldDataType
|
||||
*/
|
||||
export const parseColumnKey = (
|
||||
key: string,
|
||||
): { name: string; fieldDataType: string; fieldContext: string } => {
|
||||
const parts = key.split('::');
|
||||
const fieldContext = parts[0] || 'attribute';
|
||||
const name = parts[1] || '';
|
||||
const fieldDataType = parts[2] || 'string';
|
||||
return { name, fieldDataType, fieldContext };
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a count map of how many variants each attribute name has
|
||||
* Used to determine which columns should display badges
|
||||
*/
|
||||
export const getVariantCounts = <T extends { name?: string }>(
|
||||
items: T[],
|
||||
): Record<string, number> => {
|
||||
if (!items || !items.length) return {};
|
||||
return items.reduce((acc: Record<string, number>, item: T) => {
|
||||
const name = item?.name || '';
|
||||
if (name) {
|
||||
acc[name] = (acc[name] || 0) + 1;
|
||||
}
|
||||
return acc;
|
||||
}, {} as Record<string, number>);
|
||||
};
|
||||
|
||||
/**
|
||||
* Extracts a Set of column names that have multiple variants from options
|
||||
* Useful when options already have hasMultipleVariants flag
|
||||
*/
|
||||
export const getNamesWithVariants = (
|
||||
options: SelectProps['options'],
|
||||
): Set<string> => {
|
||||
if (!options || !Array.isArray(options)) return new Set();
|
||||
const names = options
|
||||
.filter((opt) => {
|
||||
if (!opt) return false;
|
||||
const option = opt as DefaultOptionType & {
|
||||
hasMultipleVariants?: boolean;
|
||||
};
|
||||
return option?.hasMultipleVariants;
|
||||
})
|
||||
.map((opt) => {
|
||||
if (!opt) return '';
|
||||
const value = String(opt.value || '');
|
||||
return parseColumnKey(value).name;
|
||||
});
|
||||
return new Set(names);
|
||||
};
|
||||
|
||||
/**
|
||||
* Groups fields by their name to analyze variants
|
||||
* Returns a map of field name to array of fields with that name
|
||||
*/
|
||||
export const getFieldVariantsByName = <T extends { name?: string }>(
|
||||
fields: T[],
|
||||
): Record<string, T[]> =>
|
||||
fields.reduce((acc, field) => {
|
||||
const name = field.name || '';
|
||||
if (!acc[name]) {
|
||||
acc[name] = [];
|
||||
}
|
||||
acc[name].push(field);
|
||||
return acc;
|
||||
}, {} as Record<string, T[]>);
|
||||
|
||||
/**
|
||||
* Determines the column title based on variant analysis
|
||||
* Shows context if dataTypes are same but contexts differ
|
||||
* Shows dataType if dataTypes differ
|
||||
*/
|
||||
export const getColumnTitle = <
|
||||
T extends Partial<QueryKeyDataSuggestionsProps> | Partial<TelemetryFieldKey>
|
||||
>(
|
||||
field: T,
|
||||
hasVariants: boolean,
|
||||
variants: T[],
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
): string => {
|
||||
const name = field.name || '';
|
||||
if (!hasVariants) return name;
|
||||
|
||||
// Extract data types from variants (support both fieldDataType and dataType)
|
||||
const uniqueDataTypes = new Set(
|
||||
variants
|
||||
.map(
|
||||
(v) =>
|
||||
('fieldDataType' in v && v.fieldDataType) ||
|
||||
('dataType' in v && v.dataType),
|
||||
)
|
||||
.filter(Boolean),
|
||||
);
|
||||
|
||||
// Extract contexts from variants (support both fieldContext and type)
|
||||
const uniqueContexts = new Set(
|
||||
variants
|
||||
.map(
|
||||
(v) => ('fieldContext' in v && v.fieldContext) || ('type' in v && v.type),
|
||||
)
|
||||
.filter(Boolean),
|
||||
);
|
||||
|
||||
// Same dataType but different contexts - show context
|
||||
if (
|
||||
uniqueDataTypes.size === 1 &&
|
||||
uniqueContexts.size > 1 &&
|
||||
(field.fieldContext || ('type' in field && field.type))
|
||||
) {
|
||||
return `${name} (${field.fieldContext || ('type' in field && field.type)})`;
|
||||
}
|
||||
|
||||
// Different dataTypes - show dataType
|
||||
const dataType =
|
||||
('fieldDataType' in field && field.fieldDataType) ||
|
||||
('dataType' in field && field.dataType);
|
||||
if (dataType) {
|
||||
return `${name} (${dataType})`;
|
||||
}
|
||||
|
||||
return name;
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks if another field with the same name but different unique key exists in availableKeys
|
||||
* and if any of those conflicting fields are NOT already selected
|
||||
* This indicates a conflicted column scenario where user might not be aware of other variants
|
||||
*/
|
||||
const hasUnselectedConflictingField = <
|
||||
T extends Partial<QueryKeyDataSuggestionsProps> | Partial<TelemetryFieldKey>
|
||||
>(
|
||||
field: T,
|
||||
availableKeys?: TelemetryFieldKey[],
|
||||
selectedColumns?: TelemetryFieldKey[],
|
||||
): boolean => {
|
||||
if (!availableKeys || availableKeys.length === 0) return false;
|
||||
|
||||
const fieldName = field.name || '';
|
||||
const fieldUniqueKey = getUniqueColumnKey(field as TelemetryFieldKey);
|
||||
|
||||
// Find all conflicting fields (same name, different unique key)
|
||||
const conflictingFields = availableKeys.filter(
|
||||
(key) => key.name === fieldName && getUniqueColumnKey(key) !== fieldUniqueKey,
|
||||
);
|
||||
|
||||
// If no conflicting fields exist, no conflict
|
||||
if (conflictingFields.length === 0) return false;
|
||||
|
||||
// If no selected columns provided, assume conflict exists
|
||||
if (!selectedColumns || selectedColumns.length === 0) return true;
|
||||
|
||||
// Check if all conflicting fields are already selected
|
||||
const selectedUniqueKeys = new Set(
|
||||
selectedColumns.map((col) => getUniqueColumnKey(col)),
|
||||
);
|
||||
|
||||
// Return true if any conflicting field is NOT selected
|
||||
return conflictingFields.some(
|
||||
(conflictingField) =>
|
||||
!selectedUniqueKeys.has(getUniqueColumnKey(conflictingField)),
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns column title as string and metadata for tooltip icon
|
||||
* Shows tooltip only when another field with the same name but different type/context exists
|
||||
* and is NOT already selected (better UX - no need to show tooltip if all variants are visible)
|
||||
*
|
||||
* Returns an object with:
|
||||
* - title: string
|
||||
* - hasUnselectedConflict: boolean
|
||||
*/
|
||||
export const getColumnTitleWithTooltip = <
|
||||
T extends Partial<QueryKeyDataSuggestionsProps> | Partial<TelemetryFieldKey>
|
||||
>(
|
||||
field: T,
|
||||
hasVariants: boolean,
|
||||
variants: T[],
|
||||
selectedColumns: TelemetryFieldKey[],
|
||||
availableKeys?: TelemetryFieldKey[],
|
||||
): { title: string; hasUnselectedConflict: boolean } => {
|
||||
const title = getColumnTitle(field, hasVariants, variants);
|
||||
const hasUnselectedConflict = hasUnselectedConflictingField(
|
||||
field,
|
||||
availableKeys,
|
||||
selectedColumns,
|
||||
);
|
||||
|
||||
return { title, hasUnselectedConflict };
|
||||
};
|
||||
|
||||
export const getOptionsFromKeys = (
|
||||
keys: TelemetryFieldKey[],
|
||||
selectedKeys: (string | undefined)[],
|
||||
): SelectProps['options'] => {
|
||||
// Detect which attribute names have multiple variants
|
||||
const nameCounts = keys.reduce((acc, key) => {
|
||||
const name = key.name || '';
|
||||
acc[name] = (acc[name] || 0) + 1;
|
||||
return acc;
|
||||
}, {} as Record<string, number>);
|
||||
|
||||
const options = keys.map((key) => ({
|
||||
label: key.name,
|
||||
value: getUniqueColumnKey(key),
|
||||
// Store additional data for rendering
|
||||
fieldDataType: key.fieldDataType,
|
||||
fieldContext: key.fieldContext,
|
||||
signal: key.signal,
|
||||
hasMultipleVariants: nameCounts[key.name || ''] > 1,
|
||||
}));
|
||||
|
||||
return options.filter(
|
||||
({ value }) => !selectedKeys.find((selectedKey) => selectedKey === value),
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Determines if a column name has multiple variants
|
||||
* Checks both selected columns and available keys (from search) to detect conflicts
|
||||
* Reuses getVariantCounts for consistency
|
||||
*/
|
||||
export const hasMultipleVariants = (
|
||||
columnName: string,
|
||||
selectedColumns: TelemetryFieldKey[],
|
||||
availableKeys?: TelemetryFieldKey[],
|
||||
): boolean => {
|
||||
// Combine selected columns with available keys (if provided)
|
||||
const allKeys = availableKeys
|
||||
? [...selectedColumns, ...availableKeys]
|
||||
: selectedColumns;
|
||||
|
||||
// Deduplicate by unique key to avoid counting same variant twice
|
||||
const uniqueKeysMap = new Map<string, TelemetryFieldKey>();
|
||||
allKeys.forEach((key) => {
|
||||
const uniqueKey = getUniqueColumnKey(key);
|
||||
if (!uniqueKeysMap.has(uniqueKey)) {
|
||||
uniqueKeysMap.set(uniqueKey, key);
|
||||
}
|
||||
});
|
||||
|
||||
const deduplicatedKeys = Array.from(uniqueKeysMap.values());
|
||||
const variantCounts = getVariantCounts(deduplicatedKeys);
|
||||
|
||||
return variantCounts[columnName] > 1;
|
||||
};
|
||||
@@ -0,0 +1,161 @@
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import {
|
||||
mockAllAvailableKeys,
|
||||
mockConflictingFieldsByContext,
|
||||
mockConflictingFieldsByDatatype,
|
||||
} from 'container/OptionsMenu/__tests__/mockData';
|
||||
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
|
||||
import { renderColumnHeader } from 'tests/columnHeaderHelpers';
|
||||
|
||||
import { getListColumns } from '../utils';
|
||||
|
||||
const COLUMN_UNDEFINED_ERROR = 'statusCodeColumn is undefined';
|
||||
const SERVICE_NAME_COLUMN_UNDEFINED_ERROR = 'serviceNameColumn is undefined';
|
||||
|
||||
// Mock the timezone formatter
|
||||
const mockFormatTimezoneAdjustedTimestamp = jest.fn(
|
||||
(input: TimestampInput): string => {
|
||||
if (typeof input === 'string') {
|
||||
return new Date(input).toISOString();
|
||||
}
|
||||
if (typeof input === 'number') {
|
||||
return new Date(input / 1e6).toISOString();
|
||||
}
|
||||
return new Date(input).toISOString();
|
||||
},
|
||||
);
|
||||
|
||||
describe('getListColumns - Column Headers and Tooltips', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('shows datatype in column header for conflicting fields', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // string variant
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedColumns,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
(col) => 'dataIndex' in col && col.dataIndex === 'http.status_code',
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
expect(statusCodeColumn?.title).toBeDefined();
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
expect(container.textContent).toContain('http.status_code (string)');
|
||||
expect(container.textContent).toContain('string');
|
||||
});
|
||||
|
||||
it('shows tooltip icon when unselected conflicting variant exists', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // Only string variant selected
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedColumns,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys, // Contains number variant
|
||||
);
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col) => 'dataIndex' in col && col.dataIndex === 'http.status_code',
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is set correctly
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBe(true);
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('hides tooltip icon when all conflicting variants are selected', () => {
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByDatatype, // Both variants selected
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedColumns,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col) => 'dataIndex' in col && col.dataIndex === 'http.status_code',
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is NOT set when all variants are selected
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBeUndefined();
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows context in header for attribute/resource conflicting fields', () => {
|
||||
// When same datatype but different contexts, it shows context
|
||||
const selectedColumns: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByContext, // Both resource and attribute variants
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedColumns,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
const serviceNameColumn = columns.find(
|
||||
(col) => 'dataIndex' in col && col.dataIndex === 'service.name',
|
||||
);
|
||||
|
||||
expect(serviceNameColumn).toBeDefined();
|
||||
|
||||
if (!serviceNameColumn) {
|
||||
throw new Error(SERVICE_NAME_COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(serviceNameColumn);
|
||||
expect(container.textContent).toContain('service.name (resource)');
|
||||
expect(container.textContent).toContain('resource');
|
||||
});
|
||||
|
||||
it('includes timestamp column in initial columns', () => {
|
||||
const columns = getListColumns(
|
||||
[],
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
const timestampColumn = columns.find(
|
||||
(col) => 'dataIndex' in col && col.dataIndex === 'date',
|
||||
);
|
||||
expect(timestampColumn).toBeDefined();
|
||||
expect(timestampColumn?.title).toBe('Timestamp');
|
||||
});
|
||||
});
|
||||
@@ -186,9 +186,15 @@ function ListView({
|
||||
const updatedColumns = getListColumns(
|
||||
options?.selectColumns || [],
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
config.addColumn?.allAvailableKeys,
|
||||
);
|
||||
return getDraggedColumns(updatedColumns, draggedColumns);
|
||||
}, [options?.selectColumns, formatTimezoneAdjustedTimestamp, draggedColumns]);
|
||||
}, [
|
||||
options?.selectColumns,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
draggedColumns,
|
||||
config.addColumn?.allAvailableKeys,
|
||||
]);
|
||||
|
||||
const transformedQueryTableData = useMemo(
|
||||
() => transformDataWithDate(queryTableData) || [],
|
||||
|
||||
@@ -3,6 +3,12 @@ import { ColumnsType } from 'antd/es/table';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import ROUTES from 'constants/routes';
|
||||
import {
|
||||
getColumnTitleWithTooltip,
|
||||
getFieldVariantsByName,
|
||||
getUniqueColumnKey,
|
||||
hasMultipleVariants,
|
||||
} from 'container/OptionsMenu/utils';
|
||||
import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util';
|
||||
import { formUrlParams } from 'container/TraceDetail/utils';
|
||||
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
|
||||
@@ -52,6 +58,7 @@ export const getListColumns = (
|
||||
input: TimestampInput,
|
||||
format?: string,
|
||||
) => string | number,
|
||||
allAvailableKeys?: TelemetryFieldKey[],
|
||||
): ColumnsType<RowData> => {
|
||||
const initialColumns: ColumnsType<RowData> = [
|
||||
{
|
||||
@@ -79,15 +86,31 @@ export const getListColumns = (
|
||||
},
|
||||
];
|
||||
|
||||
// Group fields by name to analyze variants
|
||||
const fieldVariantsByName = getFieldVariantsByName(selectedColumns);
|
||||
|
||||
const columns: ColumnsType<RowData> =
|
||||
selectedColumns.map((props) => {
|
||||
const name = props?.name || (props as any)?.key;
|
||||
const fieldDataType = props?.fieldDataType || (props as any)?.dataType;
|
||||
const fieldContext = props?.fieldContext || (props as any)?.type;
|
||||
const hasVariants = hasMultipleVariants(
|
||||
name,
|
||||
selectedColumns,
|
||||
allAvailableKeys,
|
||||
);
|
||||
const variants = fieldVariantsByName[name] || [];
|
||||
const { title, hasUnselectedConflict } = getColumnTitleWithTooltip(
|
||||
props,
|
||||
hasVariants,
|
||||
variants,
|
||||
selectedColumns,
|
||||
allAvailableKeys,
|
||||
);
|
||||
|
||||
return {
|
||||
title: name,
|
||||
title,
|
||||
dataIndex: name,
|
||||
key: `${name}-${fieldDataType}-${fieldContext}`,
|
||||
key: getUniqueColumnKey(props),
|
||||
...(hasUnselectedConflict && { _hasUnselectedConflict: true }),
|
||||
width: 145,
|
||||
render: (value, item): JSX.Element => {
|
||||
if (value === '') {
|
||||
|
||||
@@ -4,6 +4,7 @@ import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import Controls from 'container/Controls';
|
||||
import { extractTelemetryFieldKeys } from 'container/OptionsMenu/utils';
|
||||
import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs';
|
||||
import { tableStyles } from 'container/TracesExplorer/ListView/styles';
|
||||
import {
|
||||
@@ -12,6 +13,7 @@ import {
|
||||
transformDataWithDate,
|
||||
} from 'container/TracesExplorer/ListView/utils';
|
||||
import { Pagination } from 'hooks/queryPagination';
|
||||
import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKeySuggestions';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import history from 'lib/history';
|
||||
@@ -30,6 +32,10 @@ import { UseQueryResult } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import {
|
||||
DataSource,
|
||||
TracesAggregatorOperator,
|
||||
} from 'types/common/queryBuilder';
|
||||
|
||||
function TracesTableComponent({
|
||||
widget,
|
||||
@@ -54,14 +60,35 @@ function TracesTableComponent({
|
||||
|
||||
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
||||
|
||||
// Fetch available keys to detect variants
|
||||
|
||||
const { data: keysData } = useGetQueryKeySuggestions(
|
||||
{
|
||||
searchText: '',
|
||||
signal: DataSource.TRACES,
|
||||
},
|
||||
{
|
||||
queryKey: [DataSource.TRACES, TracesAggregatorOperator.NOOP, ''],
|
||||
},
|
||||
);
|
||||
|
||||
// Extract all available keys from API response
|
||||
const allAvailableKeys = useMemo(() => extractTelemetryFieldKeys(keysData), [
|
||||
keysData,
|
||||
]);
|
||||
|
||||
const columns = useMemo(
|
||||
() =>
|
||||
getListColumns(
|
||||
widget.selectedTracesFields || [],
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
allAvailableKeys,
|
||||
),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[widget.selectedTracesFields],
|
||||
[
|
||||
widget.selectedTracesFields,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
allAvailableKeys,
|
||||
],
|
||||
);
|
||||
|
||||
const dataLength =
|
||||
|
||||
@@ -0,0 +1,159 @@
|
||||
import { ColumnType } from 'antd/es/table';
|
||||
import { TelemetryFieldKey } from 'api/v5/v5';
|
||||
import {
|
||||
mockAllAvailableKeys,
|
||||
mockConflictingFieldsByContext,
|
||||
mockConflictingFieldsByDatatype,
|
||||
} from 'container/OptionsMenu/__tests__/mockData';
|
||||
import { getListColumns } from 'container/TracesExplorer/ListView/utils';
|
||||
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { renderColumnHeader } from 'tests/columnHeaderHelpers';
|
||||
|
||||
const HTTP_STATUS_CODE = 'http.status_code';
|
||||
const SERVICE_NAME = 'service.name';
|
||||
|
||||
const COLUMN_UNDEFINED_ERROR = 'statusCodeColumn is undefined';
|
||||
const SERVICE_NAME_COLUMN_UNDEFINED_ERROR = 'serviceNameColumn is undefined';
|
||||
|
||||
// Mock the timezone formatter
|
||||
const mockFormatTimezoneAdjustedTimestamp = jest.fn(
|
||||
(input: TimestampInput): string => {
|
||||
if (typeof input === 'string') {
|
||||
return new Date(input).toISOString();
|
||||
}
|
||||
if (typeof input === 'number') {
|
||||
return new Date(input / 1e6).toISOString();
|
||||
}
|
||||
return new Date(input).toISOString();
|
||||
},
|
||||
);
|
||||
|
||||
describe('TracesTableComponent - Column Headers', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('shows datatype in column header for conflicting columns', () => {
|
||||
const selectedTracesFields: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // string variant
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedTracesFields,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
// Find the http.status_code column
|
||||
const statusCodeColumn = columns.find(
|
||||
(col): col is ColumnType<RowData> =>
|
||||
'dataIndex' in col && (col.dataIndex as string) === HTTP_STATUS_CODE,
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
expect(statusCodeColumn?.title).toBeDefined();
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
expect(container.textContent).toContain('http.status_code (string)');
|
||||
expect(container.textContent).toContain('string');
|
||||
});
|
||||
|
||||
it('shows tooltip icon when unselected conflicting variant exists', () => {
|
||||
const selectedTracesFields: TelemetryFieldKey[] = [
|
||||
mockConflictingFieldsByDatatype[0], // Only string variant selected
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedTracesFields,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys, // Contains number variant
|
||||
);
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col): col is ColumnType<RowData> =>
|
||||
'dataIndex' in col && (col.dataIndex as string) === HTTP_STATUS_CODE,
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is set correctly
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBe(true);
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
|
||||
// Check for tooltip icon (InfoCircleOutlined)
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('hides tooltip icon when all conflicting variants are selected', () => {
|
||||
const selectedTracesFields: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByDatatype, // Both variants selected
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedTracesFields,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
const statusCodeColumn = columns.find(
|
||||
(col): col is ColumnType<RowData> =>
|
||||
'dataIndex' in col && (col.dataIndex as string) === HTTP_STATUS_CODE,
|
||||
);
|
||||
|
||||
expect(statusCodeColumn).toBeDefined();
|
||||
|
||||
// Verify that _hasUnselectedConflict metadata is NOT set when all variants are selected
|
||||
const columnRecord = statusCodeColumn as Record<string, unknown>;
|
||||
expect(columnRecord._hasUnselectedConflict).toBeUndefined();
|
||||
|
||||
if (!statusCodeColumn) {
|
||||
throw new Error(COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(statusCodeColumn);
|
||||
|
||||
// Tooltip icon should NOT be present when all variants are selected
|
||||
const tooltipIcon = container.querySelector('.anticon-info-circle');
|
||||
expect(tooltipIcon).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows context in header for attribute/resource conflicting fields', () => {
|
||||
// When same datatype but different contexts, it shows context
|
||||
const selectedTracesFields: TelemetryFieldKey[] = [
|
||||
...mockConflictingFieldsByContext, // Both resource and attribute variants
|
||||
];
|
||||
|
||||
const columns = getListColumns(
|
||||
selectedTracesFields,
|
||||
mockFormatTimezoneAdjustedTimestamp,
|
||||
mockAllAvailableKeys,
|
||||
);
|
||||
|
||||
const serviceNameColumn = columns.find(
|
||||
(col): col is ColumnType<RowData> =>
|
||||
'dataIndex' in col && (col.dataIndex as string) === SERVICE_NAME,
|
||||
);
|
||||
|
||||
expect(serviceNameColumn).toBeDefined();
|
||||
|
||||
if (!serviceNameColumn) {
|
||||
throw new Error(SERVICE_NAME_COLUMN_UNDEFINED_ERROR);
|
||||
}
|
||||
|
||||
const { container } = renderColumnHeader(serviceNameColumn);
|
||||
expect(container.textContent).toContain('service.name (resource)');
|
||||
expect(container.textContent).toContain('resource');
|
||||
});
|
||||
});
|
||||
37
frontend/src/tests/columnHeaderHelpers.tsx
Normal file
37
frontend/src/tests/columnHeaderHelpers.tsx
Normal file
@@ -0,0 +1,37 @@
|
||||
import { InfoCircleOutlined } from '@ant-design/icons';
|
||||
import { render } from '@testing-library/react';
|
||||
import { Tooltip } from 'antd';
|
||||
import { ColumnsType, ColumnType } from 'antd/es/table';
|
||||
import {
|
||||
ColumnTitleIcon,
|
||||
ColumnTitleWrapper,
|
||||
} from 'container/OptionsMenu/styles';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
|
||||
/**
|
||||
* Helper function that mimics ResizeTable's column title transformation logic.
|
||||
* This renders the column header the way it appears in the actual table when
|
||||
* onDragColumn is provided (which adds the tooltip icon for conflicting variants).
|
||||
*
|
||||
* Works with both ColumnType and ColumnsType column definitions.
|
||||
*/
|
||||
export const renderColumnHeader = <T extends RowData | Record<string, unknown>>(
|
||||
column: ColumnType<T> | ColumnsType<T>[number],
|
||||
): ReturnType<typeof render> => {
|
||||
const columnRecord = column as Record<string, unknown>;
|
||||
const hasUnselectedConflict = columnRecord._hasUnselectedConflict === true;
|
||||
const titleText = column?.title?.toString() || '';
|
||||
|
||||
return render(
|
||||
<ColumnTitleWrapper>
|
||||
{titleText}
|
||||
{hasUnselectedConflict && (
|
||||
<Tooltip title="The same column with a different type or context exists">
|
||||
<ColumnTitleIcon>
|
||||
<InfoCircleOutlined />
|
||||
</ColumnTitleIcon>
|
||||
</Tooltip>
|
||||
)}
|
||||
</ColumnTitleWrapper>,
|
||||
);
|
||||
};
|
||||
@@ -100,10 +100,8 @@ func newProvider(
|
||||
traceAggExprRewriter,
|
||||
)
|
||||
|
||||
logsKeyEvolutionMetadata := telemetrylogs.NewKeyEvolutionMetadata(telemetryStore, cache, settings.Logger)
|
||||
|
||||
// Create log statement builder
|
||||
logFieldMapper := telemetrylogs.NewFieldMapper(logsKeyEvolutionMetadata)
|
||||
logFieldMapper := telemetrylogs.NewFieldMapper()
|
||||
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
|
||||
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
|
||||
settings,
|
||||
|
||||
@@ -51,8 +51,6 @@ func NewAggExprRewriter(
|
||||
// and the args if the parametric aggregation function is used.
|
||||
func (r *aggExprRewriter) Rewrite(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
expr string,
|
||||
rateInterval uint64,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
@@ -79,12 +77,7 @@ func (r *aggExprRewriter) Rewrite(
|
||||
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
|
||||
}
|
||||
|
||||
visitor := newExprVisitor(
|
||||
ctx,
|
||||
startNs,
|
||||
endNs,
|
||||
r.logger,
|
||||
keys,
|
||||
visitor := newExprVisitor(r.logger, keys,
|
||||
r.fullTextColumn,
|
||||
r.fieldMapper,
|
||||
r.conditionBuilder,
|
||||
@@ -105,8 +98,6 @@ func (r *aggExprRewriter) Rewrite(
|
||||
// RewriteMulti rewrites a slice of expressions.
|
||||
func (r *aggExprRewriter) RewriteMulti(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
exprs []string,
|
||||
rateInterval uint64,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
@@ -115,7 +106,7 @@ func (r *aggExprRewriter) RewriteMulti(
|
||||
var errs []error
|
||||
var chArgsList [][]any
|
||||
for i, e := range exprs {
|
||||
w, chArgs, err := r.Rewrite(ctx, startNs, endNs, e, rateInterval, keys)
|
||||
w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
out[i] = e
|
||||
@@ -132,9 +123,6 @@ func (r *aggExprRewriter) RewriteMulti(
|
||||
|
||||
// exprVisitor walks FunctionExpr nodes and applies the mappers.
|
||||
type exprVisitor struct {
|
||||
ctx context.Context
|
||||
startNs uint64
|
||||
endNs uint64
|
||||
chparser.DefaultASTVisitor
|
||||
logger *slog.Logger
|
||||
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
|
||||
@@ -149,9 +137,6 @@ type exprVisitor struct {
|
||||
}
|
||||
|
||||
func newExprVisitor(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
logger *slog.Logger,
|
||||
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
@@ -161,9 +146,6 @@ func newExprVisitor(
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *exprVisitor {
|
||||
return &exprVisitor{
|
||||
ctx: ctx,
|
||||
startNs: startNs,
|
||||
endNs: endNs,
|
||||
logger: logger,
|
||||
fieldKeys: fieldKeys,
|
||||
fullTextColumn: fullTextColumn,
|
||||
@@ -208,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
if aggFunc.FuncCombinator {
|
||||
// Map the predicate (last argument)
|
||||
origPred := args[len(args)-1].String()
|
||||
whereClause, err := PrepareWhereClause(
|
||||
whereClause, err := PrepareWhereClause(
|
||||
origPred,
|
||||
FilterExprVisitorOpts{
|
||||
Logger: v.logger,
|
||||
@@ -218,7 +200,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
FullTextColumn: v.fullTextColumn,
|
||||
JsonBodyPrefix: v.jsonBodyPrefix,
|
||||
JsonKeyToKey: v.jsonKeyToKey,
|
||||
}, v.startNs, v.endNs,
|
||||
}, 0, 0,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -238,7 +220,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
for i := 0; i < len(args)-1; i++ {
|
||||
origVal := args[i].String()
|
||||
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
|
||||
}
|
||||
@@ -256,7 +238,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
for i, arg := range args {
|
||||
orig := arg.String()
|
||||
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -19,8 +19,6 @@ import (
|
||||
|
||||
func CollisionHandledFinalExpr(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
field *telemetrytypes.TelemetryFieldKey,
|
||||
fm qbtypes.FieldMapper,
|
||||
cb qbtypes.ConditionBuilder,
|
||||
@@ -47,7 +45,7 @@ func CollisionHandledFinalExpr(
|
||||
|
||||
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, startNs, endNs)
|
||||
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -60,7 +58,7 @@ func CollisionHandledFinalExpr(
|
||||
return nil
|
||||
}
|
||||
|
||||
colName, err := fm.FieldFor(ctx, startNs, endNs, field)
|
||||
colName, err := fm.FieldFor(ctx, field)
|
||||
if errors.Is(err, qbtypes.ErrColumnNotFound) {
|
||||
// the key didn't have the right context to be added to the query
|
||||
// we try to use the context we know of
|
||||
@@ -95,7 +93,7 @@ func CollisionHandledFinalExpr(
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
colName, _ = fm.FieldFor(ctx, startNs, endNs, key)
|
||||
colName, _ = fm.FieldFor(ctx, key)
|
||||
colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
|
||||
stmts = append(stmts, colName)
|
||||
}
|
||||
|
||||
@@ -48,8 +48,8 @@ func (b *defaultConditionBuilder) ConditionFor(
|
||||
op qbtypes.FilterOperator,
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
_ uint64,
|
||||
_ uint64,
|
||||
) (string, error) {
|
||||
|
||||
if key.FieldContext != telemetrytypes.FieldContextResource {
|
||||
@@ -68,7 +68,7 @@ func (b *defaultConditionBuilder) ConditionFor(
|
||||
keyIdxFilter := sb.Like(column.Name, keyIndexFilter(key))
|
||||
valueForIndexFilter := valueForIndexFilter(op, key, value)
|
||||
|
||||
fieldName, err := b.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
fieldName, err := b.fm.FieldFor(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -47,7 +47,6 @@ func (m *defaultFieldMapper) ColumnFor(
|
||||
|
||||
func (m *defaultFieldMapper) FieldFor(
|
||||
ctx context.Context,
|
||||
tsStart, tsEnd uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
column, err := m.getColumn(ctx, key)
|
||||
@@ -62,11 +61,10 @@ func (m *defaultFieldMapper) FieldFor(
|
||||
|
||||
func (m *defaultFieldMapper) ColumnExpressionFor(
|
||||
ctx context.Context,
|
||||
tsStart, tsEnd uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
_ map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
colName, err := m.FieldFor(ctx, tsStart, tsEnd, key)
|
||||
colName, err := m.FieldFor(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -26,7 +26,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
|
||||
|
||||
func (c *conditionBuilder) conditionFor(
|
||||
ctx context.Context,
|
||||
startNs, endNs uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
operator qbtypes.FilterOperator,
|
||||
value any,
|
||||
@@ -48,7 +47,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
return "", err
|
||||
}
|
||||
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -227,10 +226,10 @@ func (c *conditionBuilder) ConditionFor(
|
||||
operator qbtypes.FilterOperator,
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
_ uint64,
|
||||
_ uint64,
|
||||
) (string, error) {
|
||||
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
|
||||
condition, err := c.conditionFor(ctx, key, operator, value, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -238,12 +237,12 @@ func (c *conditionBuilder) ConditionFor(
|
||||
if operator.AddDefaultExistsFilter() {
|
||||
// skip adding exists filter for intrinsic fields
|
||||
// with an exception for body json search
|
||||
field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
field, _ := c.fm.FieldFor(ctx, key)
|
||||
if slices.Contains(maps.Keys(IntrinsicFields), field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
|
||||
return condition, nil
|
||||
}
|
||||
|
||||
existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
@@ -373,8 +372,7 @@ func TestConditionFor(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
|
||||
for _, tc := range testCases {
|
||||
@@ -427,8 +425,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
|
||||
for _, tc := range testCases {
|
||||
@@ -667,8 +664,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
|
||||
for _, tc := range testCases {
|
||||
|
||||
@@ -4,14 +4,11 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
|
||||
"golang.org/x/exp/maps"
|
||||
@@ -58,13 +55,10 @@ var (
|
||||
)
|
||||
|
||||
type fieldMapper struct {
|
||||
evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore
|
||||
}
|
||||
|
||||
func NewFieldMapper(evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore) qbtypes.FieldMapper {
|
||||
return &fieldMapper{
|
||||
evolutionMetadataStore: evolutionMetadataStore,
|
||||
}
|
||||
func NewFieldMapper() qbtypes.FieldMapper {
|
||||
return &fieldMapper{}
|
||||
}
|
||||
|
||||
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
|
||||
@@ -103,7 +97,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
|
||||
return nil, qbtypes.ErrColumnNotFound
|
||||
}
|
||||
|
||||
func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
column, err := m.getColumn(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
@@ -115,35 +109,19 @@ func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *
|
||||
if key.FieldContext != telemetrytypes.FieldContextResource {
|
||||
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
|
||||
}
|
||||
oldColumn := logsV2Columns["resources_string"]
|
||||
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
|
||||
|
||||
baseColumn := logsV2Columns["resources_string"]
|
||||
tsStartTime := time.Unix(0, int64(tsStart))
|
||||
|
||||
// Extract orgId from context
|
||||
var orgID valuer.UUID
|
||||
if claims, err := authtypes.ClaimsFromContext(ctx); err == nil {
|
||||
orgID, err = valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid orgId %s", claims.OrgID)
|
||||
}
|
||||
}
|
||||
|
||||
// get all evolution for the column
|
||||
evolutions := m.evolutionMetadataStore.Get(ctx, orgID, baseColumn.Name)
|
||||
|
||||
// restricting now to just one entry where we know we changes from map to json
|
||||
if len(evolutions) > 0 && evolutions[0].ReleaseTime.Before(tsStartTime) {
|
||||
return fmt.Sprintf("%s.`%s`::String", column.Name, key.Name), nil
|
||||
}
|
||||
|
||||
// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
|
||||
// once clickHouse dependency is updated, we need to check if we can remove it.
|
||||
if key.Materialized {
|
||||
oldKeyName := telemetrytypes.FieldKeyToMaterializedColumnName(key)
|
||||
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
|
||||
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
|
||||
} else {
|
||||
attrVal := fmt.Sprintf("%s['%s']", baseColumn.Name, key.Name)
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, baseColumn.Name, key.Name, attrVal), nil
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
|
||||
}
|
||||
|
||||
case schema.ColumnTypeString,
|
||||
schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
|
||||
schema.ColumnTypeUInt64,
|
||||
@@ -188,12 +166,11 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
|
||||
|
||||
func (m *fieldMapper) ColumnExpressionFor(
|
||||
ctx context.Context,
|
||||
tsStart, tsEnd uint64,
|
||||
field *telemetrytypes.TelemetryFieldKey,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
|
||||
colName, err := m.FieldFor(ctx, tsStart, tsEnd, field)
|
||||
colName, err := m.FieldFor(ctx, field)
|
||||
if errors.Is(err, qbtypes.ErrColumnNotFound) {
|
||||
// the key didn't have the right context to be added to the query
|
||||
// we try to use the context we know of
|
||||
@@ -203,7 +180,7 @@ func (m *fieldMapper) ColumnExpressionFor(
|
||||
if _, ok := logsV2Columns[field.Name]; ok {
|
||||
// if it is, attach the column name directly
|
||||
field.FieldContext = telemetrytypes.FieldContextLog
|
||||
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
|
||||
colName, _ = m.FieldFor(ctx, field)
|
||||
} else {
|
||||
// - the context is not provided
|
||||
// - there are not keys for the field
|
||||
@@ -221,12 +198,12 @@ func (m *fieldMapper) ColumnExpressionFor(
|
||||
}
|
||||
} else if len(keysForField) == 1 {
|
||||
// we have a single key for the field, use it
|
||||
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
|
||||
colName, _ = m.FieldFor(ctx, keysForField[0])
|
||||
} else {
|
||||
// select any non-empty value from the keys
|
||||
args := []string{}
|
||||
for _, key := range keysForField {
|
||||
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, key)
|
||||
colName, _ = m.FieldFor(ctx, key)
|
||||
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
|
||||
}
|
||||
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
|
||||
|
||||
@@ -3,14 +3,10 @@ package telemetrylogs
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
@@ -168,8 +164,7 @@ func TestGetColumn(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(mockStore)
|
||||
fm := NewFieldMapper()
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
@@ -234,7 +229,7 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Map column type - resource attribute - json",
|
||||
name: "Map column type - resource attribute",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
@@ -243,7 +238,7 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Map column type - resource attribute - Materialized - json",
|
||||
name: "Map column type - resource attribute - Materialized",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
@@ -266,97 +261,8 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(mockStore)
|
||||
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, tc.expectedResult, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestFieldForWithEvolutionMetadata(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
orgId := valuer.GenerateUUID()
|
||||
ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
|
||||
OrgID: orgId.String(),
|
||||
})
|
||||
|
||||
// Create a test release time
|
||||
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
|
||||
releaseTimeNano := uint64(releaseTime.UnixNano())
|
||||
|
||||
// Common test key
|
||||
serviceNameKey := telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
}
|
||||
|
||||
// Common expected results
|
||||
jsonOnlyResult := "resource.`service.name`::String"
|
||||
fallbackResult := "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)"
|
||||
|
||||
// Set up stores once
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
setupResourcesStringEvolutionMetadata(ctx, storeWithMetadata, orgId, releaseTime)
|
||||
storeWithoutMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
tsStart uint64
|
||||
tsEnd uint64
|
||||
key telemetrytypes.TelemetryFieldKey
|
||||
mockStore *telemetrytypestest.MockKeyEvolutionMetadataStore
|
||||
expectedResult string
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
name: "Resource attribute - tsStart before release time (use fallback with multiIf)",
|
||||
tsStart: releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
|
||||
tsEnd: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
key: serviceNameKey,
|
||||
mockStore: storeWithMetadata,
|
||||
expectedResult: fallbackResult,
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Resource attribute - tsStart after release time (use new json column)",
|
||||
tsStart: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
tsEnd: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
key: serviceNameKey,
|
||||
mockStore: storeWithMetadata,
|
||||
expectedResult: jsonOnlyResult,
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Resource attribute - no evolution metadata (use fallback with multiIf)",
|
||||
tsStart: releaseTimeNano,
|
||||
tsEnd: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
key: serviceNameKey,
|
||||
mockStore: storeWithoutMetadata,
|
||||
expectedResult: fallbackResult,
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Resource attribute - tsStart exactly at release time (use fallback with multiIf)",
|
||||
tsStart: releaseTimeNano,
|
||||
tsEnd: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
key: serviceNameKey,
|
||||
mockStore: storeWithMetadata,
|
||||
expectedResult: fallbackResult,
|
||||
expectedError: nil,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
fm := NewFieldMapper(tc.mockStore)
|
||||
result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
|
||||
fm := NewFieldMapper()
|
||||
result, err := fm.FieldFor(ctx, &tc.key)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
|
||||
@@ -5,14 +5,12 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
|
||||
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
keys := buildCompleteFieldKeyMap()
|
||||
@@ -36,7 +34,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
|
||||
|
||||
for _, expr := range tests {
|
||||
t.Run(expr, func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
|
||||
@@ -49,8 +47,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
|
||||
|
||||
// TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
|
||||
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
keys := buildCompleteFieldKeyMap()
|
||||
@@ -74,7 +71,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
|
||||
|
||||
for _, expr := range tests {
|
||||
t.Run(expr, func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
|
||||
|
||||
@@ -7,15 +7,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
|
||||
func TestFilterExprLogsBodyJSON(t *testing.T) {
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
// Define a comprehensive set of field keys to support all test cases
|
||||
@@ -165,7 +163,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
|
||||
for _, tc := range testCases {
|
||||
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
|
||||
|
||||
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
|
||||
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
|
||||
|
||||
if tc.shouldPass {
|
||||
if err != nil {
|
||||
|
||||
@@ -9,15 +9,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
|
||||
func TestFilterExprLogs(t *testing.T) {
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
// Define a comprehensive set of field keys to support all test cases
|
||||
@@ -2425,8 +2423,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
|
||||
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
|
||||
func TestFilterExprLogsConflictNegation(t *testing.T) {
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
// Define a comprehensive set of field keys to support all test cases
|
||||
|
||||
@@ -1,163 +0,0 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/cachetypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
)
|
||||
|
||||
const (
|
||||
// KeyEvolutionMetadataTableName is the table name for key evolution metadata
|
||||
KeyEvolutionMetadataTableName = "distributed_column_key_evolution_metadata"
|
||||
// KeyEvolutionMetadataDBName is the database name for key evolution metadata
|
||||
KeyEvolutionMetadataDBName = "signoz_logs"
|
||||
// KeyEvolutionMetadataCacheKeyPrefix is the prefix for cache keys
|
||||
KeyEvolutionMetadataCacheKeyPrefix = "key_evolution_metadata:"
|
||||
)
|
||||
|
||||
// CachedKeyEvolutionMetadata is a cacheable type for storing key evolution metadata
|
||||
type CachedKeyEvolutionMetadata struct {
|
||||
Keys []*telemetrytypes.KeyEvolutionMetadataKey `json:"keys"`
|
||||
}
|
||||
|
||||
var _ cachetypes.Cacheable = (*CachedKeyEvolutionMetadata)(nil)
|
||||
|
||||
func (c *CachedKeyEvolutionMetadata) MarshalBinary() ([]byte, error) {
|
||||
return json.Marshal(c)
|
||||
}
|
||||
|
||||
func (c *CachedKeyEvolutionMetadata) UnmarshalBinary(data []byte) error {
|
||||
return json.Unmarshal(data, c)
|
||||
}
|
||||
|
||||
// Each key can have multiple evolution entries, allowing for multiple column transitions over time.
|
||||
// The cache is organized by orgId, then by key name.
|
||||
type KeyEvolutionMetadata struct {
|
||||
cache cache.Cache
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
logger *slog.Logger
|
||||
}
|
||||
|
||||
func NewKeyEvolutionMetadata(telemetryStore telemetrystore.TelemetryStore, cache cache.Cache, logger *slog.Logger) *KeyEvolutionMetadata {
|
||||
return &KeyEvolutionMetadata{
|
||||
cache: cache,
|
||||
telemetryStore: telemetryStore,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (k *KeyEvolutionMetadata) fetchFromClickHouse(ctx context.Context, orgID valuer.UUID) {
|
||||
store := k.telemetryStore
|
||||
logger := k.logger
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
if store.ClickhouseDB() == nil {
|
||||
logger.WarnContext(ctx, "ClickHouse connection not available for key evolution metadata fetch")
|
||||
return
|
||||
}
|
||||
|
||||
// Build query to fetch all key evolution metadata
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
sb.Select(
|
||||
"base_column",
|
||||
"base_column_type",
|
||||
"new_column",
|
||||
"new_column_type",
|
||||
"release_time",
|
||||
)
|
||||
sb.From(fmt.Sprintf("%s.%s", KeyEvolutionMetadataDBName, KeyEvolutionMetadataTableName))
|
||||
sb.OrderBy("base_column", "release_time")
|
||||
|
||||
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
|
||||
rows, err := store.ClickhouseDB().Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "Failed to fetch key evolution metadata from ClickHouse", "error", err)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// Group metadata by base_column
|
||||
metadataByKey := make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
|
||||
|
||||
for rows.Next() {
|
||||
var (
|
||||
baseColumn string
|
||||
baseColumnType string
|
||||
newColumn string
|
||||
newColumnType string
|
||||
releaseTime uint64
|
||||
)
|
||||
|
||||
if err := rows.Scan(&baseColumn, &baseColumnType, &newColumn, &newColumnType, &releaseTime); err != nil {
|
||||
logger.WarnContext(ctx, "Failed to scan key evolution metadata row", "error", err)
|
||||
continue
|
||||
}
|
||||
|
||||
key := &telemetrytypes.KeyEvolutionMetadataKey{
|
||||
BaseColumn: baseColumn,
|
||||
BaseColumnType: baseColumnType,
|
||||
NewColumn: newColumn,
|
||||
NewColumnType: newColumnType,
|
||||
ReleaseTime: time.Unix(0, int64(releaseTime)),
|
||||
}
|
||||
|
||||
metadataByKey[baseColumn] = append(metadataByKey[baseColumn], key)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
logger.WarnContext(ctx, "Error iterating key evolution metadata rows", "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Store each key's metadata in cache
|
||||
for keyName, keys := range metadataByKey {
|
||||
cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
|
||||
cachedData := &CachedKeyEvolutionMetadata{Keys: keys}
|
||||
if err := k.cache.Set(ctx, orgID, cacheKey, cachedData, 24*time.Hour); err != nil {
|
||||
logger.WarnContext(ctx, "Failed to set key evolution metadata in cache", "key", keyName, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
logger.DebugContext(ctx, "Successfully fetched key evolution metadata from ClickHouse", "count", len(metadataByKey))
|
||||
}
|
||||
|
||||
// Add adds a metadata key for the given key name and orgId.
|
||||
// This is primarily for testing purposes. In production, data should come from ClickHouse.
|
||||
func (k *KeyEvolutionMetadata) Add(ctx context.Context, orgId valuer.UUID, keyName string, key *telemetrytypes.KeyEvolutionMetadataKey) {
|
||||
k.logger.WarnContext(ctx, "Add is not implemented for key evolution metadata")
|
||||
|
||||
}
|
||||
|
||||
// Get retrieves all metadata keys for the given key name and orgId from cache.
|
||||
// Returns an empty slice if the key is not found in cache.
|
||||
func (k *KeyEvolutionMetadata) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
|
||||
|
||||
cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
|
||||
var cachedData CachedKeyEvolutionMetadata
|
||||
if err := k.cache.Get(ctx, orgId, cacheKey, &cachedData); err != nil {
|
||||
// Cache miss - fetch from ClickHouse and try again
|
||||
k.fetchFromClickHouse(ctx, orgId)
|
||||
|
||||
// Check cache again after fetching
|
||||
if err := k.cache.Get(ctx, orgId, cacheKey, &cachedData); err != nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Return a copy to prevent external modification
|
||||
result := make([]*telemetrytypes.KeyEvolutionMetadataKey, len(cachedData.Keys))
|
||||
copy(result, cachedData.Keys)
|
||||
return result
|
||||
}
|
||||
@@ -247,7 +247,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
continue
|
||||
}
|
||||
// get column expression for the field - use array index directly to avoid pointer to loop variable
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -267,7 +267,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
|
||||
// Add order by
|
||||
for _, orderBy := range query.Order {
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -333,7 +333,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
// Keep original column expressions so we can build the tuple
|
||||
fieldNames := make([]string, 0, len(query.GroupBy))
|
||||
for _, gb := range query.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -347,7 +347,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
allAggChArgs := make([]any, 0)
|
||||
for i, agg := range query.Aggregations {
|
||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||
ctx, start, end, agg.Expression,
|
||||
ctx, agg.Expression,
|
||||
uint64(query.StepInterval.Seconds()),
|
||||
keys,
|
||||
)
|
||||
@@ -479,7 +479,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
||||
var allGroupByArgs []any
|
||||
|
||||
for _, gb := range query.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -496,7 +496,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
|
||||
for idx := range query.Aggregations {
|
||||
aggExpr := query.Aggregations[idx]
|
||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||
ctx, start, end, aggExpr.Expression,
|
||||
ctx, aggExpr.Expression,
|
||||
rateInterval,
|
||||
keys,
|
||||
)
|
||||
@@ -592,7 +592,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
|
||||
JsonBodyPrefix: b.jsonBodyPrefix,
|
||||
JsonKeyToKey: b.jsonKeyToKey,
|
||||
Variables: variables,
|
||||
}, start, end)
|
||||
}, start, end)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -8,11 +8,9 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
@@ -40,14 +38,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
|
||||
}
|
||||
|
||||
func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
|
||||
// Create a test release time
|
||||
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
|
||||
releaseTimeNano := uint64(releaseTime.UnixNano())
|
||||
|
||||
cases := []struct {
|
||||
startTs uint64
|
||||
endTs uint64
|
||||
name string
|
||||
requestType qbtypes.RequestType
|
||||
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
|
||||
@@ -55,16 +46,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
expectedErr error
|
||||
}{
|
||||
{
|
||||
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
name: "Time series with limit and count distinct on service.name",
|
||||
name: "Time series with limit",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
Aggregations: []qbtypes.LogAggregation{
|
||||
{
|
||||
Expression: "count_distinct(service.name)",
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
Filter: &qbtypes.Filter{
|
||||
@@ -80,22 +69,20 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
startTs: releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
|
||||
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
name: "Time series with OR b/w resource attr and attribute filter and count distinct on service.name",
|
||||
name: "Time series with OR b/w resource attr and attribute filter",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
Aggregations: []qbtypes.LogAggregation{
|
||||
{
|
||||
Expression: "count_distinct(service.name)",
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
Filter: &qbtypes.Filter{
|
||||
@@ -111,14 +98,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
|
||||
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1705224600), uint64(1705485600), "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600), 10, "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600)},
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
|
||||
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
name: "Time series with limit + custom order by",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
@@ -152,14 +137,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
name: "Time series with group by on materialized column",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
@@ -186,12 +169,10 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `materialized.key.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`materialized.key.name`) GLOBAL IN (SELECT `materialized.key.name` FROM __limit_cte) GROUP BY ts, `materialized.key.name`",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
|
||||
},
|
||||
},
|
||||
{
|
||||
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
|
||||
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
|
||||
name: "Time series with materialised column using or with regex operator",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
@@ -209,21 +190,13 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
|
||||
Args: []any{uint64(1705397400), uint64(1705485600), "redis.*", true, "memcached", true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "redis.*", true, "memcached", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
orgId := valuer.GenerateUUID()
|
||||
ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
|
||||
OrgID: orgId.String(),
|
||||
})
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
setupResourcesStringEvolutionMetadata(ctx, storeWithMetadata, orgId, releaseTime)
|
||||
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
@@ -247,7 +220,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
|
||||
q, err := statementBuilder.Build(ctx, c.startTs, c.endTs, c.requestType, c.query, nil)
|
||||
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
|
||||
|
||||
if c.expectedErr != nil {
|
||||
require.Error(t, err)
|
||||
@@ -344,8 +317,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
@@ -454,8 +426,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
@@ -529,8 +500,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
@@ -626,8 +596,7 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
|
||||
fm := NewFieldMapper(storeWithMetadata)
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// Helper function to limit string length for display
|
||||
@@ -955,20 +951,3 @@ func buildCompleteFieldKeyMapCollision() map[string][]*telemetrytypes.TelemetryF
|
||||
}
|
||||
return keysMap
|
||||
}
|
||||
|
||||
// buildKeyEvolutionMetadataForResourcesString returns key evolution metadata for resources_string.
|
||||
// This can be used to populate a mock key evolution metadata store in tests.
|
||||
func buildKeyEvolutionMetadataForResourcesString(releaseTime time.Time) *telemetrytypes.KeyEvolutionMetadataKey {
|
||||
return &telemetrytypes.KeyEvolutionMetadataKey{
|
||||
BaseColumn: "resources_string",
|
||||
BaseColumnType: "Map(LowCardinality(String), String)",
|
||||
NewColumn: "resource",
|
||||
NewColumnType: "JSON(max_dynamic_paths=100)",
|
||||
ReleaseTime: releaseTime,
|
||||
}
|
||||
}
|
||||
|
||||
// setupResourcesStringEvolutionMetadata sets up resources_string evolution metadata in the mock store.
|
||||
func setupResourcesStringEvolutionMetadata(ctx context.Context, m *telemetrytypestest.MockKeyEvolutionMetadataStore, orgId valuer.UUID, releaseTime time.Time) {
|
||||
m.Add(ctx, orgId, "resources_string", buildKeyEvolutionMetadataForResourcesString(releaseTime))
|
||||
}
|
||||
|
||||
@@ -25,7 +25,8 @@ func (c *conditionBuilder) ConditionFor(
|
||||
operator qbtypes.FilterOperator,
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
tsStart, tsEnd uint64,
|
||||
_ uint64,
|
||||
_ uint64,
|
||||
) (string, error) {
|
||||
|
||||
switch operator {
|
||||
@@ -44,7 +45,7 @@ func (c *conditionBuilder) ConditionFor(
|
||||
return "", nil
|
||||
}
|
||||
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, tsStart, tsEnd, key)
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, key)
|
||||
if err != nil {
|
||||
// if we don't have a table field name, we can't build a condition for related values
|
||||
return "", nil
|
||||
|
||||
@@ -53,7 +53,7 @@ func TestConditionFor(t *testing.T) {
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
|
||||
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
|
||||
sb.Where(cond)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
|
||||
@@ -51,7 +51,7 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
|
||||
return column, nil
|
||||
}
|
||||
|
||||
func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
column, err := m.getColumn(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
@@ -69,12 +69,11 @@ func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *
|
||||
|
||||
func (m *fieldMapper) ColumnExpressionFor(
|
||||
ctx context.Context,
|
||||
startNs, endNs uint64,
|
||||
field *telemetrytypes.TelemetryFieldKey,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
|
||||
colName, err := m.FieldFor(ctx, startNs, endNs, field)
|
||||
colName, err := m.FieldFor(ctx, field)
|
||||
if errors.Is(err, qbtypes.ErrColumnNotFound) {
|
||||
// the key didn't have the right context to be added to the query
|
||||
// we try to use the context we know of
|
||||
@@ -84,7 +83,7 @@ func (m *fieldMapper) ColumnExpressionFor(
|
||||
if _, ok := attributeMetadataColumns[field.Name]; ok {
|
||||
// if it is, attach the column name directly
|
||||
field.FieldContext = telemetrytypes.FieldContextSpan
|
||||
colName, _ = m.FieldFor(ctx, startNs, endNs, field)
|
||||
colName, _ = m.FieldFor(ctx, field)
|
||||
} else {
|
||||
// - the context is not provided
|
||||
// - there are not keys for the field
|
||||
@@ -102,12 +101,12 @@ func (m *fieldMapper) ColumnExpressionFor(
|
||||
}
|
||||
} else if len(keysForField) == 1 {
|
||||
// we have a single key for the field, use it
|
||||
colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
|
||||
colName, _ = m.FieldFor(ctx, keysForField[0])
|
||||
} else {
|
||||
// select any non-empty value from the keys
|
||||
args := []string{}
|
||||
for _, key := range keysForField {
|
||||
colName, _ = m.FieldFor(ctx, startNs, endNs, key)
|
||||
colName, _ = m.FieldFor(ctx, key)
|
||||
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
|
||||
}
|
||||
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
|
||||
|
||||
@@ -145,8 +145,6 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
tsStart uint64
|
||||
tsEnd uint64
|
||||
key telemetrytypes.TelemetryFieldKey
|
||||
expectedResult string
|
||||
expectedError error
|
||||
@@ -205,7 +203,7 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
|
||||
result, err := fm.FieldFor(ctx, &tc.key)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
|
||||
@@ -942,18 +942,18 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
|
||||
FieldDataType: fieldValueSelector.FieldDataType,
|
||||
}
|
||||
|
||||
selectColumn, err := t.fm.FieldFor(ctx, 0, 0, key)
|
||||
selectColumn, err := t.fm.FieldFor(ctx, key)
|
||||
|
||||
if err != nil {
|
||||
// we don't have a explicit column to select from the related metadata table
|
||||
// so we will select either from resource_attributes or attributes table
|
||||
// in that order
|
||||
resourceColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
|
||||
resourceColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
|
||||
Name: key.Name,
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
})
|
||||
attributeColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
|
||||
attributeColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
|
||||
Name: key.Name,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
@@ -978,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
|
||||
FieldMapper: t.fm,
|
||||
ConditionBuilder: t.conditionBuilder,
|
||||
FieldKeys: keys,
|
||||
}, 0, 0)
|
||||
}, 0, 0)
|
||||
if err == nil {
|
||||
sb.AddWhereClause(whereClause.WhereClause)
|
||||
} else {
|
||||
@@ -1002,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
|
||||
|
||||
// search on attributes
|
||||
key.FieldContext = telemetrytypes.FieldContextAttribute
|
||||
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
|
||||
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
|
||||
if err == nil {
|
||||
conds = append(conds, cond)
|
||||
}
|
||||
|
||||
// search on resource
|
||||
key.FieldContext = telemetrytypes.FieldContextResource
|
||||
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
|
||||
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
|
||||
if err == nil {
|
||||
conds = append(conds, cond)
|
||||
}
|
||||
key.FieldContext = origContext
|
||||
} else {
|
||||
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
|
||||
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
|
||||
if err == nil {
|
||||
conds = append(conds, cond)
|
||||
}
|
||||
|
||||
@@ -120,7 +120,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
|
||||
stepSec,
|
||||
))
|
||||
for _, g := range query.GroupBy {
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return "", []any{}, err
|
||||
}
|
||||
@@ -148,7 +148,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
|
||||
FieldKeys: keys,
|
||||
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
|
||||
Variables: variables,
|
||||
}, start, end)
|
||||
}, start, end)
|
||||
if err != nil {
|
||||
return "", []any{}, err
|
||||
}
|
||||
@@ -200,7 +200,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
|
||||
))
|
||||
|
||||
for _, g := range query.GroupBy {
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
@@ -231,7 +231,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
|
||||
FieldKeys: keys,
|
||||
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
|
||||
Variables: variables,
|
||||
}, start, end)
|
||||
}, start, end)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
@@ -270,7 +270,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
|
||||
stepSec,
|
||||
))
|
||||
for _, g := range query.GroupBy {
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
@@ -295,7 +295,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
|
||||
FieldKeys: keys,
|
||||
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
|
||||
Variables: variables,
|
||||
}, start, end)
|
||||
}, start, end)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
@@ -23,8 +23,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
|
||||
|
||||
func (c *conditionBuilder) conditionFor(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
operator qbtypes.FilterOperator,
|
||||
value any,
|
||||
@@ -41,7 +39,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
value = querybuilder.FormatValueForContains(value)
|
||||
}
|
||||
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -141,10 +139,10 @@ func (c *conditionBuilder) ConditionFor(
|
||||
operator qbtypes.FilterOperator,
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
_ uint64,
|
||||
_ uint64,
|
||||
) (string, error) {
|
||||
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
|
||||
condition, err := c.conditionFor(ctx, key, operator, value, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -65,7 +65,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
|
||||
return nil, qbtypes.ErrColumnNotFound
|
||||
}
|
||||
|
||||
func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
column, err := m.getColumn(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
@@ -92,12 +92,11 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
|
||||
|
||||
func (m *fieldMapper) ColumnExpressionFor(
|
||||
ctx context.Context,
|
||||
startNs, endNs uint64,
|
||||
field *telemetrytypes.TelemetryFieldKey,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
|
||||
colName, err := m.FieldFor(ctx, startNs, endNs, field)
|
||||
colName, err := m.FieldFor(ctx, field)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -207,7 +207,7 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
|
||||
result, err := fm.FieldFor(ctx, &tc.key)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
|
||||
@@ -359,7 +359,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
|
||||
|
||||
sb.Select("fingerprint")
|
||||
for _, g := range query.GroupBy {
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
|
||||
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
@@ -29,8 +29,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
|
||||
|
||||
func (c *conditionBuilder) conditionFor(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
operator qbtypes.FilterOperator,
|
||||
value any,
|
||||
@@ -54,7 +52,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
}
|
||||
|
||||
// then ask the mapper for the actual SQL reference
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -229,20 +227,20 @@ func (c *conditionBuilder) ConditionFor(
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
_ uint64,
|
||||
) (string, error) {
|
||||
if c.isSpanScopeField(key.Name) {
|
||||
return c.buildSpanScopeCondition(key, operator, value, startNs)
|
||||
}
|
||||
|
||||
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
|
||||
condition, err := c.conditionFor(ctx, key, operator, value, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if operator.AddDefaultExistsFilter() {
|
||||
// skip adding exists filter for intrinsic fields
|
||||
field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
field, _ := c.fm.FieldFor(ctx, key)
|
||||
if slices.Contains(maps.Keys(IntrinsicFields), field) ||
|
||||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
|
||||
slices.Contains(maps.Keys(CalculatedFields), field) ||
|
||||
@@ -250,7 +248,7 @@ func (c *conditionBuilder) ConditionFor(
|
||||
return condition, nil
|
||||
}
|
||||
|
||||
existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -225,7 +225,6 @@ func (m *defaultFieldMapper) ColumnFor(
|
||||
// otherwise it returns qbtypes.ErrColumnNotFound
|
||||
func (m *defaultFieldMapper) FieldFor(
|
||||
ctx context.Context,
|
||||
startNs, endNs uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
// Special handling for span scope fields
|
||||
@@ -304,12 +303,11 @@ func (m *defaultFieldMapper) FieldFor(
|
||||
// if it exists otherwise it returns qbtypes.ErrColumnNotFound
|
||||
func (m *defaultFieldMapper) ColumnExpressionFor(
|
||||
ctx context.Context,
|
||||
startNs, endNs uint64,
|
||||
field *telemetrytypes.TelemetryFieldKey,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
|
||||
colName, err := m.FieldFor(ctx, startNs, endNs, field)
|
||||
colName, err := m.FieldFor(ctx, field)
|
||||
if errors.Is(err, qbtypes.ErrColumnNotFound) {
|
||||
// the key didn't have the right context to be added to the query
|
||||
// we try to use the context we know of
|
||||
@@ -319,7 +317,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
|
||||
if _, ok := indexV3Columns[field.Name]; ok {
|
||||
// if it is, attach the column name directly
|
||||
field.FieldContext = telemetrytypes.FieldContextSpan
|
||||
colName, _ = m.FieldFor(ctx, startNs, endNs, field)
|
||||
colName, _ = m.FieldFor(ctx, field)
|
||||
} else {
|
||||
// - the context is not provided
|
||||
// - there are not keys for the field
|
||||
@@ -337,12 +335,12 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
|
||||
}
|
||||
} else if len(keysForField) == 1 {
|
||||
// we have a single key for the field, use it
|
||||
colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
|
||||
colName, _ = m.FieldFor(ctx, keysForField[0])
|
||||
} else {
|
||||
// select any non-empty value from the keys
|
||||
args := []string{}
|
||||
for _, key := range keysForField {
|
||||
colName, _ = m.FieldFor(ctx, startNs, endNs, key)
|
||||
colName, _ = m.FieldFor(ctx, key)
|
||||
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
|
||||
}
|
||||
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
|
||||
|
||||
@@ -92,7 +92,7 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
fm := NewFieldMapper()
|
||||
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
|
||||
result, err := fm.FieldFor(ctx, &tc.key)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
|
||||
@@ -293,7 +293,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
|
||||
|
||||
// TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs?
|
||||
for _, field := range selectedFields {
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &field, keys)
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -311,7 +311,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
|
||||
|
||||
// Add order by
|
||||
for _, orderBy := range query.Order {
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
// Keep original column expressions so we can build the tuple
|
||||
fieldNames := make([]string, 0, len(query.GroupBy))
|
||||
for _, gb := range query.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -509,7 +509,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
allAggChArgs := make([]any, 0)
|
||||
for i, agg := range query.Aggregations {
|
||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||
ctx, start, end, agg.Expression,
|
||||
ctx, agg.Expression,
|
||||
uint64(query.StepInterval.Seconds()),
|
||||
keys,
|
||||
)
|
||||
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
|
||||
|
||||
var allGroupByArgs []any
|
||||
for _, gb := range query.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -654,7 +654,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
|
||||
for idx := range query.Aggregations {
|
||||
aggExpr := query.Aggregations[idx]
|
||||
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
|
||||
ctx, start, end, aggExpr.Expression,
|
||||
ctx, aggExpr.Expression,
|
||||
rateInterval,
|
||||
keys,
|
||||
)
|
||||
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
|
||||
FieldKeys: keys,
|
||||
SkipResourceFilter: true,
|
||||
Variables: variables,
|
||||
}, start, end)
|
||||
}, start, end)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
|
||||
ConditionBuilder: b.stmtBuilder.cb,
|
||||
FieldKeys: keys,
|
||||
SkipResourceFilter: true,
|
||||
}, b.start, b.end,
|
||||
}, b.start, b.end,
|
||||
)
|
||||
if err != nil {
|
||||
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
|
||||
@@ -450,7 +450,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
|
||||
if selectedFields[field.Name] {
|
||||
continue
|
||||
}
|
||||
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &field, keys)
|
||||
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &field, keys)
|
||||
if err != nil {
|
||||
b.stmtBuilder.logger.WarnContext(ctx, "failed to map select field",
|
||||
"field", field.Name, "error", err)
|
||||
@@ -465,7 +465,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
|
||||
// Add order by support using ColumnExpressionFor
|
||||
orderApplied := false
|
||||
for _, orderBy := range b.operator.Order {
|
||||
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &orderBy.Key.TelemetryFieldKey, keys)
|
||||
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -547,8 +547,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
|
||||
for _, gb := range b.operator.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(
|
||||
ctx,
|
||||
b.start,
|
||||
b.end,
|
||||
&gb.TelemetryFieldKey,
|
||||
b.stmtBuilder.fm,
|
||||
b.stmtBuilder.cb,
|
||||
@@ -574,8 +572,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
|
||||
ctx,
|
||||
b.start,
|
||||
b.end,
|
||||
agg.Expression,
|
||||
uint64(b.operator.StepInterval.Seconds()),
|
||||
keys,
|
||||
@@ -661,8 +657,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
|
||||
for _, gb := range b.operator.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(
|
||||
ctx,
|
||||
b.start,
|
||||
b.end,
|
||||
&gb.TelemetryFieldKey,
|
||||
b.stmtBuilder.fm,
|
||||
b.stmtBuilder.cb,
|
||||
@@ -690,8 +684,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
|
||||
ctx,
|
||||
b.start,
|
||||
b.end,
|
||||
agg.Expression,
|
||||
rateInterval,
|
||||
keys,
|
||||
@@ -805,8 +797,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
|
||||
for _, gb := range b.operator.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(
|
||||
ctx,
|
||||
b.start,
|
||||
b.end,
|
||||
&gb.TelemetryFieldKey,
|
||||
b.stmtBuilder.fm,
|
||||
b.stmtBuilder.cb,
|
||||
@@ -832,8 +822,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
|
||||
ctx,
|
||||
b.start,
|
||||
b.end,
|
||||
agg.Expression,
|
||||
uint64((b.end-b.start)/querybuilder.NsToSeconds),
|
||||
keys,
|
||||
|
||||
@@ -21,11 +21,11 @@ type JsonKeyToFieldFunc func(context.Context, *telemetrytypes.TelemetryFieldKey,
|
||||
// FieldMapper maps the telemetry field key to the table field name.
|
||||
type FieldMapper interface {
|
||||
// FieldFor returns the field name for the given key.
|
||||
FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error)
|
||||
FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error)
|
||||
// ColumnFor returns the column for the given key.
|
||||
ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error)
|
||||
// ColumnExpressionFor returns the column expression for the given key.
|
||||
ColumnExpressionFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
|
||||
ColumnExpressionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
|
||||
}
|
||||
|
||||
// ConditionBuilder builds the condition for the filter.
|
||||
@@ -37,8 +37,8 @@ type ConditionBuilder interface {
|
||||
|
||||
type AggExprRewriter interface {
|
||||
// Rewrite rewrites the aggregation expression to be used in the query.
|
||||
Rewrite(ctx context.Context, startNs, endNs uint64, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
|
||||
RewriteMulti(ctx context.Context, startNs, endNs uint64, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
|
||||
Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
|
||||
RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
|
||||
}
|
||||
|
||||
type Statement struct {
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
package telemetrytypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type KeyEvolutionMetadataKey struct {
|
||||
BaseColumn string
|
||||
BaseColumnType string
|
||||
NewColumn string
|
||||
NewColumnType string
|
||||
ReleaseTime time.Time
|
||||
}
|
||||
|
||||
type KeyEvolutionMetadataStore interface {
|
||||
Get(ctx context.Context, orgId valuer.UUID, keyName string) []*KeyEvolutionMetadataKey
|
||||
Add(ctx context.Context, orgId valuer.UUID, keyName string, key *KeyEvolutionMetadataKey)
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
package telemetrytypestest
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// MockKeyEvolutionMetadataStore implements the KeyEvolutionMetadataStore interface for testing purposes
|
||||
type MockKeyEvolutionMetadataStore struct {
|
||||
metadata map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey // orgId -> keyName -> metadata
|
||||
}
|
||||
|
||||
// NewMockKeyEvolutionMetadataStore creates a new instance of MockKeyEvolutionMetadataStore with initialized maps
|
||||
func NewMockKeyEvolutionMetadataStore() *MockKeyEvolutionMetadataStore {
|
||||
return &MockKeyEvolutionMetadataStore{
|
||||
metadata: make(map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey),
|
||||
}
|
||||
}
|
||||
|
||||
// Get retrieves all metadata keys for the given key name and orgId.
|
||||
// Returns an empty slice if the key is not found.
|
||||
func (m *MockKeyEvolutionMetadataStore) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
|
||||
if m.metadata == nil {
|
||||
return nil
|
||||
}
|
||||
orgMetadata, orgExists := m.metadata[orgId.String()]
|
||||
if !orgExists {
|
||||
return nil
|
||||
}
|
||||
keys, exists := orgMetadata[keyName]
|
||||
if !exists {
|
||||
return nil
|
||||
}
|
||||
// Return a copy to prevent external modification
|
||||
result := make([]*telemetrytypes.KeyEvolutionMetadataKey, len(keys))
|
||||
copy(result, keys)
|
||||
return result
|
||||
}
|
||||
|
||||
// Add adds a metadata key for the given key name and orgId
|
||||
func (m *MockKeyEvolutionMetadataStore) Add(ctx context.Context, orgId valuer.UUID, keyName string, key *telemetrytypes.KeyEvolutionMetadataKey) {
|
||||
if m.metadata == nil {
|
||||
m.metadata = make(map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
|
||||
}
|
||||
if m.metadata[orgId.String()] == nil {
|
||||
m.metadata[orgId.String()] = make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
|
||||
}
|
||||
m.metadata[orgId.String()][keyName] = append(m.metadata[orgId.String()][keyName], key)
|
||||
}
|
||||
Reference in New Issue
Block a user