Compare commits

..

12 Commits

Author SHA1 Message Date
ahmadshaheer
22d1b90e2a chore: add optional chaining .keys 2025-12-15 18:35:38 +04:30
ahmadshaheer
aa9a2863af chore: add tests for add/remove columns from log detail drawer 2025-12-15 15:16:06 +04:30
ahmadshaheer
c5fddb2e09 feat: add ability to add/remove columns from log detail page 2025-12-14 16:39:05 +04:30
Piyush Singariya
e66bfe5961 feat(JSON): JSON Body Metadata (#9593)
* feat: json Body Keys

* feat: telemetry types

* feat: change ExtractBodyPaths

* chore: minor comment change

* chore: func rename, file rename

* chore: change table names

* chore: reflect changes from the overhaul

* test: fixing test 1

* fix: test TestQueryToKeys

* fix: test TestPrepareLogsQuery

* chore: remove db

* chore: go mod

* chore: changes based on review

* chore: changes based on review

* fix: in LIKE operation

* chore: addressed few changes

* revert: test file

* fix: comparison fix

* test: add TestBuildListLogsJSONIndexesQuery

* fix: in test TestBuildListLogsJSONIndexesQuery

* fix: pull promoted paths in single db call

* fix: reducing db calls

* test: fix TestBuildListLogsJSONIndexesQuery

* fix: test TestConditionForJSONBodySearch

* fix: lint try 1

* chore: review changes based on cursor

* fix: use enums only

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2025-12-09 20:47:26 +07:00
Nikhil Mantri
42943f72b7 chore(metric-metadata): do not delete rows, keep inserting, pick latest 2025-12-09 05:46:48 +00:00
Nikhil Mantri
7a72a209e5 chore: metric highlights API for detailed view (#9679) 2025-12-09 00:09:23 +05:30
Karan Balani
44f00943a8 fix: tokenizer cache ttls and guardrails for config (#9776) 2025-12-05 11:01:05 +05:30
Nikhil Mantri
8867e1ef38 chore: metric Metadata Seperate Attributes API (#9622) 2025-12-04 19:38:37 +00:00
Abhi kumar
c08e520941 chore: removed alertRuleProvider from global state (#9648) 2025-12-04 10:27:50 +00:00
Ishan
139cc4452d style: metrics custom function css overflow issues (#9660)
* style: metrics custom function css overflow issues

* feat: add support for recovery threshold (#9428)

* feat: created common component for overflowing input tooltip

* feat: updated function.tsx to have useMemo for debounce

* feat: removed unwanted useEffect and moved inline css to separate file

* feat: re-applied useEffect due to css issues

* feat: removed inline styling

* feat: updated mirror ref to be in common component along with css updates

* feat: reverted prom_rule.go

* feat: code cleanup - input ref/forwardref cleanup

* feat: code cleanup - updated test file

* feat: extracted mirror-ref outside of tooltip

* feat: removed unwanted css

* feat: code optmized

* feat: test file updated

* feat: snapshot update

---------

Co-authored-by: Ishan Uniyal <ishan@Ishans-MacBook-Pro.local>
Co-authored-by: Abhishek Kumar Singh <hritik6058@gmail.com>
2025-12-04 15:04:39 +05:30
Nityananda Gohain
2f3baeb302 fix: fix third party api filtering (#9770) 2025-12-03 23:36:05 +05:30
Abhishek Kumar Singh
3d42b0058e chore: Query filter extraction API (#9617) 2025-12-03 13:13:32 +00:00
73 changed files with 2397 additions and 460 deletions

View File

@@ -19,6 +19,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/interfaces" "github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model" basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules" rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -60,6 +61,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore), FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz, Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics), QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
}) })
if err != nil { if err != nil {

View File

@@ -22,7 +22,6 @@ import { StatusCodes } from 'http-status-codes';
import history from 'lib/history'; import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'; import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js'; import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App'; import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types'; import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard'; import { DashboardProvider } from 'providers/Dashboard/Dashboard';
@@ -374,26 +373,24 @@ function App(): JSX.Element {
<QueryBuilderProvider> <QueryBuilderProvider>
<DashboardProvider> <DashboardProvider>
<KeyboardHotkeysProvider> <KeyboardHotkeysProvider>
<AlertRuleProvider> <AppLayout>
<AppLayout> <PreferenceContextProvider>
<PreferenceContextProvider> <Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}> <Switch>
<Switch> {routes.map(({ path, component, exact }) => (
{routes.map(({ path, component, exact }) => ( <Route
<Route key={`${path}`}
key={`${path}`} exact={exact}
exact={exact} path={path}
path={path} component={component}
component={component} />
/> ))}
))} <Route exact path="/" component={Home} />
<Route exact path="/" component={Home} /> <Route path="*" component={NotFound} />
<Route path="*" component={NotFound} /> </Switch>
</Switch> </Suspense>
</Suspense> </PreferenceContextProvider>
</PreferenceContextProvider> </AppLayout>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider> </KeyboardHotkeysProvider>
</DashboardProvider> </DashboardProvider>
</QueryBuilderProvider> </QueryBuilderProvider>

View File

@@ -80,12 +80,32 @@ function LogDetailInner({
return stagedQuery.builder.queryData.find((item) => !item.disabled) || null; return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
}, [stagedQuery]); }, [stagedQuery]);
const { options } = useOptionsMenu({ const { options, config } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS, storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: DataSource.LOGS, dataSource: DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP, aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
}); });
const handleAddColumn = useCallback(
(fieldName: string): void => {
if (config?.addColumn?.onSelect) {
// onSelect from SelectProps has signature (value, option), but handleSelectColumns only needs value
// eslint-disable-next-line @typescript-eslint/no-explicit-any
config.addColumn.onSelect(fieldName, {} as any);
}
},
[config],
);
const handleRemoveColumn = useCallback(
(fieldName: string): void => {
if (config?.addColumn?.onRemove) {
config.addColumn.onRemove(fieldName);
}
},
[config],
);
const isDarkMode = useIsDarkMode(); const isDarkMode = useIsDarkMode();
const location = useLocation(); const location = useLocation();
const { safeNavigate } = useSafeNavigate(); const { safeNavigate } = useSafeNavigate();
@@ -369,6 +389,8 @@ function LogDetailInner({
isListViewPanel={isListViewPanel} isListViewPanel={isListViewPanel}
selectedOptions={options} selectedOptions={options}
listViewPanelSelectedFields={listViewPanelSelectedFields} listViewPanelSelectedFields={listViewPanelSelectedFields}
onAddColumn={handleAddColumn}
onRemoveColumn={handleRemoveColumn}
/> />
)} )}
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />} {selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}

View File

@@ -0,0 +1,16 @@
.overflow-input {
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
}
.overflow-input-mirror {
position: absolute;
visibility: hidden;
white-space: pre;
pointer-events: none;
font: inherit;
letter-spacing: inherit;
height: 0;
overflow: hidden;
}

View File

@@ -0,0 +1,119 @@
import { render, screen, userEvent, waitFor, within } from 'tests/test-utils';
import OverflowInputToolTip from './OverflowInputToolTip';
const TOOLTIP_INNER_SELECTOR = '.ant-tooltip-inner';
// Utility to mock overflow behaviour on inputs / elements.
// Stubs HTMLElement.prototype.clientWidth, scrollWidth and offsetWidth used by component.
function mockOverflow(clientWidth: number, scrollWidth: number): void {
Object.defineProperty(HTMLElement.prototype, 'clientWidth', {
configurable: true,
value: clientWidth,
});
Object.defineProperty(HTMLElement.prototype, 'scrollWidth', {
configurable: true,
value: scrollWidth,
});
// mirror.offsetWidth is used to compute mirrorWidth = offsetWidth + 24.
// Use clientWidth so the mirror measurement aligns with the mocked client width in tests.
Object.defineProperty(HTMLElement.prototype, 'offsetWidth', {
configurable: true,
value: clientWidth,
});
}
function queryTooltipInner(): HTMLElement | null {
// find element that has role="tooltip" (could be the inner itself)
const tooltip = document.querySelector<HTMLElement>('[role="tooltip"]');
if (!tooltip) return document.querySelector(TOOLTIP_INNER_SELECTOR);
// if the role element is already the inner, return it; otherwise return its descendant
if (tooltip.classList.contains('ant-tooltip-inner')) return tooltip;
return (
(tooltip.querySelector(TOOLTIP_INNER_SELECTOR) as HTMLElement) ??
document.querySelector(TOOLTIP_INNER_SELECTOR)
);
}
describe('OverflowInputToolTip', () => {
beforeEach(() => {
jest.restoreAllMocks();
});
test('shows tooltip when content overflows and input is clamped at maxAutoWidth', async () => {
mockOverflow(150, 250); // clientWidth >= maxAutoWidth (150), scrollWidth > clientWidth
render(<OverflowInputToolTip value="Very long overflowing text" />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).not.toBeNull();
});
const tooltipInner = queryTooltipInner();
if (!tooltipInner) throw new Error('Tooltip inner not found');
expect(
within(tooltipInner).getByText('Very long overflowing text'),
).toBeInTheDocument();
});
test('does NOT show tooltip when content does not overflow', async () => {
mockOverflow(150, 100); // content fits (scrollWidth <= clientWidth)
render(<OverflowInputToolTip value="Short text" />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).toBeNull();
});
});
test('does NOT show tooltip when content overflows but input is NOT at maxAutoWidth', async () => {
mockOverflow(100, 250); // clientWidth < maxAutoWidth (150), scrollWidth > clientWidth
render(<OverflowInputToolTip value="Long but input not clamped" />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).toBeNull();
});
});
test('uncontrolled input allows typing', async () => {
render(<OverflowInputToolTip defaultValue="Init" />);
const input = screen.getByRole('textbox') as HTMLInputElement;
await userEvent.type(input, 'ABC');
expect(input).toHaveValue('InitABC');
});
test('disabled input never shows tooltip even if overflowing', async () => {
mockOverflow(150, 300);
render(<OverflowInputToolTip value="Overflowing!" disabled />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).toBeNull();
});
});
test('renders mirror span and input correctly (structural assertions instead of snapshot)', () => {
const { container } = render(<OverflowInputToolTip value="Snapshot" />);
const mirror = container.querySelector('.overflow-input-mirror');
const input = container.querySelector('input') as HTMLInputElement | null;
expect(mirror).toBeTruthy();
expect(mirror?.textContent).toBe('Snapshot');
expect(input).toBeTruthy();
expect(input?.value).toBe('Snapshot');
// width should be set inline (component calculates width on mount)
expect(input?.getAttribute('style')).toContain('width:');
});
});

View File

@@ -0,0 +1,73 @@
/* eslint-disable react/require-default-props */
/* eslint-disable react/jsx-props-no-spreading */
import './OverflowInputToolTip.scss';
import { Input, InputProps, InputRef, Tooltip } from 'antd';
import cx from 'classnames';
import { useEffect, useRef, useState } from 'react';
export interface OverflowTooltipInputProps extends InputProps {
tooltipPlacement?: 'top' | 'bottom' | 'left' | 'right';
minAutoWidth?: number;
maxAutoWidth?: number;
}
function OverflowInputToolTip({
value,
defaultValue,
onChange,
disabled = false,
tooltipPlacement = 'top',
className,
minAutoWidth = 70,
maxAutoWidth = 150,
...rest
}: OverflowTooltipInputProps): JSX.Element {
const inputRef = useRef<InputRef>(null);
const mirrorRef = useRef<HTMLSpanElement | null>(null);
const [isOverflowing, setIsOverflowing] = useState<boolean>(false);
useEffect(() => {
const input = inputRef.current?.input;
const mirror = mirrorRef.current;
if (!input || !mirror) {
setIsOverflowing(false);
return;
}
mirror.textContent = String(value ?? '') || ' ';
const mirrorWidth = mirror.offsetWidth + 24;
const newWidth = Math.min(maxAutoWidth, Math.max(minAutoWidth, mirrorWidth));
input.style.width = `${newWidth}px`;
// consider clamped when mirrorWidth reaches maxAutoWidth (allow -5px tolerance)
const isClamped = mirrorWidth >= maxAutoWidth - 5;
const overflow = input.scrollWidth > input.clientWidth && isClamped;
setIsOverflowing(overflow);
}, [value, disabled, minAutoWidth, maxAutoWidth]);
const tooltipTitle = !disabled && isOverflowing ? String(value ?? '') : '';
return (
<>
<span ref={mirrorRef} aria-hidden className="overflow-input-mirror" />
<Tooltip title={tooltipTitle} placement={tooltipPlacement}>
<Input
{...rest}
value={value}
defaultValue={defaultValue}
onChange={onChange}
disabled={disabled}
ref={inputRef}
className={cx('overflow-input', className)}
/>
</Tooltip>
</>
);
}
OverflowInputToolTip.displayName = 'OverflowInputToolTip';
export default OverflowInputToolTip;

View File

@@ -0,0 +1,3 @@
import OverflowInputToolTip from './OverflowInputToolTip';
export default OverflowInputToolTip;

View File

@@ -30,6 +30,8 @@ interface OverviewProps {
selectedOptions: OptionsQuery; selectedOptions: OptionsQuery;
listViewPanelSelectedFields?: IField[] | null; listViewPanelSelectedFields?: IField[] | null;
onGroupByAttribute?: (fieldKey: string, dataType?: DataTypes) => Promise<void>; onGroupByAttribute?: (fieldKey: string, dataType?: DataTypes) => Promise<void>;
onAddColumn?: (fieldName: string) => void;
onRemoveColumn?: (fieldName: string) => void;
} }
type Props = OverviewProps & type Props = OverviewProps &
@@ -44,6 +46,8 @@ function Overview({
selectedOptions, selectedOptions,
onGroupByAttribute, onGroupByAttribute,
listViewPanelSelectedFields, listViewPanelSelectedFields,
onAddColumn,
onRemoveColumn,
}: Props): JSX.Element { }: Props): JSX.Element {
const [isWrapWord, setIsWrapWord] = useState<boolean>(true); const [isWrapWord, setIsWrapWord] = useState<boolean>(true);
const [isSearchVisible, setIsSearchVisible] = useState<boolean>(false); const [isSearchVisible, setIsSearchVisible] = useState<boolean>(false);
@@ -213,6 +217,8 @@ function Overview({
isListViewPanel={isListViewPanel} isListViewPanel={isListViewPanel}
selectedOptions={selectedOptions} selectedOptions={selectedOptions}
listViewPanelSelectedFields={listViewPanelSelectedFields} listViewPanelSelectedFields={listViewPanelSelectedFields}
onAddColumn={onAddColumn}
onRemoveColumn={onRemoveColumn}
/> />
</> </>
), ),
@@ -228,6 +234,8 @@ Overview.defaultProps = {
isListViewPanel: false, isListViewPanel: false,
listViewPanelSelectedFields: null, listViewPanelSelectedFields: null,
onGroupByAttribute: undefined, onGroupByAttribute: undefined,
onAddColumn: undefined,
onRemoveColumn: undefined,
}; };
export default Overview; export default Overview;

View File

@@ -48,6 +48,8 @@ interface TableViewProps {
isListViewPanel?: boolean; isListViewPanel?: boolean;
listViewPanelSelectedFields?: IField[] | null; listViewPanelSelectedFields?: IField[] | null;
onGroupByAttribute?: (fieldKey: string, dataType?: DataTypes) => Promise<void>; onGroupByAttribute?: (fieldKey: string, dataType?: DataTypes) => Promise<void>;
onAddColumn?: (fieldName: string) => void;
onRemoveColumn?: (fieldName: string) => void;
} }
type Props = TableViewProps & type Props = TableViewProps &
@@ -63,6 +65,8 @@ function TableView({
selectedOptions, selectedOptions,
onGroupByAttribute, onGroupByAttribute,
listViewPanelSelectedFields, listViewPanelSelectedFields,
onAddColumn,
onRemoveColumn,
}: Props): JSX.Element | null { }: Props): JSX.Element | null {
const dispatch = useDispatch<Dispatch<AppActions>>(); const dispatch = useDispatch<Dispatch<AppActions>>();
const [isfilterInLoading, setIsFilterInLoading] = useState<boolean>(false); const [isfilterInLoading, setIsFilterInLoading] = useState<boolean>(false);
@@ -292,6 +296,9 @@ function TableView({
isfilterOutLoading={isfilterOutLoading} isfilterOutLoading={isfilterOutLoading}
onClickHandler={onClickHandler} onClickHandler={onClickHandler}
onGroupByAttribute={onGroupByAttribute} onGroupByAttribute={onGroupByAttribute}
onAddColumn={onAddColumn}
onRemoveColumn={onRemoveColumn}
selectedOptions={selectedOptions}
/> />
), ),
}, },
@@ -335,6 +342,8 @@ TableView.defaultProps = {
isListViewPanel: false, isListViewPanel: false,
listViewPanelSelectedFields: null, listViewPanelSelectedFields: null,
onGroupByAttribute: undefined, onGroupByAttribute: undefined,
onAddColumn: undefined,
onRemoveColumn: undefined,
}; };
export interface DataType { export interface DataType {

View File

@@ -11,7 +11,14 @@ import { OPERATORS } from 'constants/queryBuilder';
import ROUTES from 'constants/routes'; import ROUTES from 'constants/routes';
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config'; import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';
import { MetricsType } from 'container/MetricsApplication/constant'; import { MetricsType } from 'container/MetricsApplication/constant';
import { ArrowDownToDot, ArrowUpFromDot, Ellipsis } from 'lucide-react'; import { OptionsQuery } from 'container/OptionsMenu/types';
import {
ArrowDownToDot,
ArrowUpFromDot,
Ellipsis,
Minus,
Plus,
} from 'lucide-react';
import { useTimezone } from 'providers/Timezone'; import { useTimezone } from 'providers/Timezone';
import React, { useCallback, useMemo, useState } from 'react'; import React, { useCallback, useMemo, useState } from 'react';
import { useLocation } from 'react-router-dom'; import { useLocation } from 'react-router-dom';
@@ -34,6 +41,9 @@ interface ITableViewActionsProps {
isfilterInLoading: boolean; isfilterInLoading: boolean;
isfilterOutLoading: boolean; isfilterOutLoading: boolean;
onGroupByAttribute?: (fieldKey: string, dataType?: DataTypes) => Promise<void>; onGroupByAttribute?: (fieldKey: string, dataType?: DataTypes) => Promise<void>;
onAddColumn?: (fieldName: string) => void;
onRemoveColumn?: (fieldName: string) => void;
selectedOptions?: OptionsQuery;
onClickHandler: ( onClickHandler: (
operator: string, operator: string,
fieldKey: string, fieldKey: string,
@@ -60,8 +70,7 @@ const BodyContent: React.FC<{
fieldData: Record<string, string>; fieldData: Record<string, string>;
record: DataType; record: DataType;
bodyHtml: { __html: string }; bodyHtml: { __html: string };
textToCopy: string; }> = React.memo(({ fieldData, record, bodyHtml }) => {
}> = React.memo(({ fieldData, record, bodyHtml, textToCopy }) => {
const { isLoading, treeData, error } = useAsyncJSONProcessing( const { isLoading, treeData, error } = useAsyncJSONProcessing(
fieldData.value, fieldData.value,
record.field === 'body', record.field === 'body',
@@ -93,13 +102,11 @@ const BodyContent: React.FC<{
if (record.field === 'body') { if (record.field === 'body') {
return ( return (
<CopyClipboardHOC entityKey="body" textToCopy={textToCopy}> <span
<span style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }} >
> <span dangerouslySetInnerHTML={bodyHtml} />
<span dangerouslySetInnerHTML={bodyHtml} /> </span>
</span>
</CopyClipboardHOC>
); );
} }
@@ -108,6 +115,7 @@ const BodyContent: React.FC<{
BodyContent.displayName = 'BodyContent'; BodyContent.displayName = 'BodyContent';
// eslint-disable-next-line sonarjs/cognitive-complexity
export default function TableViewActions( export default function TableViewActions(
props: ITableViewActionsProps, props: ITableViewActionsProps,
): React.ReactElement { ): React.ReactElement {
@@ -119,6 +127,9 @@ export default function TableViewActions(
isfilterOutLoading, isfilterOutLoading,
onClickHandler, onClickHandler,
onGroupByAttribute, onGroupByAttribute,
onAddColumn,
onRemoveColumn,
selectedOptions,
} = props; } = props;
const { pathname } = useLocation(); const { pathname } = useLocation();
@@ -145,6 +156,13 @@ export default function TableViewActions(
const fieldFilterKey = filterKeyForField(fieldData.field); const fieldFilterKey = filterKeyForField(fieldData.field);
const isFieldInSelectedColumns = useMemo(() => {
if (!selectedOptions?.selectColumns) return false;
return selectedOptions.selectColumns.some(
(col) => col.name === fieldFilterKey,
);
}, [selectedOptions, fieldFilterKey]);
// Memoize textToCopy computation // Memoize textToCopy computation
const textToCopy = useMemo(() => { const textToCopy = useMemo(() => {
let text = fieldData.value; let text = fieldData.value;
@@ -175,12 +193,7 @@ export default function TableViewActions(
switch (record.field) { switch (record.field) {
case 'body': case 'body':
return ( return (
<BodyContent <BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
fieldData={fieldData}
record={record}
bodyHtml={bodyHtml}
textToCopy={textToCopy}
/>
); );
case 'timestamp': case 'timestamp':
@@ -202,7 +215,6 @@ export default function TableViewActions(
record, record,
fieldData, fieldData,
bodyHtml, bodyHtml,
textToCopy,
formatTimezoneAdjustedTimestamp, formatTimezoneAdjustedTimestamp,
cleanTimestamp, cleanTimestamp,
]); ]);
@@ -211,12 +223,7 @@ export default function TableViewActions(
if (record.field === 'body') { if (record.field === 'body') {
return ( return (
<div className={cx('value-field', isOpen ? 'open-popover' : '')}> <div className={cx('value-field', isOpen ? 'open-popover' : '')}>
<BodyContent <BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
fieldData={fieldData}
record={record}
bodyHtml={bodyHtml}
textToCopy={textToCopy}
/>
{!isListViewPanel && !RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && ( {!isListViewPanel && !RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && (
<span className="action-btn"> <span className="action-btn">
<Tooltip title="Filter for value"> <Tooltip title="Filter for value">
@@ -264,6 +271,32 @@ export default function TableViewActions(
arrow={false} arrow={false}
content={ content={
<div> <div>
{onAddColumn && !isFieldInSelectedColumns && (
<Button
className="group-by-clause"
type="text"
icon={<Plus size={14} />}
onClick={(): void => {
onAddColumn(fieldFilterKey);
setIsOpen(false);
}}
>
Add to Columns
</Button>
)}
{onRemoveColumn && isFieldInSelectedColumns && (
<Button
className="group-by-clause"
type="text"
icon={<Minus size={14} />}
onClick={(): void => {
onRemoveColumn(fieldFilterKey);
setIsOpen(false);
}}
>
Remove from Columns
</Button>
)}
<Button <Button
className="group-by-clause" className="group-by-clause"
type="text" type="text"
@@ -344,6 +377,32 @@ export default function TableViewActions(
arrow={false} arrow={false}
content={ content={
<div> <div>
{onAddColumn && !isFieldInSelectedColumns && (
<Button
className="group-by-clause"
type="text"
icon={<Plus size={14} />}
onClick={(): void => {
onAddColumn(fieldFilterKey);
setIsOpen(false);
}}
>
Add to Columns
</Button>
)}
{onRemoveColumn && isFieldInSelectedColumns && (
<Button
className="group-by-clause"
type="text"
icon={<Minus size={14} />}
onClick={(): void => {
onRemoveColumn(fieldFilterKey);
setIsOpen(false);
}}
>
Remove from Columns
</Button>
)}
<Button <Button
className="group-by-clause" className="group-by-clause"
type="text" type="text"
@@ -374,4 +433,7 @@ export default function TableViewActions(
TableViewActions.defaultProps = { TableViewActions.defaultProps = {
onGroupByAttribute: undefined, onGroupByAttribute: undefined,
onAddColumn: undefined,
onRemoveColumn: undefined,
selectedOptions: undefined,
}; };

View File

@@ -1,54 +1,18 @@
import { fireEvent, render, screen } from '@testing-library/react'; import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config'; import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';
import { LogViewMode } from 'container/LogsTable';
import { FontSize } from 'container/OptionsMenu/types';
import TableViewActions from '../TableViewActions'; import TableViewActions from '../TableViewActions';
import useAsyncJSONProcessing from '../useAsyncJSONProcessing';
// Mock data for tests
let mockCopyToClipboard: jest.Mock;
let mockNotificationsSuccess: jest.Mock;
// Mock the components and hooks // Mock the components and hooks
jest.mock('components/Logs/CopyClipboardHOC', () => ({ jest.mock('components/Logs/CopyClipboardHOC', () => ({
__esModule: true, __esModule: true,
default: ({ default: ({ children }: { children: React.ReactNode }): JSX.Element => (
children, <div className="CopyClipboardHOC">{children}</div>
textToCopy,
entityKey,
}: {
children: React.ReactNode;
textToCopy: string;
entityKey: string;
}): JSX.Element => (
// eslint-disable-next-line jsx-a11y/click-events-have-key-events
<div
className="CopyClipboardHOC"
data-testid={`copy-clipboard-${entityKey}`}
data-text-to-copy={textToCopy}
onClick={(): void => {
if (mockCopyToClipboard) {
mockCopyToClipboard(textToCopy);
}
if (mockNotificationsSuccess) {
mockNotificationsSuccess({
message: `${entityKey} copied to clipboard`,
key: `${entityKey} copied to clipboard`,
});
}
}}
role="button"
tabIndex={0}
>
{children}
</div>
), ),
})); }));
jest.mock('../useAsyncJSONProcessing', () => ({
__esModule: true,
default: jest.fn(),
}));
jest.mock('providers/Timezone', () => ({ jest.mock('providers/Timezone', () => ({
useTimezone: (): { useTimezone: (): {
formatTimezoneAdjustedTimestamp: (timestamp: string) => string; formatTimezoneAdjustedTimestamp: (timestamp: string) => string;
@@ -71,17 +35,32 @@ jest.mock('react-router-dom', () => ({
}), }),
})); }));
jest.mock('../useAsyncJSONProcessing', () => ({
__esModule: true,
default: (): {
isLoading: boolean;
treeData: unknown[] | null;
error: string | null;
} => ({
isLoading: false,
treeData: null,
error: null,
}),
}));
describe('TableViewActions', () => { describe('TableViewActions', () => {
const TEST_VALUE = 'test value'; const TEST_VALUE = 'test value';
const ACTION_BUTTON_TEST_ID = '.action-btn'; const ACTION_BUTTON_TEST_ID = '.action-btn';
const TEST_FIELD = 'test-field';
const defaultProps = { const defaultProps = {
fieldData: { fieldData: {
field: 'test-field', field: TEST_FIELD,
value: TEST_VALUE, value: TEST_VALUE,
}, },
record: { record: {
key: 'test-key', key: 'test-key',
field: 'test-field', field: TEST_FIELD,
value: TEST_VALUE, value: TEST_VALUE,
}, },
isListViewPanel: false, isListViewPanel: false,
@@ -91,19 +70,6 @@ describe('TableViewActions', () => {
onGroupByAttribute: jest.fn(), onGroupByAttribute: jest.fn(),
}; };
beforeEach(() => {
mockCopyToClipboard = jest.fn();
mockNotificationsSuccess = jest.fn();
// Default mock for useAsyncJSONProcessing
const mockUseAsyncJSONProcessing = jest.mocked(useAsyncJSONProcessing);
mockUseAsyncJSONProcessing.mockReturnValue({
isLoading: false,
treeData: null,
error: null,
});
});
it('should render without crashing', () => { it('should render without crashing', () => {
render( render(
<TableViewActions <TableViewActions
@@ -179,59 +145,133 @@ describe('TableViewActions', () => {
).not.toBeInTheDocument(); ).not.toBeInTheDocument();
}); });
it('should copy non-JSON body text without quotes when user clicks on body', () => { describe('Add/Remove Column functionality', () => {
// Setup: body field with surrounding quotes const ADD_TO_COLUMNS_TEXT = 'Add to Columns';
const bodyValueWithQuotes = const REMOVE_FROM_COLUMNS_TEXT = 'Remove from Columns';
'"FeatureFlag \'kafkaQueueProblems\' is enabled, sleeping 1 second"';
const expectedCopiedText =
"FeatureFlag 'kafkaQueueProblems' is enabled, sleeping 1 second";
const bodyProps = { const getEllipsisButton = (container: HTMLElement): HTMLElement => {
fieldData: { const buttons = container.querySelectorAll('.filter-btn.periscope-btn');
field: 'body', return buttons[buttons.length - 1] as HTMLElement;
value: bodyValueWithQuotes,
},
record: {
key: 'body-key',
field: 'body',
value: bodyValueWithQuotes,
},
isListViewPanel: false,
isfilterInLoading: false,
isfilterOutLoading: false,
onClickHandler: jest.fn(),
onGroupByAttribute: jest.fn(),
}; };
// Render component with body field const defaultSelectedOptions = {
render( selectColumns: [],
<TableViewActions maxLines: 1,
fieldData={bodyProps.fieldData} format: 'table' as LogViewMode,
record={bodyProps.record} fontSize: FontSize.MEDIUM,
isListViewPanel={bodyProps.isListViewPanel} };
isfilterInLoading={bodyProps.isfilterInLoading}
isfilterOutLoading={bodyProps.isfilterOutLoading}
onClickHandler={bodyProps.onClickHandler}
onGroupByAttribute={bodyProps.onGroupByAttribute}
/>,
);
// Find the clickable copy area for body it('shows Add to Columns button when field is not selected', async () => {
const copyArea = screen.getByTestId('copy-clipboard-body'); const onAddColumn = jest.fn();
const { container } = render(
<TableViewActions
fieldData={defaultProps.fieldData}
record={defaultProps.record}
isListViewPanel={defaultProps.isListViewPanel}
isfilterInLoading={defaultProps.isfilterInLoading}
isfilterOutLoading={defaultProps.isfilterOutLoading}
onClickHandler={defaultProps.onClickHandler}
onGroupByAttribute={defaultProps.onGroupByAttribute}
onAddColumn={onAddColumn}
selectedOptions={defaultSelectedOptions}
/>,
);
// Verify it has the correct text to copy (without quotes) const ellipsisButton = getEllipsisButton(container);
expect(copyArea).toHaveAttribute('data-text-to-copy', expectedCopiedText); fireEvent.mouseOver(ellipsisButton);
// Action: User clicks on body content await waitFor(() => {
fireEvent.click(copyArea); expect(screen.getByText(ADD_TO_COLUMNS_TEXT)).toBeInTheDocument();
});
});
// Assert: Text was copied without surrounding quotes it(`calls onAddColumn with correct field key when ${ADD_TO_COLUMNS_TEXT} is clicked`, async () => {
expect(mockCopyToClipboard).toHaveBeenCalledWith(expectedCopiedText); const onAddColumn = jest.fn();
const { container } = render(
<TableViewActions
fieldData={defaultProps.fieldData}
record={defaultProps.record}
isListViewPanel={defaultProps.isListViewPanel}
isfilterInLoading={defaultProps.isfilterInLoading}
isfilterOutLoading={defaultProps.isfilterOutLoading}
onClickHandler={defaultProps.onClickHandler}
onGroupByAttribute={defaultProps.onGroupByAttribute}
onAddColumn={onAddColumn}
selectedOptions={defaultSelectedOptions}
/>,
);
// Assert: Success notification shown const ellipsisButton = getEllipsisButton(container);
expect(mockNotificationsSuccess).toHaveBeenCalledWith({ fireEvent.mouseOver(ellipsisButton);
message: 'body copied to clipboard',
key: 'body copied to clipboard', await waitFor(() => {
expect(screen.getByText(ADD_TO_COLUMNS_TEXT)).toBeInTheDocument();
});
const addButton = screen.getByText(ADD_TO_COLUMNS_TEXT);
fireEvent.click(addButton);
expect(onAddColumn).toHaveBeenCalledWith(TEST_FIELD);
});
it('shows Remove from Columns button when field is already selected', async () => {
const onRemoveColumn = jest.fn();
const { container } = render(
<TableViewActions
fieldData={defaultProps.fieldData}
record={defaultProps.record}
isListViewPanel={defaultProps.isListViewPanel}
isfilterInLoading={defaultProps.isfilterInLoading}
isfilterOutLoading={defaultProps.isfilterOutLoading}
onClickHandler={defaultProps.onClickHandler}
onGroupByAttribute={defaultProps.onGroupByAttribute}
onRemoveColumn={onRemoveColumn}
selectedOptions={{
...defaultSelectedOptions,
selectColumns: [{ name: TEST_FIELD }],
}}
/>,
);
const ellipsisButton = getEllipsisButton(container);
fireEvent.mouseOver(ellipsisButton);
await waitFor(() => {
expect(screen.getByText(REMOVE_FROM_COLUMNS_TEXT)).toBeInTheDocument();
});
expect(screen.queryByText(ADD_TO_COLUMNS_TEXT)).not.toBeInTheDocument();
});
it(`calls onRemoveColumn with correct field key when ${REMOVE_FROM_COLUMNS_TEXT} is clicked`, async () => {
const onRemoveColumn = jest.fn();
const { container } = render(
<TableViewActions
fieldData={defaultProps.fieldData}
record={defaultProps.record}
isListViewPanel={defaultProps.isListViewPanel}
isfilterInLoading={defaultProps.isfilterInLoading}
isfilterOutLoading={defaultProps.isfilterOutLoading}
onClickHandler={defaultProps.onClickHandler}
onGroupByAttribute={defaultProps.onGroupByAttribute}
onRemoveColumn={onRemoveColumn}
selectedOptions={{
...defaultSelectedOptions,
selectColumns: [{ name: TEST_FIELD }],
}}
/>,
);
const ellipsisButton = getEllipsisButton(container);
fireEvent.mouseOver(ellipsisButton);
await waitFor(() => {
expect(screen.getByText('Remove from Columns')).toBeInTheDocument();
});
const removeButton = screen.getByText(REMOVE_FROM_COLUMNS_TEXT);
fireEvent.click(removeButton);
expect(onRemoveColumn).toHaveBeenCalledWith(TEST_FIELD);
}); });
}); });
}); });

View File

@@ -170,7 +170,7 @@ const useOptionsMenu = ({
...initialQueryParamsV5, ...initialQueryParamsV5,
searchText: debouncedSearchText, searchText: debouncedSearchText,
}, },
{ queryKey: [debouncedSearchText, isFocused], enabled: isFocused }, { queryKey: [debouncedSearchText, isFocused] },
); );
// const { // const {
@@ -186,7 +186,7 @@ const useOptionsMenu = ({
const searchedAttributeKeys: TelemetryFieldKey[] = useMemo(() => { const searchedAttributeKeys: TelemetryFieldKey[] = useMemo(() => {
const searchedAttributesDataList = Object.values( const searchedAttributesDataList = Object.values(
searchedAttributesDataV5?.data.data.keys || {}, searchedAttributesDataV5?.data.data?.keys || {},
).flat(); ).flat();
if (searchedAttributesDataList.length) { if (searchedAttributesDataList.length) {
if (dataSource === DataSource.LOGS) { if (dataSource === DataSource.LOGS) {
@@ -230,7 +230,7 @@ const useOptionsMenu = ({
} }
return []; return [];
}, [dataSource, searchedAttributesDataV5?.data.data.keys]); }, [dataSource, searchedAttributesDataV5?.data.data?.keys]);
const initialOptionsQuery: OptionsQuery = useMemo(() => { const initialOptionsQuery: OptionsQuery = useMemo(() => {
let defaultColumns: TelemetryFieldKey[] = defaultOptionsQuery.selectColumns; let defaultColumns: TelemetryFieldKey[] = defaultOptionsQuery.selectColumns;

View File

@@ -1,6 +1,8 @@
/* eslint-disable react/jsx-props-no-spreading */ /* eslint-disable react/jsx-props-no-spreading */
import { Button, Flex, Input, Select } from 'antd';
import { Button, Flex, Select } from 'antd';
import cx from 'classnames'; import cx from 'classnames';
import OverflowInputToolTip from 'components/OverflowInputToolTip';
import { import {
logsQueryFunctionOptions, logsQueryFunctionOptions,
metricQueryFunctionOptions, metricQueryFunctionOptions,
@@ -9,6 +11,7 @@ import {
import { useIsDarkMode } from 'hooks/useDarkMode'; import { useIsDarkMode } from 'hooks/useDarkMode';
import { debounce, isNil } from 'lodash-es'; import { debounce, isNil } from 'lodash-es';
import { X } from 'lucide-react'; import { X } from 'lucide-react';
import { useMemo, useState } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { QueryFunction } from 'types/api/v5/queryRange'; import { QueryFunction } from 'types/api/v5/queryRange';
import { DataSource, QueryFunctionsTypes } from 'types/common/queryBuilder'; import { DataSource, QueryFunctionsTypes } from 'types/common/queryBuilder';
@@ -47,9 +50,13 @@ export default function Function({
functionValue = funcData.args?.[0]?.value; functionValue = funcData.args?.[0]?.value;
} }
const debouncedhandleUpdateFunctionArgs = debounce( const [value, setValue] = useState<string>(
handleUpdateFunctionArgs, functionValue !== undefined ? String(functionValue) : '',
500, );
const debouncedhandleUpdateFunctionArgs = useMemo(
() => debounce(handleUpdateFunctionArgs, 500),
[handleUpdateFunctionArgs],
); );
// update the logic when we start supporting functions for traces // update the logic when we start supporting functions for traces
@@ -89,13 +96,18 @@ export default function Function({
/> />
{showInput && ( {showInput && (
<Input <OverflowInputToolTip
className="query-function-value"
autoFocus autoFocus
defaultValue={functionValue} value={value}
onChange={(event): void => { onChange={(event): void => {
const newVal = event.target.value;
setValue(newVal);
debouncedhandleUpdateFunctionArgs(funcData, index, event.target.value); debouncedhandleUpdateFunctionArgs(funcData, index, event.target.value);
}} }}
tooltipPlacement="top"
minAutoWidth={70}
maxAutoWidth={150}
className="query-function-value"
/> />
)} )}

View File

@@ -99,7 +99,7 @@
} }
.query-function-value { .query-function-value {
width: 55px; width: 70px;
border-left: 0; border-left: 0;
background: var(--bg-ink-200); background: var(--bg-ink-200);
border-radius: 0; border-radius: 0;

View File

@@ -1,3 +1,13 @@
import AlertRuleProvider from 'providers/Alert';
import AlertDetails from './AlertDetails'; import AlertDetails from './AlertDetails';
export default AlertDetails; function AlertDetailsPage(): JSX.Element {
return (
<AlertRuleProvider>
<AlertDetails />
</AlertRuleProvider>
);
}
export default AlertDetailsPage;

9
go.mod
View File

@@ -8,7 +8,7 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.40.1 github.com/ClickHouse/clickhouse-go/v2 v2.40.1
github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.129.4 github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
github.com/antlr4-go/antlr/v4 v4.13.1 github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3 github.com/antonmedv/expr v1.15.3
github.com/cespare/xxhash/v2 v2.3.0 github.com/cespare/xxhash/v2 v2.3.0
@@ -86,12 +86,19 @@ require (
) )
require ( require (
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.1 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
go.yaml.in/yaml/v2 v2.4.2 // indirect go.yaml.in/yaml/v2 v2.4.2 // indirect
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
modernc.org/libc v1.66.10 // indirect modernc.org/libc v1.66.10 // indirect
modernc.org/mathutil v1.7.1 // indirect modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect modernc.org/memory v1.11.0 // indirect

16
go.sum
View File

@@ -106,8 +106,8 @@ github.com/SigNoz/expr v1.17.7-beta h1:FyZkleM5dTQ0O6muQfwGpoH5A2ohmN/XTasRCO72g
github.com/SigNoz/expr v1.17.7-beta/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= github.com/SigNoz/expr v1.17.7-beta/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/signoz-otel-collector v0.129.4 h1:DGDu9y1I1FU+HX4eECPGmfhnXE4ys4yr7LL6znbf6to= github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9 h1:WmYDSSwzyW2yiJ3tPq5AFdjsrz3NBdtPkygtFKOsACw=
github.com/SigNoz/signoz-otel-collector v0.129.4/go.mod h1:xyR+coBzzO04p6Eu+ql2RVYUl/jFD+8hD9lArcc9U7g= github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9/go.mod h1:4eJCRUd/P4OiCHXvGYZK8q6oyBVGJFVj/G6qKSoN/TQ=
github.com/Yiling-J/theine-go v0.6.2 h1:1GeoXeQ0O0AUkiwj2S9Jc0Mzx+hpqzmqsJ4kIC4M9AY= github.com/Yiling-J/theine-go v0.6.2 h1:1GeoXeQ0O0AUkiwj2S9Jc0Mzx+hpqzmqsJ4kIC4M9AY=
github.com/Yiling-J/theine-go v0.6.2/go.mod h1:08QpMa5JZ2pKN+UJCRrCasWYO1IKCdl54Xa836rpmDU= github.com/Yiling-J/theine-go v0.6.2/go.mod h1:08QpMa5JZ2pKN+UJCRrCasWYO1IKCdl54Xa836rpmDU=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
@@ -162,6 +162,12 @@ github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M=
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM=
github.com/bytedance/sonic v1.14.1 h1:FBMC0zVz5XUmE4z9wF4Jey0An5FueFvOsTKKKtwIl7w=
github.com/bytedance/sonic v1.14.1/go.mod h1:gi6uhQLMbTdeP0muCnrjHLeCUPyb70ujhnNlhOylAFc=
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI= github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
@@ -178,6 +184,8 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@@ -991,6 +999,8 @@ github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GH
github.com/trivago/tgo v1.0.7 h1:uaWH/XIy9aWYWpjm2CU3RpcqZXmX2ysQ9/Go+d9gyrM= github.com/trivago/tgo v1.0.7 h1:uaWH/XIy9aWYWpjm2CU3RpcqZXmX2ysQ9/Go+d9gyrM=
github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkFnhc= github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkFnhc=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/uptrace/bun v1.2.9 h1:OOt2DlIcRUMSZPr6iXDFg/LaQd59kOxbAjpIVHddKRs= github.com/uptrace/bun v1.2.9 h1:OOt2DlIcRUMSZPr6iXDFg/LaQd59kOxbAjpIVHddKRs=
github.com/uptrace/bun v1.2.9/go.mod h1:r2ZaaGs9Ru5bpGTr8GQfp8jp+TlCav9grYCPOu2CJSg= github.com/uptrace/bun v1.2.9/go.mod h1:r2ZaaGs9Ru5bpGTr8GQfp8jp+TlCav9grYCPOu2CJSg=
github.com/uptrace/bun/dialect/pgdialect v1.2.9 h1:caf5uFbOGiXvadV6pA5gn87k0awFFxL1kuuY3SpxnWk= github.com/uptrace/bun/dialect/pgdialect v1.2.9 h1:caf5uFbOGiXvadV6pA5gn87k0awFFxL1kuuY3SpxnWk=
@@ -1235,6 +1245,8 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=

View File

@@ -105,6 +105,12 @@ func (provider *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey s
return err return err
} }
// To make sure ristretto does not go into no-op
if ttl < 0 {
provider.settings.Logger().WarnContext(ctx, "ttl is less than 0, setting it to 0")
ttl = 0
}
if cloneable, ok := data.(cachetypes.Cloneable); ok { if cloneable, ok := data.(cachetypes.Cloneable); ok {
span.SetAttributes(attribute.Bool("memory.cloneable", true)) span.SetAttributes(attribute.Bool("memory.cloneable", true))
span.SetAttributes(attribute.Int64("memory.cost", 1)) span.SetAttributes(attribute.Int64("memory.cost", 1))

View File

@@ -208,3 +208,13 @@ func WrapUnexpectedf(cause error, code Code, format string, args ...any) *base {
func NewUnexpectedf(code Code, format string, args ...any) *base { func NewUnexpectedf(code Code, format string, args ...any) *base {
return Newf(TypeInvalidInput, code, format, args...) return Newf(TypeInvalidInput, code, format, args...)
} }
// WrapTimeoutf is a wrapper around Wrapf with TypeTimeout.
func WrapTimeoutf(cause error, code Code, format string, args ...any) *base {
return Wrapf(cause, TypeTimeout, code, format, args...)
}
// NewTimeoutf is a wrapper around Newf with TypeTimeout.
func NewTimeoutf(code Code, format string, args ...any) *base {
return Newf(TypeTimeout, code, format, args...)
}

View File

@@ -96,7 +96,6 @@ func (h *handler) UpdateMetricMetadata(rw http.ResponseWriter, req *http.Request
// Set metric name from URL path // Set metric name from URL path
in.MetricName = metricName in.MetricName = metricName
orgID := valuer.MustNewUUID(claims.OrgID) orgID := valuer.MustNewUUID(claims.OrgID)
err = h.module.UpdateMetricMetadata(req.Context(), orgID, &in) err = h.module.UpdateMetricMetadata(req.Context(), orgID, &in)
@@ -137,3 +136,47 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
render.Success(rw, http.StatusOK, metadata) render.Success(rw, http.StatusOK, metadata)
} }
func (h *handler) GetMetricHighlights(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
if metricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
highlights, err := h.module.GetMetricHighlights(req.Context(), orgID, metricName)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, highlights)
}
func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
var in metricsexplorertypes.MetricAttributesRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricAttributes(req.Context(), orgID, &in)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}

View File

@@ -2,6 +2,7 @@ package implmetricsexplorer
import ( import (
"context" "context"
"database/sql"
"fmt" "fmt"
"log/slog" "log/slog"
"strings" "strings"
@@ -20,6 +21,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer" "github.com/SigNoz/signoz/pkg/valuer"
sqlbuilder "github.com/huandu/go-sqlbuilder" sqlbuilder "github.com/huandu/go-sqlbuilder"
"golang.org/x/sync/errgroup"
) )
type module struct { type module struct {
@@ -190,6 +192,79 @@ func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, re
return nil return nil
} }
// GetMetricHighlights returns highlights for a metric including data points, last received, total time series, and active time series.
func (m *module) GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error) {
if metricName == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metric name is required")
}
var response metricsexplorertypes.MetricHighlightsResponse
g, gCtx := errgroup.WithContext(ctx)
// Fetch data points
g.Go(func() error {
dataPoints, err := m.getMetricDataPoints(gCtx, metricName)
if err != nil {
return err
}
response.DataPoints = dataPoints
return nil
})
// Fetch last received
g.Go(func() error {
lastReceived, err := m.getMetricLastReceived(gCtx, metricName)
if err != nil {
return err
}
response.LastReceived = lastReceived
return nil
})
// Fetch total time series
g.Go(func() error {
totalTimeSeries, err := m.getTotalTimeSeriesForMetricName(gCtx, metricName)
if err != nil {
return err
}
response.TotalTimeSeries = totalTimeSeries
return nil
})
// Fetch active time series (using 120 minutes as default duration)
g.Go(func() error {
activeTimeSeries, err := m.getActiveTimeSeriesForMetricName(gCtx, metricName, 120*time.Minute)
if err != nil {
return err
}
response.ActiveTimeSeries = activeTimeSeries
return nil
})
if err := g.Wait(); err != nil {
return nil, err
}
return &response, nil
}
func (m *module) GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error) {
if err := req.Validate(); err != nil {
return nil, err
}
attributes, err := m.fetchMetricAttributes(ctx, req.MetricName, req.Start, req.End)
if err != nil {
return nil, err
}
return &metricsexplorertypes.MetricAttributesResponse{
Attributes: attributes,
TotalKeys: int64(len(attributes)),
}, nil
}
func (m *module) fetchMetadataFromCache(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, []string) { func (m *module) fetchMetadataFromCache(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, []string) {
hits := make(map[string]*metricsexplorertypes.MetricMetadata) hits := make(map[string]*metricsexplorertypes.MetricMetadata)
misses := make([]string, 0) misses := make([]string, 0)
@@ -771,3 +846,132 @@ func (m *module) computeSamplesTreemap(ctx context.Context, req *metricsexplorer
return entries, nil return entries, nil
} }
// getMetricDataPoints returns the total number of data points (samples) for a metric.
func (m *module) getMetricDataPoints(ctx context.Context, metricName string) (uint64, error) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("sum(count) AS data_points")
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.SamplesV4Agg30mTableName))
sb.Where(sb.E("metric_name", metricName))
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
var dataPoints uint64
err := db.QueryRow(ctx, query, args...).Scan(&dataPoints)
if err != nil {
return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get metrics data points")
}
return dataPoints, nil
}
// getMetricLastReceived returns the last received timestamp for a metric.
func (m *module) getMetricLastReceived(ctx context.Context, metricName string) (uint64, error) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("MAX(last_reported_unix_milli) AS last_received_time")
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
sb.Where(sb.E("metric_name", metricName))
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
var lastReceived sql.NullInt64
err := db.QueryRow(ctx, query, args...).Scan(&lastReceived)
if err != nil {
return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get last received timestamp")
}
if !lastReceived.Valid {
return 0, nil
}
return uint64(lastReceived.Int64), nil
}
// getTotalTimeSeriesForMetricName returns the total number of unique time series for a metric.
func (m *module) getTotalTimeSeriesForMetricName(ctx context.Context, metricName string) (uint64, error) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("uniq(fingerprint) AS time_series_count")
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.TimeseriesV41weekTableName))
sb.Where(sb.E("metric_name", metricName))
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
var timeSeriesCount uint64
err := db.QueryRow(ctx, query, args...).Scan(&timeSeriesCount)
if err != nil {
return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get total time series count")
}
return timeSeriesCount, nil
}
// getActiveTimeSeriesForMetricName returns the number of active time series for a metric within the given duration.
func (m *module) getActiveTimeSeriesForMetricName(ctx context.Context, metricName string, duration time.Duration) (uint64, error) {
milli := time.Now().Add(-duration).UnixMilli()
sb := sqlbuilder.NewSelectBuilder()
sb.Select("uniq(fingerprint) AS active_time_series")
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.TimeseriesV4TableName))
sb.Where(sb.E("metric_name", metricName))
sb.Where(sb.GTE("unix_milli", milli))
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
var activeTimeSeries uint64
err := db.QueryRow(ctx, query, args...).Scan(&activeTimeSeries)
if err != nil {
return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get active time series count")
}
return activeTimeSeries, nil
}
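The activity window is a plain cutoff in Unix milliseconds: a fingerprint counts as active if it reported at or after now minus the requested duration. A tiny sketch of the arithmetic, using the 120-minute default from GetMetricHighlights:

package main

import (
	"fmt"
	"time"
)

func main() {
	duration := 120 * time.Minute // default used by GetMetricHighlights
	cutoff := time.Now().Add(-duration).UnixMilli()
	// The query then counts uniq(fingerprint) where unix_milli >= cutoff.
	fmt.Println("unix_milli >=", cutoff)
}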
func (m *module) fetchMetricAttributes(ctx context.Context, metricName string, start, end *int64) ([]metricsexplorertypes.MetricAttribute, error) {
// Build query using sqlbuilder
sb := sqlbuilder.NewSelectBuilder()
sb.Select(
"attr_name AS key",
"groupUniqArray(1000)(attr_string_value) AS values",
"uniq(attr_string_value) AS valueCount",
)
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
sb.Where(sb.E("metric_name", metricName))
sb.Where("NOT startsWith(attr_name, '__')")
// Add time range filtering if provided
if start != nil {
// Filter by start time: attributes that were active at or after start time
sb.Where(sb.GE("last_reported_unix_milli", *start))
}
if end != nil {
// Filter by end time: attributes that were active at or before end time
sb.Where(sb.LE("first_reported_unix_milli", *end))
}
sb.GroupBy("attr_name")
sb.OrderBy("valueCount DESC")
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch metric attributes")
}
defer rows.Close()
attributes := make([]metricsexplorertypes.MetricAttribute, 0)
for rows.Next() {
var attr metricsexplorertypes.MetricAttribute
if err := rows.Scan(&attr.Key, &attr.Values, &attr.ValueCount); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metric attribute row")
}
attributes = append(attributes, attr)
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metric attribute rows")
}
return attributes, nil
}
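The two time bounds above encode an interval-overlap test: an attribute reported over [first_reported, last_reported] matches the query range [start, end] exactly when last_reported >= start and first_reported <= end. A minimal sketch of that predicate:

package main

import "fmt"

// overlaps reports whether an attribute reported over [first, last]
// intersects the query range [start, end].
func overlaps(first, last, start, end int64) bool {
	return last >= start && first <= end
}

func main() {
	fmt.Println(overlaps(100, 200, 150, 300)) // true: the windows intersect
	fmt.Println(overlaps(100, 200, 250, 300)) // false: last report predates the range
}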

View File

@@ -13,7 +13,9 @@ type Handler interface {
GetStats(http.ResponseWriter, *http.Request)
GetTreemap(http.ResponseWriter, *http.Request)
GetMetricMetadata(http.ResponseWriter, *http.Request)
GetMetricAttributes(http.ResponseWriter, *http.Request)
UpdateMetricMetadata(http.ResponseWriter, *http.Request)
GetMetricHighlights(http.ResponseWriter, *http.Request)
}
// Module represents the metrics module interface.
@@ -22,4 +24,6 @@ type Module interface {
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error)
GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error)
}

View File

@@ -2,11 +2,11 @@ package thirdpartyapi
import (
"fmt"
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
"net"
"regexp"
"time"
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -287,11 +287,6 @@ func shouldIncludeRow(row *qbtypes.RawRow) bool {
return true
}
func containsKindStringOverride(expression string) bool {
kindStringPattern := regexp.MustCompile(`kind_string\s*[!=<>]+`)
return kindStringPattern.MatchString(expression)
}
func mergeGroupBy(base, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append(base, additional...)
}
@@ -400,6 +395,8 @@ func buildRpsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
}
func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
filter := buildBaseFilter(req.Filter)
filter.Expression = fmt.Sprintf("has_error = true AND (%s)", filter.Expression)
return qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
@@ -409,7 +406,7 @@ func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
Aggregations: []qbtypes.TraceAggregation{
{Expression: "count()"},
},
Filter: buildErrorFilter(req.Filter),
Filter: filter,
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
@@ -526,25 +523,9 @@ func buildBaseFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
urlPathKeyLegacy, urlPathKey)
if additionalFilter != nil && additionalFilter.Expression != "" {
if containsKindStringOverride(additionalFilter.Expression) {
return &qbtypes.Filter{Expression: baseExpression}
}
// even if it contains kind_string we add with an AND so it doesn't matter if the user is overriding it.
baseExpression = fmt.Sprintf("(%s) AND (%s)", baseExpression, additionalFilter.Expression)
}
return &qbtypes.Filter{Expression: baseExpression}
}
func buildErrorFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
errorExpression := fmt.Sprintf("has_error = true AND (%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
urlPathKeyLegacy, urlPathKey)
if additionalFilter != nil && additionalFilter.Expression != "" {
if containsKindStringOverride(additionalFilter.Expression) {
return &qbtypes.Filter{Expression: errorExpression}
}
errorExpression = fmt.Sprintf("(%s) AND (%s)", errorExpression, additionalFilter.Expression)
}
return &qbtypes.Filter{Expression: errorExpression}
}

View File

@@ -6252,17 +6252,6 @@ LIMIT 40`, // added rand to get diff value every time we run this query
return fingerprints, nil
}
func (r *ClickHouseReader) DeleteMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricName string) *model.ApiError {
delQuery := fmt.Sprintf(`ALTER TABLE %s.%s DELETE WHERE metric_name = ?;`, signozMetricDBName, signozUpdatedMetricsMetadataLocalTable)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
err := r.db.Exec(valueCtx, delQuery, metricName)
if err != nil {
return &model.ApiError{Typ: "ClickHouseError", Err: err}
}
r.cache.Delete(ctx, orgID, constants.UpdatedMetricsMetadataCachePrefix+metricName)
return nil
}
func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, orgID valuer.UUID, req *model.UpdateMetricsMetadata) *model.ApiError {
if req.MetricType == v3.MetricTypeHistogram {
labels := []string{"le"}
@@ -6292,10 +6281,7 @@ func (r *ClickHouseReader) UpdateMetricsMetadata(ctx context.Context, orgID valuer.UUID, req *model.UpdateMetricsMetadata) *model.ApiError {
}
}
apiErr := r.DeleteMetricsMetadata(ctx, orgID, req.MetricName)
if apiErr != nil {
return apiErr
}
// Insert new metadata (keeping history of all updates)
insertQuery := fmt.Sprintf(`INSERT INTO %s.%s (metric_name, temporality, is_monotonic, type, description, unit, created_at)
VALUES ( ?, ?, ?, ?, ?, ?, ?);`, signozMetricDBName, signozUpdatedMetricsMetadataTable)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
@@ -6364,9 +6350,19 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
var stillMissing []string
if len(missingMetrics) > 0 {
metricList := "'" + strings.Join(missingMetrics, "', '") + "'"
query := fmt.Sprintf(`SELECT metric_name, type, description, temporality, is_monotonic, unit
FROM %s.%s
WHERE metric_name IN (%s);`, signozMetricDBName, signozUpdatedMetricsMetadataTable, metricList)
query := fmt.Sprintf(`SELECT
metric_name,
argMax(type, created_at) AS type,
argMax(description, created_at) AS description,
argMax(temporality, created_at) AS temporality,
argMax(is_monotonic, created_at) AS is_monotonic,
argMax(unit, created_at) AS unit
FROM %s.%s
WHERE metric_name IN (%s)
GROUP BY metric_name;`,
signozMetricDBName,
signozUpdatedMetricsMetadataTable,
metricList)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
rows, err := r.db.Query(valueCtx, query)
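ClickHouse's argMax(x, y) returns the x from the row with the greatest y, so with GROUP BY metric_name the rewritten query resolves each metric to its most recently inserted metadata row, matching the "do not delete rows, keep inserting, pick latest" change. A small sketch of the same selection done in Go:

package main

import "fmt"

type metadataRow struct {
	metricName string
	metricType string
	createdAt  int64
}

func main() {
	rows := []metadataRow{
		{"cpu_usage", "gauge", 100},
		{"cpu_usage", "sum", 250}, // inserted later, so it wins
	}
	// Equivalent of argMax(type, created_at) ... GROUP BY metric_name:
	// keep, per metric, the row with the greatest created_at.
	latest := map[string]metadataRow{}
	for _, r := range rows {
		if cur, ok := latest[r.metricName]; !ok || r.createdAt > cur.createdAt {
			latest[r.metricName] = r
		}
	}
	fmt.Println(latest["cpu_usage"].metricType) // sum
}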

View File

@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi" "github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
"github.com/SigNoz/signoz/pkg/queryparser"
"io" "io"
"math" "math"
@@ -146,6 +147,8 @@ type APIHandler struct {
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
Signoz *signoz.SigNoz
}
@@ -176,6 +179,8 @@ type APIHandlerOpts struct {
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
Signoz *signoz.SigNoz
}
@@ -238,6 +243,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
logsQueryBuilder := logsv4.PrepareLogsQuery
@@ -632,6 +638,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost) router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)
// Query Filter Analyzer api used to extract metric names and grouping columns from a query
router.HandleFunc("/api/v1/query_filter/analyze", am.ViewAccess(aH.QueryParserAPI.AnalyzeQueryFilter)).Methods(http.MethodPost)
}
func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.AuthZ) {
@@ -660,10 +668,12 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
am.ViewAccess(ah.UpdateMetricsMetadata)).
Methods(http.MethodPost)
// v2 endpoints
router.HandleFunc("/api/v2/metrics/stats", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetStats)).Methods(http.MethodPost) router.HandleFunc("/api/v2/metrics/stats", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetStats)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/treemap", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetTreemap)).Methods(http.MethodPost) router.HandleFunc("/api/v2/metrics/treemap", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetTreemap)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetMetricMetadata)).Methods(http.MethodGet) router.HandleFunc("/api/v2/metrics/attributes", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricAttributes)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.UpdateMetricMetadata)).Methods(http.MethodPost) router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricMetadata)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metric/highlights", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricHighlights)).Methods(http.MethodGet)
}
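A hypothetical client call against the new attributes route. The request field names below (metricName/start/end) are assumptions based on MetricAttributesRequest, not confirmed by this diff:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Field names are assumed, not taken from the diff.
	body, _ := json.Marshal(map[string]any{
		"metricName": "http_requests_total",
		"start":      1700000000000,
		"end":        1700003600000,
	})
	resp, err := http.Post("http://localhost:8080/api/v2/metrics/attributes", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}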
func Intersection(a, b []int) (c []int) {

View File

@@ -3,16 +3,15 @@ package metricsexplorer
import (
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"net/http" "net/http"
"strconv" "strconv"
"github.com/gorilla/mux"
"github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/query-service/constants"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/model" "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/model/metrics_explorer" "github.com/SigNoz/signoz/pkg/query-service/model/metrics_explorer"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)
func ParseFilterKeySuggestions(r *http.Request) (*metrics_explorer.FilterKeyRequest, *model.ApiError) {

View File

@@ -10,6 +10,7 @@ import (
"slices" "slices"
"github.com/SigNoz/signoz/pkg/cache/memorycache" "github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore" "github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"github.com/gorilla/handlers" "github.com/gorilla/handlers"
@@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
})
if err != nil {
return nil, err

View File

@@ -127,7 +127,6 @@ type Reader interface {
GetInspectMetricsFingerprints(ctx context.Context, attributes []string, req *metrics_explorer.InspectMetricsRequest) ([]string, *model.ApiError)
GetInspectMetrics(ctx context.Context, req *metrics_explorer.InspectMetricsRequest, fingerprints []string) (*metrics_explorer.InspectMetricsResponse, *model.ApiError)
DeleteMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricName string) *model.ApiError
UpdateMetricsMetadata(ctx context.Context, orgID valuer.UUID, req *model.UpdateMetricsMetadata) *model.ApiError
GetUpdatedMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError)

View File

@@ -198,7 +198,6 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
FieldMapper: v.fieldMapper,
ConditionBuilder: v.conditionBuilder,
FullTextColumn: v.fullTextColumn,
JsonBodyPrefix: v.jsonBodyPrefix,
JsonKeyToKey: v.jsonKeyToKey,
}, 0, 0,
)

View File

@@ -0,0 +1,17 @@
package querybuilder
import (
"os"
)
var (
BodyJSONQueryEnabled = GetOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true"
)
func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)
if len(v) == 0 {
return fallback
}
return v
}
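A quick sketch of how this flag behaves, using a copy of the helper: the value is read from the environment, anything other than the string "true" leaves the feature off, and in the file above it is evaluated once at package initialization:

package main

import (
	"fmt"
	"os"
)

func getOrDefaultEnv(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

func main() {
	fmt.Println(getOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true") // false when unset
	os.Setenv("BODY_JSON_QUERY_ENABLED", "true")
	fmt.Println(getOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true") // true
}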

View File

@@ -7,7 +7,6 @@ import (
)
func TestQueryToKeys(t *testing.T) {
testCases := []struct {
query string
expectedKeys []telemetrytypes.FieldKeySelector
@@ -66,9 +65,9 @@ func TestQueryToKeys(t *testing.T) {
query: `body.user_ids[*] = 123`,
expectedKeys: []telemetrytypes.FieldKeySelector{
{
Name: "body.user_ids[*]",
Name: "user_ids[*]",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldContext: telemetrytypes.FieldContextBody,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
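The updated expectation reflects that a `body.`-prefixed key now parses into the body field context with the prefix stripped from Name. A hedged sketch of that mapping; parseKey here mimics what the test asserts and is not the real GetFieldKeyFromKeyText implementation:

package main

import (
	"fmt"
	"strings"
)

type fieldKey struct {
	Name         string
	FieldContext string
}

// parseKey mimics the mapping the test asserts for body keys.
func parseKey(text string) fieldKey {
	if rest, ok := strings.CutPrefix(text, "body."); ok {
		return fieldKey{Name: rest, FieldContext: "body"}
	}
	return fieldKey{Name: text, FieldContext: "unspecified"}
}

func main() {
	fmt.Println(parseKey("body.user_ids[*]")) // {user_ids[*] body}
}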

View File

@@ -162,7 +162,6 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
ConditionBuilder: b.conditionBuilder,
FieldKeys: keys,
FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
SkipFullTextFilter: true,
SkipFunctionCalls: true,

View File

@@ -33,7 +33,6 @@ type filterExpressionVisitor struct {
mainErrorURL string
builder *sqlbuilder.SelectBuilder
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
skipResourceFilter bool
skipFullTextFilter bool
@@ -53,7 +52,6 @@ type FilterExprVisitorOpts struct {
FieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
Builder *sqlbuilder.SelectBuilder
FullTextColumn *telemetrytypes.TelemetryFieldKey
JsonBodyPrefix string
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
SkipResourceFilter bool
SkipFullTextFilter bool
@@ -73,7 +71,6 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
fieldKeys: opts.FieldKeys,
builder: opts.Builder,
fullTextColumn: opts.FullTextColumn,
jsonBodyPrefix: opts.JsonBodyPrefix,
jsonKeyToKey: opts.JsonKeyToKey,
skipResourceFilter: opts.SkipResourceFilter,
skipFullTextFilter: opts.SkipFullTextFilter,
@@ -173,7 +170,7 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64
whereClause := sqlbuilder.NewWhereClause().AddWhereExpr(visitor.builder.Args, cond)
return &PreparedWhereClause{whereClause, visitor.warnings, visitor.mainWarnURL}, nil
return &PreparedWhereClause{WhereClause: whereClause, Warnings: visitor.warnings, WarningsDocURL: visitor.mainWarnURL}, nil
}
// Visit dispatches to the specific visit method based on node type
@@ -718,7 +715,7 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
conds = append(conds, fmt.Sprintf("hasToken(LOWER(%s), LOWER(%s))", key.Name, v.builder.Var(value[0])))
} else {
// this is that all other functions only support array fields
if strings.HasPrefix(key.Name, v.jsonBodyPrefix) {
if key.FieldContext == telemetrytypes.FieldContextBody {
fieldName, _ = v.jsonKeyToKey(context.Background(), key, qbtypes.FilterOperatorUnknown, value)
} else {
// TODO(add docs for json body search)
@@ -809,10 +806,8 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
// VisitKey handles field/column references
func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(ctx.GetText())
keyName := fieldKey.Name
keyName := strings.TrimPrefix(fieldKey.Name, v.jsonBodyPrefix)
fieldKeysForName := v.fieldKeys[keyName]
@@ -846,10 +841,11 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// if there is a field with the same name as attribute/resource attribute
// Since it will ORed with the fieldKeysForName, it will not result empty
// when either of them have values
if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" { // Note: Skip this logic if body json query is enabled so we can look up the key inside fields
if keyName != "" { //
fieldKeysForName = append(fieldKeysForName, &fieldKey) // TODO(Piyush): After entire migration this is supposed to be removed.
} if !BodyJSONQueryEnabled && fieldKey.FieldContext == telemetrytypes.FieldContextBody {
fieldKeysForName = append(fieldKeysForName, &fieldKey)
} }
if len(fieldKeysForName) == 0 { if len(fieldKeysForName) == 0 {
@@ -860,7 +856,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
return v.fieldKeys[keyWithContext]
}
if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" && keyName == "" {
if fieldKey.FieldContext == telemetrytypes.FieldContextBody && keyName == "" {
v.errors = append(v.errors, "missing key for body json search - expected key of the form `body.key` (ex: `body.status`)")
} else if !v.ignoreNotFoundKeys {
// TODO(srikanthccv): do we want to return an error here?

pkg/queryparser/api.go Normal file
View File

@@ -0,0 +1,49 @@
package queryparser
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/parsertypes"
)
type API struct {
queryParser QueryParser
settings factory.ProviderSettings
}
func NewAPI(settings factory.ProviderSettings, queryParser QueryParser) *API {
return &API{settings: settings, queryParser: queryParser}
}
// AnalyzeQueryFilter analyzes a query and extracts metric names and grouping columns
func (a *API) AnalyzeQueryFilter(w http.ResponseWriter, r *http.Request) {
// Limit request body size to 255 KB (CH query limit is 256 KB)
r.Body = http.MaxBytesReader(w, r.Body, 255*1024)
var req parsertypes.QueryFilterAnalyzeRequest
if err := binding.JSON.BindBody(r.Body, &req); err != nil {
render.Error(w, err)
return
}
result, err := a.queryParser.AnalyzeQueryFilter(r.Context(), req.QueryType, req.Query)
if err != nil {
a.settings.Logger.ErrorContext(r.Context(), "failed to analyze query filter", "error", err)
render.Error(w, err)
return
}
// prepare the response
var resp parsertypes.QueryFilterAnalyzeResponse
for _, group := range result.GroupByColumns {
resp.Groups = append(resp.Groups, parsertypes.ColumnInfoResponse{
Name: group.Name,
Alias: group.Alias,
}) // add the group name and alias to the response
}
resp.MetricNames = append(resp.MetricNames, result.MetricNames...) // add the metric names to the response
render.Success(w, http.StatusOK, resp)
}

pkg/queryparser/api_test.go Normal file
View File

@@ -0,0 +1,258 @@
package queryparser
import (
"bytes"
"context"
"encoding/json"
"net/http"
"net/http/httptest"
"reflect"
"sort"
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/types/parsertypes"
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
func TestAPI_AnalyzeQueryFilter(t *testing.T) {
queryParser := New(instrumentationtest.New().ToProviderSettings())
aH := NewAPI(instrumentationtest.New().ToProviderSettings(), queryParser)
tests := []struct {
name string
requestBody parsertypes.QueryFilterAnalyzeRequest
expectedStatus int
expectedStatusStr string
expectedError bool
errorContains string
expectedMetrics []string
expectedGroups []parsertypes.ColumnInfoResponse
}{
{
name: "PromQL - Nested aggregation inside subquery",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `max_over_time(sum(rate(cpu_usage_total[5m]))[1h:5m])`,
QueryType: querybuildertypesv5.QueryTypePromQL,
},
expectedStatus: http.StatusOK,
expectedStatusStr: "success",
expectedError: false,
expectedMetrics: []string{"cpu_usage_total"},
expectedGroups: []parsertypes.ColumnInfoResponse{},
},
{
name: "PromQL - Subquery with multiple metrics",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `avg_over_time((foo + bar)[10m:1m])`,
QueryType: querybuildertypesv5.QueryTypePromQL,
},
expectedStatus: http.StatusOK,
expectedStatusStr: "success",
expectedError: false,
expectedMetrics: []string{"bar", "foo"},
expectedGroups: []parsertypes.ColumnInfoResponse{},
},
{
name: "PromQL - Simple meta-metric with grouping",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `sum by (pod) (up)`,
QueryType: querybuildertypesv5.QueryTypePromQL,
},
expectedStatus: http.StatusOK,
expectedStatusStr: "success",
expectedError: false,
expectedMetrics: []string{"up"},
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "pod", Alias: ""}},
},
{
name: "ClickHouse - Simple CTE with GROUP BY",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `WITH aggregated AS (
SELECT region as region_alias, sum(value) AS total
FROM metrics
WHERE metric_name = 'cpu_usage'
GROUP BY region
)
SELECT * FROM aggregated`,
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
},
expectedStatus: http.StatusOK,
expectedStatusStr: "success",
expectedError: false,
expectedMetrics: []string{"cpu_usage"},
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "region", Alias: "region_alias"}},
},
{
name: "ClickHouse - CTE chain with last GROUP BY + Alias should be returned if exists",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `WITH step1 AS (
SELECT service as service_alias, timestamp as ts, value
FROM metrics
WHERE metric_name = 'requests'
GROUP BY service, timestamp
),
step2 AS (
SELECT ts, avg(value) AS avg_value
FROM step1
GROUP BY ts
)
SELECT * FROM step2`,
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
},
expectedStatus: http.StatusOK,
expectedStatusStr: "success",
expectedError: false,
expectedMetrics: []string{"requests"},
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "ts", Alias: ""}},
},
{
name: "ClickHouse - Outer GROUP BY overrides CTE GROUP BY + Alias should be returned if exists",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `WITH cte AS (
SELECT region, service, value
FROM metrics
WHERE metric_name = 'memory'
GROUP BY region, service
)
SELECT region as region_alias, sum(value) as total
FROM cte
GROUP BY region`,
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
},
expectedStatus: http.StatusOK,
expectedStatusStr: "success",
expectedError: false,
expectedMetrics: []string{"memory"},
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "region", Alias: "region_alias"}},
},
{
name: "ClickHouse - Invalid query should return error",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `SELECT WHERE metric_name = 'memory' GROUP BY region, service`,
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
},
expectedStatus: http.StatusBadRequest,
expectedStatusStr: "error",
expectedError: true,
errorContains: "failed to parse clickhouse query",
},
{
name: "Empty query should return error",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: "",
QueryType: querybuildertypesv5.QueryTypePromQL,
},
expectedStatus: http.StatusBadRequest,
expectedStatusStr: "error",
expectedError: true,
errorContains: "query is required and cannot be empty",
},
{
name: "Invalid queryType should return error",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `sum(rate(cpu_usage[5m]))`,
QueryType: querybuildertypesv5.QueryTypeUnknown,
},
expectedStatus: http.StatusBadRequest,
expectedStatusStr: "error",
expectedError: true,
errorContains: "unsupported queryType",
},
{
name: "Invalid PromQL syntax should return error",
requestBody: parsertypes.QueryFilterAnalyzeRequest{
Query: `sum by ((foo)(bar))(http_requests_total)`,
QueryType: querybuildertypesv5.QueryTypePromQL,
},
expectedStatus: http.StatusBadRequest,
expectedStatusStr: "error",
expectedError: true,
errorContains: "failed to parse promql query",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Create request body
reqBody, err := json.Marshal(tt.requestBody)
if err != nil {
t.Fatalf("failed to marshal request body: %v", err)
}
// Create HTTP request
req := httptest.NewRequestWithContext(context.Background(), http.MethodPost, "/api/v1/query_filter/analyze", bytes.NewBuffer(reqBody))
req.Header.Set("Content-Type", "application/json")
// Create response recorder
rr := httptest.NewRecorder()
// Call handler
aH.AnalyzeQueryFilter(rr, req)
// Check status code
if rr.Code != tt.expectedStatus {
t.Errorf("expected status %d, got %d", tt.expectedStatus, rr.Code)
}
// Parse response
var resp map[string]interface{}
if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil {
t.Fatalf("failed to unmarshal response: %v, body: %s", err, rr.Body.String())
}
// Check status string
if resp["status"] != tt.expectedStatusStr {
t.Errorf("expected status '%s', got %v", tt.expectedStatusStr, resp["status"])
}
if tt.expectedError {
errorObj, ok := resp["error"].(map[string]interface{})
if !ok {
t.Fatalf("expected error to be a map, got %T", resp["error"])
}
errorMsg, ok := errorObj["message"].(string)
if !ok {
t.Fatalf("expected error message to be a string, got %T", errorObj["message"])
}
if !strings.Contains(errorMsg, tt.errorContains) {
t.Errorf("expected error message to contain '%s', got '%s'", tt.errorContains, errorMsg)
}
} else {
// Validate success response
data, ok := resp["data"].(map[string]interface{})
if !ok {
t.Fatalf("expected data to be a map, got %T", resp["data"])
}
// Marshal data back to JSON and unmarshal into QueryFilterAnalyzeResponse struct
dataBytes, err := json.Marshal(data)
if err != nil {
t.Fatalf("failed to marshal data: %v", err)
}
var responseData parsertypes.QueryFilterAnalyzeResponse
if err := json.Unmarshal(dataBytes, &responseData); err != nil {
t.Fatalf("failed to unmarshal data into QueryFilterAnalyzeResponse: %v", err)
}
// Sort the arrays for comparison
gotMetrics := make([]string, len(responseData.MetricNames))
copy(gotMetrics, responseData.MetricNames)
sort.Strings(gotMetrics)
gotGroups := make([]parsertypes.ColumnInfoResponse, len(responseData.Groups))
copy(gotGroups, responseData.Groups)
// Compare using deep equal
if !reflect.DeepEqual(gotMetrics, tt.expectedMetrics) {
t.Errorf("expected metricNames %v, got %v", tt.expectedMetrics, gotMetrics)
}
if !reflect.DeepEqual(gotGroups, tt.expectedGroups) {
t.Errorf("expected groups %v, got %v", tt.expectedGroups, gotGroups)
}
}
})
}
}

View File

@@ -4,11 +4,18 @@
// This is useful for metrics discovery, and query analysis.
package queryfilterextractor
import "github.com/SigNoz/signoz/pkg/errors" import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
ExtractorCH = "qfe_ch"
ExtractorPromQL = "qfe_promql"
)
type ExtractorType struct {
valuer.String
}
var (
ExtractorTypeClickHouseSQL = ExtractorType{valuer.NewString("qfe_ch")}
ExtractorTypePromQL = ExtractorType{valuer.NewString("qfe_promql")}
)
// ColumnInfo represents a column in the query
@@ -46,13 +53,13 @@ type FilterExtractor interface {
Extract(query string) (*FilterResult, error)
}
func NewExtractor(extractorType string) (FilterExtractor, error) {
func NewExtractor(extractorType ExtractorType) (FilterExtractor, error) {
switch extractorType {
case ExtractorCH:
case ExtractorTypeClickHouseSQL:
return NewClickHouseFilterExtractor(), nil
case ExtractorPromQL:
case ExtractorTypePromQL:
return NewPromQLFilterExtractor(), nil
default:
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid extractor type: %s", extractorType)
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid extractor type: %s", extractorType)
}
}
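A short usage sketch of the typed extractor API above; FilterResult's MetricNames and GroupByColumns fields follow the names used by the handler and tests earlier in this changeset, and the expected output matches the `sum by (pod) (up)` test case:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
)

func main() {
	extractor, err := queryfilterextractor.NewExtractor(queryfilterextractor.ExtractorTypePromQL)
	if err != nil {
		panic(err)
	}
	result, err := extractor.Extract(`sum by (pod) (up)`)
	if err != nil {
		panic(err)
	}
	fmt.Println(result.MetricNames)    // [up]
	fmt.Println(result.GroupByColumns) // one column with Name "pod"
}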

View File

@@ -0,0 +1,14 @@
package queryparser
import (
"context"
"github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// QueryParser defines the interface for parsing and analyzing queries.
type QueryParser interface {
// AnalyzeQueryFilter extracts filter conditions from a given query string.
AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error)
}

View File

@@ -0,0 +1,40 @@
package queryparser
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
type queryParserImpl struct {
settings factory.ProviderSettings
}
// New creates a new implementation of the QueryParser service.
func New(settings factory.ProviderSettings) QueryParser {
return &queryParserImpl{
settings: settings,
}
}
func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error) {
var extractorType queryfilterextractor.ExtractorType
switch queryType {
case querybuildertypesv5.QueryTypePromQL:
extractorType = queryfilterextractor.ExtractorTypePromQL
case querybuildertypesv5.QueryTypeClickHouseSQL:
extractorType = queryfilterextractor.ExtractorTypeClickHouseSQL
default:
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %s. Supported values are '%s' and '%s'", queryType, querybuildertypesv5.QueryTypePromQL, querybuildertypesv5.QueryTypeClickHouseSQL)
}
// Create extractor
extractor, err := queryfilterextractor.NewExtractor(extractorType)
if err != nil {
return nil, err
}
return extractor.Extract(query)
}

View File

@@ -35,37 +35,37 @@ import (
)
type Handlers struct {
Organization organization.Handler
Preference preference.Handler
User user.Handler
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
AuthDomain authdomain.Handler
Session session.Handler
SpanPercentile spanpercentile.Handler
Services services.Handler
Metrics metricsexplorer.Handler
MetricsExplorer metricsexplorer.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing) Handlers {
return Handlers{
Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter),
Preference: implpreference.NewHandler(modules.Preference),
User: impluser.NewHandler(modules.User, modules.UserGetter),
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings, querier, licensing),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
AuthDomain: implauthdomain.NewHandler(modules.AuthDomain),
Session: implsession.NewHandler(modules.Session),
Services: implservices.NewHandler(modules.Services),
Metrics: implmetricsexplorer.NewHandler(modules.Metrics),
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
}
}

View File

@@ -47,22 +47,22 @@ import (
)
type Modules struct {
OrgGetter organization.Getter
OrgSetter organization.Setter
Preference preference.Module
User user.Module
UserGetter user.Getter
SavedView savedview.Module
Apdex apdex.Module
Dashboard dashboard.Module
QuickFilter quickfilter.Module
TraceFunnel tracefunnel.Module
RawDataExport rawdataexport.Module
AuthDomain authdomain.Module
Session session.Module
Services services.Module
SpanPercentile spanpercentile.Module
Metrics metricsexplorer.Module
MetricsExplorer metricsexplorer.Module
}
func NewModules(
@@ -86,21 +86,21 @@ func NewModules(
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
return Modules{
OrgGetter: orgGetter,
OrgSetter: orgSetter,
Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
SavedView: implsavedview.NewModule(sqlstore),
Apdex: implapdex.NewModule(sqlstore),
Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil)),
User: user,
UserGetter: userGetter,
QuickFilter: quickfilter,
TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
RawDataExport: implrawdataexport.NewModule(querier),
AuthDomain: implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs),
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
Services: implservices.NewModule(querier, telemetryStore),
Metrics: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings),
}
}

View File

@@ -20,6 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/prometheus" "github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier" "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder" "github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sqlmigration" "github.com/SigNoz/signoz/pkg/sqlmigration"
"github.com/SigNoz/signoz/pkg/sqlmigrator" "github.com/SigNoz/signoz/pkg/sqlmigrator"
@@ -62,6 +63,7 @@ type SigNoz struct {
Authz authz.AuthZ
Modules Modules
Handlers Handlers
QueryParser queryparser.QueryParser
}
func New(
@@ -309,6 +311,9 @@ func New(
return nil, err
}
// Initialize query parser
queryParser := queryparser.New(providerSettings)
// Initialize authns
store := sqlauthnstore.NewStore(sqlstore)
authNs, err := authNsCallback(ctx, providerSettings, store, licensing)
@@ -402,5 +407,6 @@ func New(
Authz: authz,
Modules: modules,
Handlers: handlers,
QueryParser: queryParser,
}, nil
}

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
"fmt" "fmt"
"slices" "slices"
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/errors"
@@ -52,7 +51,8 @@ func (c *conditionBuilder) conditionFor(
return "", err return "", err
} }
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) { // Check if this is a body JSON search - either by FieldContext
if key.FieldContext == telemetrytypes.FieldContextBody {
tblFieldName, value = GetBodyJSONKey(ctx, key, operator, value) tblFieldName, value = GetBodyJSONKey(ctx, key, operator, value)
} }
@@ -164,7 +164,8 @@ func (c *conditionBuilder) conditionFor(
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
// Check if this is a body JSON search - by FieldContext
if key.FieldContext == telemetrytypes.FieldContextBody {
if operator == qbtypes.FilterOperatorExists {
return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
} else {
@@ -173,45 +174,57 @@ func (c *conditionBuilder) conditionFor(
}
var value any
switch column.Type {
switch column.Type.GetType() {
case schema.JSONColumnType{}:
case schema.ColumnTypeEnumJSON:
if operator == qbtypes.FilterOperatorExists {
return sb.IsNotNull(tblFieldName), nil
} else {
return sb.IsNull(tblFieldName), nil
}
case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}:
case schema.ColumnTypeEnumLowCardinality:
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
case schema.ColumnTypeEnumString:
value = ""
if operator == qbtypes.FilterOperatorExists {
return sb.NE(tblFieldName, value), nil
}
return sb.E(tblFieldName, value), nil
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
}
case schema.ColumnTypeEnumString:
value = "" value = ""
if operator == qbtypes.FilterOperatorExists { if operator == qbtypes.FilterOperatorExists {
return sb.NE(tblFieldName, value), nil return sb.NE(tblFieldName, value), nil
} else { } else {
return sb.E(tblFieldName, value), nil return sb.E(tblFieldName, value), nil
} }
case schema.ColumnTypeUInt64, schema.ColumnTypeUInt32, schema.ColumnTypeUInt8: case schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
value = 0 value = 0
if operator == qbtypes.FilterOperatorExists { if operator == qbtypes.FilterOperatorExists {
return sb.NE(tblFieldName, value), nil return sb.NE(tblFieldName, value), nil
} else { } else {
return sb.E(tblFieldName, value), nil return sb.E(tblFieldName, value), nil
} }
case schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeString,
}, schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeBool,
}, schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeFloat64,
}:
leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
if key.Materialized {
leftOperand = telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
}
if operator == qbtypes.FilterOperatorExists {
return sb.E(leftOperand, true), nil
} else {
return sb.NE(leftOperand, true), nil
}
case schema.ColumnTypeEnumMap:
keyType := column.Type.(schema.MapColumnType).KeyType
if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
}
switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
if key.Materialized {
leftOperand = telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
}
if operator == qbtypes.FilterOperatorExists {
return sb.E(leftOperand, true), nil
} else {
return sb.NE(leftOperand, true), nil
}
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
}
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", column.Type)
@@ -238,7 +251,7 @@ func (c *conditionBuilder) ConditionFor(
// skip adding exists filter for intrinsic fields
// with an exception for body json search
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(maps.Keys(IntrinsicFields), field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
if slices.Contains(maps.Keys(IntrinsicFields), field) && key.FieldContext != telemetrytypes.FieldContextBody {
return condition, nil
}
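For the map branch above, the generated predicate is a mapContains membership check compared against true. A minimal sketch of the fragment it yields, assuming github.com/huandu/go-sqlbuilder and illustrative column/key names:

package main

import (
	"fmt"

	sqlbuilder "github.com/huandu/go-sqlbuilder"
)

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("*")
	sb.From("logs") // illustrative table
	leftOperand := fmt.Sprintf("mapContains(%s, '%s')", "attributes_string", "http.method")
	sb.Where(sb.E(leftOperand, true))
	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(query) // SELECT * FROM logs WHERE mapContains(attributes_string, 'http.method') = ?
	fmt.Println(args)  // [true]
}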

View File

@@ -465,7 +465,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Equal operator - int64",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorEqual,
value: 200,
@@ -475,7 +476,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Equal operator - float64",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.duration_ms",
Name: "duration_ms",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorEqual,
value: 405.5,
@@ -485,7 +487,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Equal operator - string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.method",
Name: "http.method",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorEqual,
value: "GET",
@@ -495,7 +498,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Equal operator - bool",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.success",
Name: "http.success",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorEqual,
value: true,
@@ -505,7 +509,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Exists operator",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorExists,
value: nil,
@@ -515,7 +520,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Not Exists operator",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorNotExists,
value: nil,
@@ -525,7 +531,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Greater than operator - string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorGreaterThan,
value: "200",
@@ -535,7 +542,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Greater than operator - int64",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorGreaterThan,
value: 200,
@@ -545,7 +553,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Less than operator - string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorLessThan,
value: "300",
@@ -555,7 +564,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Less than operator - int64",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorLessThan,
value: 300,
@@ -565,7 +575,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Contains operator - string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorContains,
value: "200",
@@ -575,7 +586,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Not Contains operator - string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorNotContains,
value: "200",
@@ -585,7 +597,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Between operator - string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorBetween,
value: []any{"200", "300"},
@@ -595,7 +608,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Between operator - int64",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorBetween,
value: []any{400, 500},
@@ -605,7 +619,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "In operator - string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorIn,
value: []any{"200", "300"},
@@ -615,7 +630,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "In operator - int64",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.status_code",
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorIn,
value: []any{401, 404, 500},
@@ -625,7 +641,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Regexp operator - json body string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.method",
Name: "http.method",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorRegexp,
value: "GET|POST|PUT",
@@ -635,7 +652,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Not Regexp operator - json body string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.method",
Name: "http.method",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorNotRegexp,
value: "DELETE|PATCH",
@@ -645,7 +663,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{
name: "Regexp operator - json body with dots in path",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.user.email",
Name: "user.email",
FieldContext: telemetrytypes.FieldContextBody,
},
operator: qbtypes.FilterOperatorRegexp,
value: "^.*@example\\.com$",
@@ -655,7 +674,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
{ {
name: "Not Regexp operator - json body nested path", name: "Not Regexp operator - json body nested path",
key: telemetrytypes.TelemetryFieldKey{ key: telemetrytypes.TelemetryFieldKey{
Name: "body.response.headers.content-type", Name: "response.headers.content-type",
FieldContext: telemetrytypes.FieldContextBody,
}, },
operator: qbtypes.FilterOperatorNotRegexp, operator: qbtypes.FilterOperatorNotRegexp,
value: "^text/.*", value: "^text/.*",

View File

@@ -1,6 +1,8 @@
 package telemetrylogs
 import (
+	"github.com/SigNoz/signoz-otel-collector/constants"
+	"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 )
@@ -16,6 +18,8 @@ const (
 	LogsV2TimestampColumn = "timestamp"
 	LogsV2ObservedTimestampColumn = "observed_timestamp"
 	LogsV2BodyColumn = "body"
+	LogsV2BodyJSONColumn = constants.BodyJSONColumn
+	LogsV2BodyPromotedColumn = constants.BodyPromotedColumn
 	LogsV2TraceIDColumn = "trace_id"
 	LogsV2SpanIDColumn = "span_id"
 	LogsV2TraceFlagsColumn = "trace_flags"
@@ -30,6 +34,11 @@ const (
 	LogsV2AttributesBoolColumn = "attributes_bool"
 	LogsV2ResourcesStringColumn = "resources_string"
 	LogsV2ScopeStringColumn = "scope_string"
+	BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
+	BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
+	ArraySep = jsontypeexporter.ArraySeparator
+	ArrayAnyIndex = "[*]."
 )
 var (

View File

@@ -82,10 +82,13 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
 		case telemetrytypes.FieldDataTypeBool:
 			return logsV2Columns["attributes_bool"], nil
 		}
+	case telemetrytypes.FieldContextBody:
+		// body context fields are stored in the body column
+		return logsV2Columns["body"], nil
 	case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
 		col, ok := logsV2Columns[key.Name]
 		if !ok {
-			// check if the key has body JSON search
+			// check if the key has body JSON search (backward compatibility)
 			if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
 				return logsV2Columns["body"], nil
 			}
@@ -103,8 +106,8 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
 		return "", err
 	}
-	switch column.Type {
-	case schema.JSONColumnType{}:
+	switch column.Type.GetType() {
+	case schema.ColumnTypeEnumJSON:
 		// json is only supported for resource context as of now
 		if key.FieldContext != telemetrytypes.FieldContextResource {
 			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
@@ -121,40 +124,32 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
 		} else {
 			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
 		}
-	case schema.ColumnTypeString,
-		schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-		schema.ColumnTypeUInt64,
-		schema.ColumnTypeUInt32,
-		schema.ColumnTypeUInt8:
-		return column.Name, nil
-	case schema.MapColumnType{
-		KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-		ValueType: schema.ColumnTypeString,
-	}:
-		// a key could have been materialized, if so return the materialized column name
-		if key.Materialized {
-			return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
-		}
-		return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
-	case schema.MapColumnType{
-		KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-		ValueType: schema.ColumnTypeFloat64,
-	}:
-		// a key could have been materialized, if so return the materialized column name
-		if key.Materialized {
-			return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
-		}
-		return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
-	case schema.MapColumnType{
-		KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-		ValueType: schema.ColumnTypeBool,
-	}:
-		// a key could have been materialized, if so return the materialized column name
-		if key.Materialized {
-			return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
-		}
-		return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
+	case schema.ColumnTypeEnumLowCardinality:
+		switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
+		case schema.ColumnTypeEnumString:
+			return column.Name, nil
+		default:
+			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
+		}
+	case schema.ColumnTypeEnumString,
+		schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
+		return column.Name, nil
+	case schema.ColumnTypeEnumMap:
+		keyType := column.Type.(schema.MapColumnType).KeyType
+		if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
+			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
+		}
+
+		switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
+		case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
+			// a key could have been materialized, if so return the materialized column name
+			if key.Materialized {
+				return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
+			}
+			return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
+		default:
+			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
+		}
 	}
 	// should not reach here
 	return column.Name, nil
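
Reviewer note: the refactor above replaces case arms that matched concrete type struct literals (which only hit when every parameter, such as a FixedString length, is identical) with a match on a stable enum, followed by a type assertion only where the concrete parameters matter, and the default branches now surface unsupported combinations as explicit errors. A minimal standalone sketch of the pattern, with hypothetical stand-ins for the schema package's types:

package main

import "fmt"

// Hypothetical stand-ins for the schema package's column types.
type ColumnTypeEnum int

const (
	ColumnTypeEnumString ColumnTypeEnum = iota
	ColumnTypeEnumMap
)

type ColumnType interface{ GetType() ColumnTypeEnum }

type StringColumnType struct{}

func (StringColumnType) GetType() ColumnTypeEnum { return ColumnTypeEnumString }

type MapColumnType struct{ KeyType, ValueType ColumnType }

func (MapColumnType) GetType() ColumnTypeEnum { return ColumnTypeEnumMap }

// fieldFor dispatches on the enum first and only then asserts the concrete
// type, so parameterized variants all land in the right branch.
func fieldFor(columnName, keyName string, t ColumnType) (string, error) {
	switch t.GetType() {
	case ColumnTypeEnumString:
		return columnName, nil
	case ColumnTypeEnumMap:
		m := t.(MapColumnType)
		if m.ValueType.GetType() != ColumnTypeEnumString {
			return "", fmt.Errorf("unsupported map value type")
		}
		return fmt.Sprintf("%s['%s']", columnName, keyName), nil
	}
	return columnName, nil
}

func main() {
	expr, _ := fieldFor("attributes_string", "http.method", MapColumnType{StringColumnType{}, StringColumnType{}})
	fmt.Println(expr) // attributes_string['http.method']
}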

View File

@@ -21,7 +21,6 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
 		ConditionBuilder: cb,
 		FieldKeys: keys,
 		FullTextColumn: DefaultFullTextColumn,
-		JsonBodyPrefix: BodyJSONStringSearchPrefix,
 		JsonKeyToKey: GetBodyJSONKey,
 	}
@@ -58,7 +57,6 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
 		ConditionBuilder: cb,
 		FieldKeys: keys,
 		FullTextColumn: DefaultFullTextColumn,
-		JsonBodyPrefix: BodyJSONStringSearchPrefix,
 		JsonKeyToKey: GetBodyJSONKey,
 	}

View File

@@ -27,8 +27,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
 		FullTextColumn: &telemetrytypes.TelemetryFieldKey{
 			Name: "body",
 		},
-		JsonBodyPrefix: "body",
 		JsonKeyToKey: GetBodyJSONKey,
 	}
 	testCases := []struct {
@@ -163,7 +162,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
 	for _, tc := range testCases {
 		t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
 			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
 			if tc.shouldPass {
 				if err != nil {

View File

@@ -27,7 +27,6 @@ func TestFilterExprLogs(t *testing.T) {
 		ConditionBuilder: cb,
 		FieldKeys: keys,
 		FullTextColumn: DefaultFullTextColumn,
-		JsonBodyPrefix: BodyJSONStringSearchPrefix,
 		JsonKeyToKey: GetBodyJSONKey,
 	}
@@ -2448,7 +2447,6 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
 		ConditionBuilder: cb,
 		FieldKeys: keys,
 		FullTextColumn: DefaultFullTextColumn,
-		JsonBodyPrefix: BodyJSONStringSearchPrefix,
 		JsonKeyToKey: GetBodyJSONKey,
 	}

View File

@@ -69,7 +69,7 @@ func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp
 }
 func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
-	parts := strings.Split(key.Name, ".")[1:]
+	parts := strings.Split(key.Name, ".")
 	newParts := []string{}
 	for _, part := range parts {
 		if strings.HasSuffix(part, "[*]") {
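
Reviewer note: the one-line change above stops dropping the first path segment, since body keys now arrive without the `body.` prefix (the context travels in FieldContextBody instead). A runnable before/after sketch:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Before: keys carried the "body." prefix, so the first segment was dropped.
	before := strings.Split("body.http.status_code", ".")[1:]
	// After: FieldContextBody carries the context and the name is the bare path,
	// so every segment is kept.
	after := strings.Split("http.status_code", ".")
	fmt.Println(before, after) // [http status_code] [http status_code]
}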

View File

@@ -589,10 +589,9 @@ func (b *logQueryStatementBuilder) addFilterCondition(
 			FieldKeys: keys,
 			SkipResourceFilter: true,
 			FullTextColumn: b.fullTextColumn,
-			JsonBodyPrefix: b.jsonBodyPrefix,
 			JsonKeyToKey: b.jsonKeyToKey,
 			Variables: variables,
 		}, start, end)
 	if err != nil {
 		return nil, err

View File

@@ -8,4 +8,7 @@ const (
 	TagAttributesV2LocalTableName = "tag_attributes_v2"
 	LogAttributeKeysTblName = "distributed_logs_attribute_keys"
 	LogResourceKeysTblName = "distributed_logs_resource_keys"
+	PathTypesTableName = "distributed_json_path_types"
+	PromotedPathsTableName = "distributed_json_promoted_paths"
+	SkipIndexTableName = "system.data_skipping_indices"
 )

View File

@@ -0,0 +1,496 @@
package telemetrymetadata
import (
"context"
"fmt"
"reflect"
"strings"
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/ClickHouse/clickhouse-go/v2/lib/chcol"
schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/utils"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
)
var (
defaultPathLimit = 100 // Default limit to prevent full table scans
CodeUnknownJSONDataType = errors.MustNewCode("unknown_json_data_type")
CodeFailLoadPromotedPaths = errors.MustNewCode("fail_load_promoted_paths")
CodeFailCheckPathPromoted = errors.MustNewCode("fail_check_path_promoted")
CodeFailIterateBodyJSONKeys = errors.MustNewCode("fail_iterate_body_json_keys")
CodeFailExtractBodyJSONKeys = errors.MustNewCode("fail_extract_body_json_keys")
CodeFailLoadLogsJSONIndexes = errors.MustNewCode("fail_load_logs_json_indexes")
CodeFailListJSONValues = errors.MustNewCode("fail_list_json_values")
CodeFailScanJSONValue = errors.MustNewCode("fail_scan_json_value")
CodeFailScanVariant = errors.MustNewCode("fail_scan_variant")
CodeFailBuildJSONPathsQuery = errors.MustNewCode("fail_build_json_paths_query")
CodeNoPathsToQueryIndexes = errors.MustNewCode("no_paths_to_query_indexes_provided")
)
// getBodyJSONPaths extracts body JSON paths from the path_types table.
// It can be used by both the JSON query builder and metadata extraction.
// The effective limit is the sum of the selectors' Limit fields, falling back
// to defaultPathLimit when that sum is zero:
//   - startup load: pass a large limit (e.g. 10000) to get the top unique paths
//   - single-path lookup: no explicit limit is needed
//   - metadata API: pass the desired pagination limit
//
// Exact selectors match with equality; all other match types fall back to LIKE.
// Returns the field keys, whether the result is complete (row count <= limit), and an error.
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.TelemetryStore,
fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
query, args, limit, err := buildGetBodyJSONPathsQuery(fieldKeySelectors)
if err != nil {
return nil, false, err
}
rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.WrapInternalf(err, CodeFailExtractBodyJSONKeys, "failed to extract body JSON keys")
}
defer rows.Close()
fieldKeys := []*telemetrytypes.TelemetryFieldKey{}
paths := []string{}
rowCount := 0
for rows.Next() {
var path string
var typesArray []string // ClickHouse returns array as []string
var lastSeen uint64
err = rows.Scan(&path, &typesArray, &lastSeen)
if err != nil {
return nil, false, errors.WrapInternalf(err, CodeFailExtractBodyJSONKeys, "failed to scan body JSON key row")
}
for _, typ := range typesArray {
mapping, found := telemetrytypes.MappingStringToJSONDataType[typ]
if !found {
return nil, false, errors.NewInternalf(CodeUnknownJSONDataType, "failed to map type string to JSON data type: %s", typ)
}
fieldKeys = append(fieldKeys, &telemetrytypes.TelemetryFieldKey{
Name: path,
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextBody,
FieldDataType: telemetrytypes.MappingJSONDataTypeToFieldDataType[mapping],
JSONDataType: &mapping,
})
}
paths = append(paths, path)
rowCount++
}
if rows.Err() != nil {
return nil, false, errors.WrapInternalf(rows.Err(), CodeFailIterateBodyJSONKeys, "error iterating body JSON keys")
}
promoted, err := GetPromotedPaths(ctx, telemetryStore.ClickhouseDB(), paths...)
if err != nil {
return nil, false, err
}
indexes, err := getJSONPathIndexes(ctx, telemetryStore, paths...)
if err != nil {
return nil, false, err
}
for _, fieldKey := range fieldKeys {
fieldKey.Materialized = promoted.Contains(fieldKey.Name)
fieldKey.Indexes = indexes[fieldKey.Name]
}
return fieldKeys, rowCount <= limit, nil
}
func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySelector) (string, []any, int, error) {
if len(fieldKeySelectors) == 0 {
return "", nil, defaultPathLimit, errors.NewInternalf(CodeFailBuildJSONPathsQuery, "no field key selectors provided")
}
from := fmt.Sprintf("%s.%s", DBName, PathTypesTableName)
// Deduplicate at the database level with GROUP BY: aggregate all types per
// path, take the max last_seen per path, then apply the LIMIT
sb := sqlbuilder.Select(
"path",
"groupArray(DISTINCT type) AS types",
"max(last_seen) AS last_seen",
).From(from)
limit := 0
// Add search filter if provided
orClauses := []string{}
for _, fieldKeySelector := range fieldKeySelectors {
// replace [*] with []
fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrylogs.ArrayAnyIndex, telemetrylogs.ArraySep)
// Extract search text for body JSON keys
keyName := CleanPathPrefixes(fieldKeySelector.Name)
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
orClauses = append(orClauses, sb.Equal("path", keyName))
} else {
// Pattern matching for metadata API (defaults to LIKE behavior for other operators)
orClauses = append(orClauses, sb.Like("path", querybuilder.FormatValueForContains(keyName)))
}
limit += fieldKeySelector.Limit
}
sb.Where(sb.Or(orClauses...))
// Group by path to get unique paths with aggregated types
sb.GroupBy("path")
// Order by max last_seen to get most recent paths first
sb.OrderBy("last_seen DESC")
if limit == 0 {
limit = defaultPathLimit
}
sb.Limit(limit)
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return query, args, limit, nil
}
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
filteredPaths := []string{}
for _, path := range paths {
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
continue
}
filteredPaths = append(filteredPaths, path)
}
if len(filteredPaths) == 0 {
return nil, errors.NewInternalf(CodeNoPathsToQueryIndexes, "no paths to query indexes provided")
}
// list indexes for the paths
indexesMap, err := ListLogsJSONIndexes(ctx, telemetryStore, filteredPaths...)
if err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to list JSON path indexes")
}
// build a set of indexes
cleanIndexes := make(map[string][]telemetrytypes.JSONDataTypeIndex)
for path, indexes := range indexesMap {
for _, index := range indexes {
columnExpr, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
if err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to unfold JSON sub column index expression: %s", index.Expression)
}
jsonDataType, found := telemetrytypes.MappingStringToJSONDataType[columnType]
if !found {
return nil, errors.NewInternalf(CodeUnknownJSONDataType, "failed to map column type to JSON data type: %s", columnType)
}
if jsonDataType == telemetrytypes.String {
cleanIndexes[path] = append(cleanIndexes[path], telemetrytypes.JSONDataTypeIndex{
Type: telemetrytypes.String,
ColumnExpression: columnExpr,
IndexExpression: index.Expression,
})
} else if strings.HasPrefix(index.Type, "minmax") {
cleanIndexes[path] = append(cleanIndexes[path], telemetrytypes.JSONDataTypeIndex{
Type: jsonDataType,
ColumnExpression: columnExpr,
IndexExpression: index.Expression,
})
}
}
}
return cleanIndexes, nil
}
func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, []any) {
// Lists data-skipping indexes on the local logs table whose expressions
// reference the body JSON or promoted columns, optionally narrowed by filters
sb := sqlbuilder.Select(
"name", "type_full", "expr", "granularity",
).From(fmt.Sprintf("clusterAllReplicas('%s', %s)", cluster, SkipIndexTableName))
sb.Where(sb.Equal("database", telemetrylogs.DBName))
sb.Where(sb.Equal("table", telemetrylogs.LogsV2LocalTableName))
sb.Where(sb.Or(
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix))),
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix))),
))
filterExprs := []string{}
for _, filter := range filters {
filterExprs = append(filterExprs, sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(filter))))
}
sb.Where(sb.Or(filterExprs...))
return sb.BuildWithFlavor(sqlbuilder.ClickHouse)
}
func ListLogsJSONIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, filters ...string) (map[string][]schemamigrator.Index, error) {
query, args := buildListLogsJSONIndexesQuery(telemetryStore.Cluster(), filters...)
rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to load string indexed columns")
}
defer rows.Close()
indexesMap := make(map[string][]schemamigrator.Index)
for rows.Next() {
var name string
var typeFull string
var expr string
var granularity uint64
if err := rows.Scan(&name, &typeFull, &expr, &granularity); err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to scan string indexed column")
}
indexesMap[name] = append(indexesMap[name], schemamigrator.Index{
Name: name,
Type: typeFull,
Expression: expr,
Granularity: int(granularity),
})
}
return indexesMap, nil
}
func ListPromotedPaths(ctx context.Context, conn clickhouse.Conn) (map[string]struct{}, error) {
query := fmt.Sprintf("SELECT path FROM %s.%s", DBName, PromotedPathsTableName)
rows, err := conn.Query(ctx, query)
if err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadPromotedPaths, "failed to load promoted paths")
}
defer rows.Close()
next := make(map[string]struct{})
for rows.Next() {
var path string
if err := rows.Scan(&path); err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadPromotedPaths, "failed to scan promoted path")
}
next[path] = struct{}{}
}
return next, nil
}
// TODO(Piyush): Remove this if not used in future
func ListJSONValues(ctx context.Context, conn clickhouse.Conn, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
path = CleanPathPrefixes(path)
if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
}
promoted, err := IsPathPromoted(ctx, conn, path)
if err != nil {
return nil, false, err
}
if promoted {
path = telemetrylogs.BodyPromotedColumnPrefix + path
} else {
path = telemetrylogs.BodyJSONColumnPrefix + path
}
from := fmt.Sprintf("%s.%s", telemetrylogs.DBName, telemetrylogs.LogsV2TableName)
colExpr := func(typ telemetrytypes.JSONDataType) string {
return fmt.Sprintf("dynamicElement(%s, '%s')", path, typ.StringValue())
}
sb := sqlbuilder.Select(
colExpr(telemetrytypes.String),
colExpr(telemetrytypes.Int64),
colExpr(telemetrytypes.Float64),
colExpr(telemetrytypes.Bool),
colExpr(telemetrytypes.ArrayString),
colExpr(telemetrytypes.ArrayInt64),
colExpr(telemetrytypes.ArrayFloat64),
colExpr(telemetrytypes.ArrayBool),
colExpr(telemetrytypes.ArrayDynamic),
).From(from)
sb.Where(fmt.Sprintf("%s IS NOT NULL", path))
sb.Limit(limit)
contextWithTimeout, cancel := context.WithTimeout(ctx, 5*time.Second)
defer cancel()
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
rows, err := conn.Query(contextWithTimeout, query, args...)
if err != nil {
if errors.Is(err, context.DeadlineExceeded) {
return nil, false, errors.WrapTimeoutf(err, errors.CodeTimeout, "query timed out").WithAdditional("failed to list JSON values")
}
return nil, false, errors.WrapInternalf(err, CodeFailListJSONValues, "failed to list JSON values")
}
defer rows.Close()
// Get column types to determine proper scan types
colTypes := rows.ColumnTypes()
scanTargets := make([]any, len(colTypes))
for i := range colTypes {
scanTargets[i] = reflect.New(colTypes[i].ScanType()).Interface()
}
values := &telemetrytypes.TelemetryFieldValues{}
for rows.Next() {
// Create fresh scan targets for each row
scan := make([]any, len(colTypes))
for i := range colTypes {
scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
}
if err := rows.Scan(scan...); err != nil {
return nil, false, errors.WrapInternalf(err, CodeFailListJSONValues, "failed to scan JSON value row")
}
// Extract values from scan targets and process them
// Column order: String, Int64, Float64, Bool, ArrayString, ArrayInt64, ArrayFloat64, ArrayBool, ArrayDynamic
var consume func(scan []any) error
consume = func(scan []any) error {
for _, value := range scan {
value := derefValue(value) // dereference the double pointer if it is a pointer
switch value := value.(type) {
case string:
values.StringValues = append(values.StringValues, value)
case int64:
values.NumberValues = append(values.NumberValues, float64(value))
case float64:
values.NumberValues = append(values.NumberValues, value)
case bool:
values.BoolValues = append(values.BoolValues, value)
case []*string:
for _, str := range value {
if str != nil {
values.StringValues = append(values.StringValues, *str)
}
}
case []*int64:
for _, num := range value {
if num != nil {
values.NumberValues = append(values.NumberValues, float64(*num))
}
}
case []*float64:
for _, num := range value {
if num != nil {
values.NumberValues = append(values.NumberValues, float64(*num))
}
}
case []*bool:
for _, boolVal := range value {
if boolVal != nil {
values.BoolValues = append(values.BoolValues, *boolVal)
}
}
case chcol.Variant:
if !value.Nil() {
if err := consume([]any{value.Any()}); err != nil {
return err
}
}
case []chcol.Variant:
extractedValues := make([]any, len(value))
for idx, variant := range value {
if !variant.Nil() && variant.Type() != "JSON" { // skip JSON values since they are only relevant for nested keys
extractedValues[idx] = variant.Any()
}
}
if err := consume(extractedValues); err != nil {
return err
}
default:
if value == nil {
continue
}
return errors.NewInternalf(CodeFailScanJSONValue, "unknown JSON value type: %T", value)
}
}
return nil
}
if err := consume(scan); err != nil {
return nil, false, err
}
}
if err := rows.Err(); err != nil {
return nil, false, errors.WrapInternalf(err, CodeFailListJSONValues, "error iterating JSON values")
}
return values, true, nil
}
func derefValue(v any) any {
if v == nil {
return nil
}
val := reflect.ValueOf(v)
for val.Kind() == reflect.Ptr {
if val.IsNil() {
return nil
}
val = val.Elem()
}
return val.Interface()
}
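
Reviewer note: derefValue walks pointer levels because the scan targets above are built with reflect.New over the driver's scan types, which yields a pointer to a (often itself pointer-typed) value. A standalone illustration:

package main

import (
	"fmt"
	"reflect"
)

func derefValue(v any) any {
	if v == nil {
		return nil
	}
	val := reflect.ValueOf(v)
	for val.Kind() == reflect.Ptr {
		if val.IsNil() {
			return nil
		}
		val = val.Elem()
	}
	return val.Interface()
}

func main() {
	s := "hello"
	ps := &s
	// reflect.New(colType.ScanType()).Interface() often hands back a **string
	// (a pointer to the nullable scan type), hence the loop above.
	fmt.Println(derefValue(&ps)) // hello
	var nilPtr *string
	fmt.Println(derefValue(&nilPtr)) // <nil>
}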
// IsPathPromoted checks if a specific path is promoted
func IsPathPromoted(ctx context.Context, conn clickhouse.Conn, path string) (bool, error) {
split := strings.Split(path, telemetrylogs.ArraySep)
query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
rows, err := conn.Query(ctx, query, split[0])
if err != nil {
return false, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to check if path %s is promoted", path)
}
defer rows.Close()
return rows.Next(), nil
}
// GetPromotedPaths returns the subset of the given paths that are promoted
func GetPromotedPaths(ctx context.Context, conn clickhouse.Conn, paths ...string) (*utils.ConcurrentSet[string], error) {
sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
pathConditions := []string{}
for _, path := range paths {
pathConditions = append(pathConditions, sb.Equal("path", path))
}
sb.Where(sb.Or(pathConditions...))
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
rows, err := conn.Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to get promoted paths")
}
defer rows.Close()
promotedPaths := utils.NewConcurrentSet[string]()
for rows.Next() {
var path string
if err := rows.Scan(&path); err != nil {
return nil, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to scan promoted path")
}
promotedPaths.Insert(path)
}
return promotedPaths, nil
}
// TODO(Piyush): Remove this function
func CleanPathPrefixes(path string) string {
path = strings.TrimPrefix(path, telemetrytypes.BodyJSONStringSearchPrefix)
path = strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
return path
}
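
Reviewer note: a quick sketch of the prefix stripping; only the `body.` search prefix is confirmed by this PR, and `body_json.` below is a hypothetical stand-in for the collector's column prefixes:

package main

import (
	"fmt"
	"strings"
)

func main() {
	clean := func(p string) string {
		p = strings.TrimPrefix(p, "body.")      // BodyJSONStringSearchPrefix
		p = strings.TrimPrefix(p, "body_json.") // hypothetical column prefix
		return p
	}
	fmt.Println(clean("body.user.email")) // user.email
	fmt.Println(clean("user.email"))      // unchanged: user.email
}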

View File

@@ -0,0 +1,151 @@
package telemetrymetadata
import (
"fmt"
"testing"
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/require"
)
func TestBuildGetBodyJSONPathsQuery(t *testing.T) {
testCases := []struct {
name string
fieldKeySelectors []*telemetrytypes.FieldKeySelector
expectedSQL string
expectedArgs []any
expectedLimit int
}{
{
name: "Single search text with EQUAL operator",
fieldKeySelectors: []*telemetrytypes.FieldKeySelector{
{
Name: "user.name",
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
},
},
expectedSQL: "SELECT path, groupArray(DISTINCT type) AS types, max(last_seen) AS last_seen FROM signoz_metadata.distributed_json_path_types WHERE (path = ?) GROUP BY path ORDER BY last_seen DESC LIMIT ?",
expectedArgs: []any{"user.name", defaultPathLimit},
expectedLimit: defaultPathLimit,
},
{
name: "Single search text with LIKE operator",
fieldKeySelectors: []*telemetrytypes.FieldKeySelector{
{
Name: "user",
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
},
},
expectedSQL: "SELECT path, groupArray(DISTINCT type) AS types, max(last_seen) AS last_seen FROM signoz_metadata.distributed_json_path_types WHERE (path LIKE ?) GROUP BY path ORDER BY last_seen DESC LIMIT ?",
expectedArgs: []any{"user", 100},
expectedLimit: 100,
},
{
name: "Multiple search texts with EQUAL operator",
fieldKeySelectors: []*telemetrytypes.FieldKeySelector{
{
Name: "user.name",
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
},
{
Name: "user.age",
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
},
},
expectedSQL: "SELECT path, groupArray(DISTINCT type) AS types, max(last_seen) AS last_seen FROM signoz_metadata.distributed_json_path_types WHERE (path = ? OR path = ?) GROUP BY path ORDER BY last_seen DESC LIMIT ?",
expectedArgs: []any{"user.name", "user.age", defaultPathLimit},
expectedLimit: defaultPathLimit,
},
{
name: "Multiple search texts with LIKE operator",
fieldKeySelectors: []*telemetrytypes.FieldKeySelector{
{
Name: "user",
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
},
{
Name: "admin",
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
},
},
expectedSQL: "SELECT path, groupArray(DISTINCT type) AS types, max(last_seen) AS last_seen FROM signoz_metadata.distributed_json_path_types WHERE (path LIKE ? OR path LIKE ?) GROUP BY path ORDER BY last_seen DESC LIMIT ?",
expectedArgs: []any{"user", "admin", defaultPathLimit},
expectedLimit: defaultPathLimit,
},
{
name: "Search with Contains operator (should default to LIKE)",
fieldKeySelectors: []*telemetrytypes.FieldKeySelector{
{
Name: "test",
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
},
},
expectedSQL: "SELECT path, groupArray(DISTINCT type) AS types, max(last_seen) AS last_seen FROM signoz_metadata.distributed_json_path_types WHERE (path LIKE ?) GROUP BY path ORDER BY last_seen DESC LIMIT ?",
expectedArgs: []any{"test", defaultPathLimit},
expectedLimit: defaultPathLimit,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
query, args, limit, err := buildGetBodyJSONPathsQuery(tc.fieldKeySelectors)
require.NoError(t, err, "Error building query: %v", err)
require.Equal(t, tc.expectedSQL, query)
require.Equal(t, tc.expectedArgs, args)
require.Equal(t, tc.expectedLimit, limit)
})
}
}
func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
testCases := []struct {
name string
cluster string
filters []string
expectedSQL string
expectedArgs []any
}{
{
name: "No filters",
cluster: "test-cluster",
filters: nil,
expectedSQL: "SELECT name, type_full, expr, granularity FROM clusterAllReplicas('test-cluster', system.data_skipping_indices) " +
"WHERE database = ? AND table = ? AND (LOWER(expr) LIKE LOWER(?) OR LOWER(expr) LIKE LOWER(?))",
expectedArgs: []any{
telemetrylogs.DBName,
telemetrylogs.LogsV2LocalTableName,
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
},
},
{
name: "With filters",
cluster: "test-cluster",
filters: []string{"foo", "bar"},
expectedSQL: "SELECT name, type_full, expr, granularity FROM clusterAllReplicas('test-cluster', system.data_skipping_indices) " +
"WHERE database = ? AND table = ? AND (LOWER(expr) LIKE LOWER(?) OR LOWER(expr) LIKE LOWER(?)) AND (LOWER(expr) LIKE LOWER(?) OR LOWER(expr) LIKE LOWER(?))",
expectedArgs: []any{
telemetrylogs.DBName,
telemetrylogs.LogsV2LocalTableName,
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("foo")),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("bar")),
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
query, args := buildListLogsJSONIndexesQuery(tc.cluster, tc.filters...)
require.Equal(t, tc.expectedSQL, query)
require.Equal(t, tc.expectedArgs, args)
})
}
}

View File

@@ -1,7 +1,12 @@
 package telemetrymetadata
+import otelcollectorconst "github.com/SigNoz/signoz-otel-collector/constants"
 const (
 	DBName = "signoz_metadata"
 	AttributesMetadataTableName = "distributed_attributes_metadata"
 	AttributesMetadataLocalTableName = "attributes_metadata"
+	PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
+	PromotedPathsTableName = otelcollectorconst.DistributedPromotedPathsTable
+	SkipIndexTableName = "system.data_skipping_indices"
 )

View File

@@ -165,53 +165,65 @@ func (c *conditionBuilder) conditionFor(
 	case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
 		var value any
-		switch column.Type {
-		case schema.JSONColumnType{}:
+		switch column.Type.GetType() {
+		case schema.ColumnTypeEnumJSON:
 			if operator == qbtypes.FilterOperatorExists {
 				return sb.IsNotNull(tblFieldName), nil
 			} else {
 				return sb.IsNull(tblFieldName), nil
 			}
-		case schema.ColumnTypeString,
-			schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-			schema.FixedStringColumnType{Length: 32},
-			schema.DateTime64ColumnType{Precision: 9, Timezone: "UTC"}:
+		case schema.ColumnTypeEnumString,
+			schema.ColumnTypeEnumFixedString,
+			schema.ColumnTypeEnumDateTime64:
 			value = ""
 			if operator == qbtypes.FilterOperatorExists {
 				return sb.NE(tblFieldName, value), nil
 			} else {
 				return sb.E(tblFieldName, value), nil
 			}
-		case schema.ColumnTypeUInt64,
-			schema.ColumnTypeUInt32,
-			schema.ColumnTypeUInt8,
-			schema.ColumnTypeInt8,
-			schema.ColumnTypeInt16,
-			schema.ColumnTypeBool:
+		case schema.ColumnTypeEnumLowCardinality:
+			switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
+			case schema.ColumnTypeEnumString:
+				value = ""
+				if operator == qbtypes.FilterOperatorExists {
+					return sb.NE(tblFieldName, value), nil
+				}
+				return sb.E(tblFieldName, value), nil
+			default:
+				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
+			}
+		case schema.ColumnTypeEnumUInt64,
+			schema.ColumnTypeEnumUInt32,
+			schema.ColumnTypeEnumUInt8,
+			schema.ColumnTypeEnumInt8,
+			schema.ColumnTypeEnumInt16,
+			schema.ColumnTypeEnumBool:
 			value = 0
 			if operator == qbtypes.FilterOperatorExists {
 				return sb.NE(tblFieldName, value), nil
 			} else {
 				return sb.E(tblFieldName, value), nil
 			}
-		case schema.MapColumnType{
-			KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-			ValueType: schema.ColumnTypeString,
-		}, schema.MapColumnType{
-			KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-			ValueType: schema.ColumnTypeBool,
-		}, schema.MapColumnType{
-			KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
-			ValueType: schema.ColumnTypeFloat64,
-		}:
-			leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
-			if key.Materialized {
-				leftOperand = telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
-			}
-			if operator == qbtypes.FilterOperatorExists {
-				return sb.E(leftOperand, true), nil
-			} else {
-				return sb.NE(leftOperand, true), nil
-			}
+		case schema.ColumnTypeEnumMap:
+			keyType := column.Type.(schema.MapColumnType).KeyType
+			if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
+				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
+			}
+
+			switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
+			case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
+				leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
+				if key.Materialized {
+					leftOperand = telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
+				}
+				if operator == qbtypes.FilterOperatorExists {
+					return sb.E(leftOperand, true), nil
+				} else {
+					return sb.NE(leftOperand, true), nil
+				}
+			default:
+				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
+			}
 		default:
 			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", column.Type)

View File

@@ -239,8 +239,8 @@ func (m *defaultFieldMapper) FieldFor(
return "", err return "", err
} }
switch column.Type { switch column.Type.GetType() {
case schema.JSONColumnType{}: case schema.ColumnTypeEnumJSON:
// json is only supported for resource context as of now // json is only supported for resource context as of now
if key.FieldContext != telemetrytypes.FieldContextResource { if key.FieldContext != telemetrytypes.FieldContextResource {
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String) return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
@@ -256,44 +256,38 @@ func (m *defaultFieldMapper) FieldFor(
} else { } else {
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
} }
case schema.ColumnTypeEnumString,
case schema.ColumnTypeString, schema.ColumnTypeEnumUInt64,
schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, schema.ColumnTypeEnumUInt32,
schema.ColumnTypeUInt64, schema.ColumnTypeEnumInt8,
schema.ColumnTypeUInt32, schema.ColumnTypeEnumInt16,
schema.ColumnTypeInt8, schema.ColumnTypeEnumBool,
schema.ColumnTypeInt16, schema.ColumnTypeEnumDateTime64,
schema.ColumnTypeBool, schema.ColumnTypeEnumFixedString:
schema.DateTime64ColumnType{Precision: 9, Timezone: "UTC"},
schema.FixedStringColumnType{Length: 32}:
return column.Name, nil return column.Name, nil
case schema.MapColumnType{ case schema.ColumnTypeEnumLowCardinality:
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
ValueType: schema.ColumnTypeString, case schema.ColumnTypeEnumString:
}: return column.Name, nil
// a key could have been materialized, if so return the materialized column name default:
if key.Materialized { return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "value type %s is not supported for low cardinality column type %s", elementType, column.Type)
return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
} }
return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil case schema.ColumnTypeEnumMap:
case schema.MapColumnType{ keyType := column.Type.(schema.MapColumnType).KeyType
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
ValueType: schema.ColumnTypeFloat64, return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
}:
// a key could have been materialized, if so return the materialized column name
if key.Materialized {
return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
} }
return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
case schema.MapColumnType{ switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, case schema.ColumnTypeEnumString, schema.ColumnTypeEnumFloat64, schema.ColumnTypeEnumBool:
ValueType: schema.ColumnTypeBool, // a key could have been materialized, if so return the materialized column name
}: if key.Materialized {
// a key could have been materialized, if so return the materialized column name return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
if key.Materialized { }
return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "value type %s is not supported for map column type %s", valueType, column.Type)
} }
return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
} }
// should not reach here // should not reach here
return column.Name, nil return column.Name, nil

View File

@@ -93,6 +93,16 @@ func newConfig() factory.Config {
 }
 func (c Config) Validate() error {
+	// Ensure that lifetime idle is not negative
+	if c.Lifetime.Idle < 0 {
+		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "lifetime::idle must not be negative")
+	}
+
+	// Ensure that lifetime max is not negative
+	if c.Lifetime.Max < 0 {
+		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "lifetime::max must not be negative")
+	}
+
 	// Ensure that rotation interval is smaller than lifetime idle
 	if c.Rotation.Interval >= c.Lifetime.Idle {
 		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "rotation::interval must be smaller than lifetime::idle")

View File

@@ -263,7 +263,7 @@ func (provider *provider) getOrSetIdentity(ctx context.Context, orgID, userID va
 		return nil, err
 	}
-	err = provider.cache.Set(ctx, orgID, identityCacheKey(identity.UserID), identity, -1)
+	err = provider.cache.Set(ctx, orgID, identityCacheKey(identity.UserID), identity, 0)
 	if err != nil {
 		provider.settings.Logger().ErrorContext(ctx, "failed to cache identity", "error", err)
 	}

View File

@@ -410,7 +410,7 @@ func (provider *provider) setToken(ctx context.Context, token *authtypes.Token,
 }
 func (provider *provider) setIdentity(ctx context.Context, identity *authtypes.Identity) error {
-	err := provider.cache.Set(ctx, emptyOrgID, identityCacheKey(identity.UserID), identity, -1)
+	err := provider.cache.Set(ctx, emptyOrgID, identityCacheKey(identity.UserID), identity, 0)
 	if err != nil {
 		return err
 	}
@@ -434,7 +434,7 @@ func (provider *provider) getOrGetSetIdentity(ctx context.Context, userID valuer
 		return nil, err
 	}
-	err = provider.cache.Set(ctx, emptyOrgID, identityCacheKey(userID), identity, -1)
+	err = provider.cache.Set(ctx, emptyOrgID, identityCacheKey(userID), identity, 0)
 	if err != nil {
 		return nil, err
 	}

View File

@@ -220,3 +220,61 @@ type TreemapResponse struct {
 	TimeSeries []TreemapEntry `json:"timeseries"`
 	Samples []TreemapEntry `json:"samples"`
 }
// MetricHighlightsResponse is the output structure for the metric highlights endpoint.
type MetricHighlightsResponse struct {
DataPoints uint64 `json:"dataPoints"`
LastReceived uint64 `json:"lastReceived"`
TotalTimeSeries uint64 `json:"totalTimeSeries"`
ActiveTimeSeries uint64 `json:"activeTimeSeries"`
}
// MetricAttributesRequest represents the payload for the metric attributes endpoint.
type MetricAttributesRequest struct {
MetricName string `json:"metricName"`
Start *int64 `json:"start,omitempty"`
End *int64 `json:"end,omitempty"`
}
// Validate ensures MetricAttributesRequest contains acceptable values.
func (req *MetricAttributesRequest) Validate() error {
if req == nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
}
if req.MetricName == "" {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required")
}
if req.Start != nil && req.End != nil {
if *req.Start >= *req.End {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "start (%d) must be less than end (%d)", *req.Start, *req.End)
}
}
return nil
}
// UnmarshalJSON validates input immediately after decoding.
func (req *MetricAttributesRequest) UnmarshalJSON(data []byte) error {
type raw MetricAttributesRequest
var decoded raw
if err := json.Unmarshal(data, &decoded); err != nil {
return err
}
*req = MetricAttributesRequest(decoded)
return req.Validate()
}
// MetricAttribute represents a single attribute with its values and count.
type MetricAttribute struct {
Key string `json:"key"`
Values []string `json:"values"`
ValueCount uint64 `json:"valueCount"`
}
// MetricAttributesResponse is the output structure for the metric attributes endpoint.
type MetricAttributesResponse struct {
Attributes []MetricAttribute `json:"attributes"`
TotalKeys int64 `json:"totalKeys"`
}
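
Reviewer note: validation now runs eagerly inside UnmarshalJSON, so a handler gets the error straight from json.Unmarshal. A standalone sketch with a trimmed copy of the type (names shortened, error construction simplified relative to the errors package used above):

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// Trimmed copy of MetricAttributesRequest for a standalone demo.
type attrsRequest struct {
	MetricName string `json:"metricName"`
	Start      *int64 `json:"start,omitempty"`
	End        *int64 `json:"end,omitempty"`
}

func (r *attrsRequest) UnmarshalJSON(data []byte) error {
	type raw attrsRequest
	var decoded raw
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	*r = attrsRequest(decoded)
	if r.MetricName == "" {
		return errors.New("metric_name is required")
	}
	if r.Start != nil && r.End != nil && *r.Start >= *r.End {
		return fmt.Errorf("start (%d) must be less than end (%d)", *r.Start, *r.End)
	}
	return nil
}

func main() {
	var req attrsRequest
	err := json.Unmarshal([]byte(`{"metricName":"http_requests","start":5,"end":1}`), &req)
	fmt.Println(err) // start (5) must be less than end (1)
}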

View File

@@ -0,0 +1,49 @@
package parsertypes
import (
"encoding/json"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// QueryFilterAnalyzeRequest represents the request body for query filter analysis
type QueryFilterAnalyzeRequest struct {
Query string `json:"query"`
QueryType querybuildertypesv5.QueryType `json:"queryType"`
}
// UnmarshalJSON implements custom JSON unmarshaling with validation and normalization
func (q *QueryFilterAnalyzeRequest) UnmarshalJSON(data []byte) error {
// Use a temporary struct to avoid infinite recursion
type Alias QueryFilterAnalyzeRequest
aux := (*Alias)(q)
if err := json.Unmarshal(data, aux); err != nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse json: %v", err)
}
// Trim and validate query is not empty
q.Query = strings.TrimSpace(aux.Query)
if q.Query == "" {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "query is required and cannot be empty")
}
// Validate query type
if aux.QueryType != querybuildertypesv5.QueryTypeClickHouseSQL && aux.QueryType != querybuildertypesv5.QueryTypePromQL {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %v. Supported values are '%s' and '%s'", aux.QueryType, querybuildertypesv5.QueryTypePromQL, querybuildertypesv5.QueryTypeClickHouseSQL)
}
return nil
}
type ColumnInfoResponse struct {
Name string `json:"columnName"`
Alias string `json:"columnAlias"`
}
// QueryFilterAnalyzeResponse represents the response body for query filter analysis
type QueryFilterAnalyzeResponse struct {
MetricNames []string `json:"metricNames"`
Groups []ColumnInfoResponse `json:"groups"`
}
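
Reviewer note: the `type Alias` indirection above is what keeps UnmarshalJSON from recursing: the alias copies the fields but not the method set, so the inner json.Unmarshal uses the default decoder. A minimal runnable demo of the same trick:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

type request struct {
	Query string `json:"query"`
}

func (q *request) UnmarshalJSON(data []byte) error {
	// The alias has the same fields but an empty method set, so the inner
	// Unmarshal uses the default decoder instead of re-entering this method.
	type alias request
	aux := (*alias)(q)
	if err := json.Unmarshal(data, aux); err != nil {
		return err
	}
	q.Query = strings.TrimSpace(aux.Query)
	return nil
}

func main() {
	var r request
	_ = json.Unmarshal([]byte(`{"query":"  status = 200  "}`), &r)
	fmt.Printf("%q\n", r.Query) // "status = 200"
}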

View File

@@ -17,6 +17,10 @@ var (
 	FieldSelectorMatchTypeFuzzy = FieldSelectorMatchType{valuer.NewString("fuzzy")}
 )
+
+// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
+// e.g., "body.status" where "body." is the prefix
+const BodyJSONStringSearchPrefix = `body.`
 type TelemetryFieldKey struct {
 	Name string `json:"name"`
 	Description string `json:"description,omitempty"`
@@ -24,7 +28,10 @@ type TelemetryFieldKey struct {
 	Signal Signal `json:"signal,omitempty"`
 	FieldContext FieldContext `json:"fieldContext,omitempty"`
 	FieldDataType FieldDataType `json:"fieldDataType,omitempty"`
-	Materialized bool `json:"-"`
+	JSONDataType *JSONDataType `json:"-,omitempty"`
+	Indexes []JSONDataTypeIndex `json:"-"`
+	Materialized bool `json:"-"` // refers to promoted in case of body.... fields
 }
 func (f TelemetryFieldKey) String() string {

View File

@@ -36,6 +36,9 @@ import (
 //
 // - Use `log.` for explicit log context
 // - `log.severity_text` will always resolve to `severity_text` of log record
+//
+// - Use `body.` to indicate and enforce body context
+// - `body.key` will look for `key` in the body field
 type FieldContext struct {
 	valuer.String
 }
@@ -49,6 +52,7 @@
 	FieldContextScope = FieldContext{valuer.NewString("scope")}
 	FieldContextAttribute = FieldContext{valuer.NewString("attribute")}
 	FieldContextEvent = FieldContext{valuer.NewString("event")}
+	FieldContextBody = FieldContext{valuer.NewString("body")}
 	FieldContextUnspecified = FieldContext{valuer.NewString("")}

 	// Map string representations to FieldContext values
@@ -65,6 +69,7 @@
 	"point": FieldContextAttribute,
 	"attribute": FieldContextAttribute,
 	"event": FieldContextEvent,
+	"body": FieldContextBody,
 	"spanfield": FieldContextSpan,
 	"span": FieldContextSpan,
 	"logfield": FieldContextLog,
@@ -144,6 +149,8 @@ func (f FieldContext) TagType() string {
 		return "metricfield"
 	case FieldContextEvent:
 		return "eventfield"
+	case FieldContextBody:
+		return "body"
 	}
 	return ""
 }
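
Reviewer note: a simplified sketch of how a `body.`-prefixed key text selects the new context; the real GetFieldKeyFromKeyText handles all contexts and data types, so this is illustrative only:

package main

import (
	"fmt"
	"strings"
)

// parse is a hypothetical stand-in handling only the new body context:
// a "body." prefix selects it and the remainder is the JSON path
// within the log body.
func parse(keyText string) (fieldContext, name string) {
	if rest, ok := strings.CutPrefix(keyText, "body."); ok {
		return "body", rest
	}
	return "", keyText
}

func main() {
	ctx, name := parse("body.user.id")
	fmt.Println(ctx, name) // body user.id
}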

View File

@@ -31,6 +31,9 @@ var (
 	FieldDataTypeArrayInt64 = FieldDataType{valuer.NewString("[]int64")}
 	FieldDataTypeArrayNumber = FieldDataType{valuer.NewString("[]number")}
+	FieldDataTypeArrayObject = FieldDataType{valuer.NewString("[]object")}
+	FieldDataTypeArrayDynamic = FieldDataType{valuer.NewString("[]dynamic")}

 	// Map string representations to FieldDataType values
 	// We want to handle all the possible string representations of the data types.
 	// Even if the user uses some non-standard representation, we want to be able to

View File

@@ -1,6 +1,7 @@
 package telemetrytypes
 import (
+	"reflect"
 	"testing"
 )
@@ -86,7 +87,7 @@ func TestGetFieldKeyFromKeyText(t *testing.T) {
 	for _, testCase := range testCases {
 		result := GetFieldKeyFromKeyText(testCase.keyText)
-		if result != testCase.expected {
+		if !reflect.DeepEqual(result, testCase.expected) {
 			t.Errorf("expected %v, got %v", testCase.expected, result)
 		}
 	}

View File

@@ -0,0 +1,80 @@
package telemetrytypes
type JSONDataTypeIndex struct {
Type JSONDataType
ColumnExpression string
IndexExpression string
}
type JSONDataType struct {
str string // Store the correct case for ClickHouse
IsArray bool
ScalerType string
IndexSupported bool
}
// Override StringValue to return the correct case
func (jdt JSONDataType) StringValue() string {
return jdt.str
}
var (
String = JSONDataType{"String", false, "", true}
Int64 = JSONDataType{"Int64", false, "", true}
Float64 = JSONDataType{"Float64", false, "", true}
Bool = JSONDataType{"Bool", false, "", false}
Dynamic = JSONDataType{"Dynamic", false, "", false}
ArrayString = JSONDataType{"Array(Nullable(String))", true, "String", false}
ArrayInt64 = JSONDataType{"Array(Nullable(Int64))", true, "Int64", false}
ArrayFloat64 = JSONDataType{"Array(Nullable(Float64))", true, "Float64", false}
ArrayBool = JSONDataType{"Array(Nullable(Bool))", true, "Bool", false}
ArrayDynamic = JSONDataType{"Array(Dynamic)", true, "Dynamic", false}
ArrayJSON = JSONDataType{"Array(JSON)", true, "JSON", false}
)
var MappingStringToJSONDataType = map[string]JSONDataType{
"String": String,
"Int64": Int64,
"Float64": Float64,
"Bool": Bool,
"Dynamic": Dynamic,
"Array(Nullable(String))": ArrayString,
"Array(Nullable(Int64))": ArrayInt64,
"Array(Nullable(Float64))": ArrayFloat64,
"Array(Nullable(Bool))": ArrayBool,
"Array(Dynamic)": ArrayDynamic,
"Array(JSON)": ArrayJSON,
}
var ScalerTypeToArrayType = map[JSONDataType]JSONDataType{
String: ArrayString,
Int64: ArrayInt64,
Float64: ArrayFloat64,
Bool: ArrayBool,
Dynamic: ArrayDynamic,
}
var MappingFieldDataTypeToJSONDataType = map[FieldDataType]JSONDataType{
FieldDataTypeString: String,
FieldDataTypeInt64: Int64,
FieldDataTypeFloat64: Float64,
FieldDataTypeNumber: Float64,
FieldDataTypeBool: Bool,
FieldDataTypeArrayString: ArrayString,
FieldDataTypeArrayInt64: ArrayInt64,
FieldDataTypeArrayFloat64: ArrayFloat64,
FieldDataTypeArrayBool: ArrayBool,
}
var MappingJSONDataTypeToFieldDataType = map[JSONDataType]FieldDataType{
String: FieldDataTypeString,
Int64: FieldDataTypeInt64,
Float64: FieldDataTypeFloat64,
Bool: FieldDataTypeBool,
ArrayString: FieldDataTypeArrayString,
ArrayInt64: FieldDataTypeArrayInt64,
ArrayFloat64: FieldDataTypeArrayFloat64,
ArrayBool: FieldDataTypeArrayBool,
ArrayDynamic: FieldDataTypeArrayDynamic,
ArrayJSON: FieldDataTypeArrayObject,
}
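
Reviewer note: a standalone mirror (with stand-in maps, not the real types) of how a type string scanned from path_types flows through the mappings above, surfacing unknown types as errors the way getBodyJSONPaths does:

package main

import "fmt"

// Stand-ins for MappingStringToJSONDataType and MappingJSONDataTypeToFieldDataType.
var stringToJSON = map[string]string{
	"String":                 "String",
	"Array(Nullable(Int64))": "ArrayInt64",
}

var jsonToField = map[string]string{
	"String":     "string",
	"ArrayInt64": "[]int64",
}

func resolve(chType string) (string, error) {
	j, ok := stringToJSON[chType]
	if !ok {
		return "", fmt.Errorf("failed to map type string to JSON data type: %s", chType)
	}
	return jsonToField[j], nil
}

func main() {
	fmt.Println(resolve("Array(Nullable(Int64))")) // []int64 <nil>
	fmt.Println(resolve("UInt128"))                // "" plus an error for unmapped types
}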

View File

@@ -11,7 +11,7 @@ type ThirdPartyApiRequest struct {
 	ShowIp bool `json:"show_ip,omitempty"`
 	Domain string `json:"domain,omitempty"`
 	Endpoint string `json:"endpoint,omitempty"`
-	Filter *qbtypes.Filter `json:"filters,omitempty"`
+	Filter *qbtypes.Filter `json:"filter,omitempty"`
 	GroupBy []qbtypes.GroupByKey `json:"groupBy,omitempty"`
 }