Compare commits
20 Commits
SIG-8604...issue_3017
| Author | SHA1 | Date |
|---|---|---|
| | 41f720950d | |
| | d9bce4a3c6 | |
| | a5ac40c33c | |
| | 86b1366d4a | |
| | eddb43a901 | |
| | 505cfe2314 | |
| | 6e54ee822a | |
| | d88cb8aba4 | |
| | b823b2a1e1 | |
| | 7cfb7118a3 | |
| | 59dfe7c0ed | |
| | 96b68b91c9 | |
| | be6ce8d4f1 | |
| | 1fc58695c6 | |
| | 43450a187e | |
| | 44f00943a8 | |
| | 8867e1ef38 | |
| | c08e520941 | |
| | 139cc4452d | |
| | f4666d9c97 | |
@@ -22,7 +22,6 @@ import { StatusCodes } from 'http-status-codes';
import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';

@@ -374,26 +373,24 @@ function App(): JSX.Element {
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
</AlertRuleProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
frontend/src/auto-import-registry.d.ts (vendored, 1 change)
@@ -14,7 +14,6 @@ import '@signozhq/badge';
import '@signozhq/button';
import '@signozhq/calendar';
import '@signozhq/callout';
import '@signozhq/checkbox';
import '@signozhq/design-tokens';
import '@signozhq/input';
import '@signozhq/popover';
@@ -0,0 +1,16 @@
.overflow-input {
	overflow: hidden;
	white-space: nowrap;
	text-overflow: ellipsis;
}

.overflow-input-mirror {
	position: absolute;
	visibility: hidden;
	white-space: pre;
	pointer-events: none;
	font: inherit;
	letter-spacing: inherit;
	height: 0;
	overflow: hidden;
}
@@ -0,0 +1,119 @@
import { render, screen, userEvent, waitFor, within } from 'tests/test-utils';

import OverflowInputToolTip from './OverflowInputToolTip';

const TOOLTIP_INNER_SELECTOR = '.ant-tooltip-inner';
// Utility to mock overflow behaviour on inputs / elements.
// Stubs HTMLElement.prototype.clientWidth, scrollWidth and offsetWidth used by component.
function mockOverflow(clientWidth: number, scrollWidth: number): void {
	Object.defineProperty(HTMLElement.prototype, 'clientWidth', {
		configurable: true,
		value: clientWidth,
	});
	Object.defineProperty(HTMLElement.prototype, 'scrollWidth', {
		configurable: true,
		value: scrollWidth,
	});
	// mirror.offsetWidth is used to compute mirrorWidth = offsetWidth + 24.
	// Use clientWidth so the mirror measurement aligns with the mocked client width in tests.
	Object.defineProperty(HTMLElement.prototype, 'offsetWidth', {
		configurable: true,
		value: clientWidth,
	});
}

function queryTooltipInner(): HTMLElement | null {
	// find element that has role="tooltip" (could be the inner itself)
	const tooltip = document.querySelector<HTMLElement>('[role="tooltip"]');
	if (!tooltip) return document.querySelector(TOOLTIP_INNER_SELECTOR);

	// if the role element is already the inner, return it; otherwise return its descendant
	if (tooltip.classList.contains('ant-tooltip-inner')) return tooltip;
	return (
		(tooltip.querySelector(TOOLTIP_INNER_SELECTOR) as HTMLElement) ??
		document.querySelector(TOOLTIP_INNER_SELECTOR)
	);
}

describe('OverflowInputToolTip', () => {
	beforeEach(() => {
		jest.restoreAllMocks();
	});

	test('shows tooltip when content overflows and input is clamped at maxAutoWidth', async () => {
		mockOverflow(150, 250); // clientWidth >= maxAutoWidth (150), scrollWidth > clientWidth

		render(<OverflowInputToolTip value="Very long overflowing text" />);

		await userEvent.hover(screen.getByRole('textbox'));

		await waitFor(() => {
			expect(queryTooltipInner()).not.toBeNull();
		});

		const tooltipInner = queryTooltipInner();
		if (!tooltipInner) throw new Error('Tooltip inner not found');
		expect(
			within(tooltipInner).getByText('Very long overflowing text'),
		).toBeInTheDocument();
	});

	test('does NOT show tooltip when content does not overflow', async () => {
		mockOverflow(150, 100); // content fits (scrollWidth <= clientWidth)

		render(<OverflowInputToolTip value="Short text" />);

		await userEvent.hover(screen.getByRole('textbox'));

		await waitFor(() => {
			expect(queryTooltipInner()).toBeNull();
		});
	});

	test('does NOT show tooltip when content overflows but input is NOT at maxAutoWidth', async () => {
		mockOverflow(100, 250); // clientWidth < maxAutoWidth (150), scrollWidth > clientWidth

		render(<OverflowInputToolTip value="Long but input not clamped" />);

		await userEvent.hover(screen.getByRole('textbox'));

		await waitFor(() => {
			expect(queryTooltipInner()).toBeNull();
		});
	});

	test('uncontrolled input allows typing', async () => {
		render(<OverflowInputToolTip defaultValue="Init" />);

		const input = screen.getByRole('textbox') as HTMLInputElement;
		await userEvent.type(input, 'ABC');

		expect(input).toHaveValue('InitABC');
	});

	test('disabled input never shows tooltip even if overflowing', async () => {
		mockOverflow(150, 300);

		render(<OverflowInputToolTip value="Overflowing!" disabled />);

		await userEvent.hover(screen.getByRole('textbox'));

		await waitFor(() => {
			expect(queryTooltipInner()).toBeNull();
		});
	});

	test('renders mirror span and input correctly (structural assertions instead of snapshot)', () => {
		const { container } = render(<OverflowInputToolTip value="Snapshot" />);
		const mirror = container.querySelector('.overflow-input-mirror');
		const input = container.querySelector('input') as HTMLInputElement | null;

		expect(mirror).toBeTruthy();
		expect(mirror?.textContent).toBe('Snapshot');
		expect(input).toBeTruthy();
		expect(input?.value).toBe('Snapshot');

		// width should be set inline (component calculates width on mount)
		expect(input?.getAttribute('style')).toContain('width:');
	});
});
@@ -0,0 +1,73 @@
/* eslint-disable react/require-default-props */
/* eslint-disable react/jsx-props-no-spreading */

import './OverflowInputToolTip.scss';

import { Input, InputProps, InputRef, Tooltip } from 'antd';
import cx from 'classnames';
import { useEffect, useRef, useState } from 'react';

export interface OverflowTooltipInputProps extends InputProps {
	tooltipPlacement?: 'top' | 'bottom' | 'left' | 'right';
	minAutoWidth?: number;
	maxAutoWidth?: number;
}

function OverflowInputToolTip({
	value,
	defaultValue,
	onChange,
	disabled = false,
	tooltipPlacement = 'top',
	className,
	minAutoWidth = 70,
	maxAutoWidth = 150,
	...rest
}: OverflowTooltipInputProps): JSX.Element {
	const inputRef = useRef<InputRef>(null);
	const mirrorRef = useRef<HTMLSpanElement | null>(null);
	const [isOverflowing, setIsOverflowing] = useState<boolean>(false);

	useEffect(() => {
		const input = inputRef.current?.input;
		const mirror = mirrorRef.current;
		if (!input || !mirror) {
			setIsOverflowing(false);
			return;
		}

		mirror.textContent = String(value ?? '') || ' ';
		const mirrorWidth = mirror.offsetWidth + 24;
		const newWidth = Math.min(maxAutoWidth, Math.max(minAutoWidth, mirrorWidth));
		input.style.width = `${newWidth}px`;

		// consider clamped when mirrorWidth reaches maxAutoWidth (allow -5px tolerance)
		const isClamped = mirrorWidth >= maxAutoWidth - 5;
		const overflow = input.scrollWidth > input.clientWidth && isClamped;

		setIsOverflowing(overflow);
	}, [value, disabled, minAutoWidth, maxAutoWidth]);

	const tooltipTitle = !disabled && isOverflowing ? String(value ?? '') : '';

	return (
		<>
			<span ref={mirrorRef} aria-hidden className="overflow-input-mirror" />
			<Tooltip title={tooltipTitle} placement={tooltipPlacement}>
				<Input
					{...rest}
					value={value}
					defaultValue={defaultValue}
					onChange={onChange}
					disabled={disabled}
					ref={inputRef}
					className={cx('overflow-input', className)}
				/>
			</Tooltip>
		</>
	);
}

OverflowInputToolTip.displayName = 'OverflowInputToolTip';

export default OverflowInputToolTip;
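A minimal usage sketch of the new component (the prop values here are illustrative; the component accepts all antd `InputProps` plus the three extras defined in `OverflowTooltipInputProps` above):

```tsx
import { useState } from 'react';

import OverflowInputToolTip from 'components/OverflowInputToolTip';

function Example(): JSX.Element {
	const [value, setValue] = useState('');

	return (
		<OverflowInputToolTip
			value={value}
			onChange={(e): void => setValue(e.target.value)}
			tooltipPlacement="top"
			minAutoWidth={70} // input never shrinks below this width
			maxAutoWidth={150} // tooltip only appears once the input is clamped here
		/>
	);
}

export default Example;
```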
frontend/src/components/OverflowInputToolTip/index.tsx (new file, 3 changes)
@@ -0,0 +1,3 @@
import OverflowInputToolTip from './OverflowInputToolTip';

export default OverflowInputToolTip;
@@ -1,6 +1,8 @@
/* eslint-disable react/jsx-props-no-spreading */
import { Button, Flex, Input, Select } from 'antd';

import { Button, Flex, Select } from 'antd';
import cx from 'classnames';
import OverflowInputToolTip from 'components/OverflowInputToolTip';
import {
logsQueryFunctionOptions,
metricQueryFunctionOptions,
@@ -9,6 +11,7 @@ import {
import { useIsDarkMode } from 'hooks/useDarkMode';
import { debounce, isNil } from 'lodash-es';
import { X } from 'lucide-react';
import { useMemo, useState } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { QueryFunction } from 'types/api/v5/queryRange';
import { DataSource, QueryFunctionsTypes } from 'types/common/queryBuilder';
@@ -47,9 +50,13 @@ export default function Function({
functionValue = funcData.args?.[0]?.value;
}

const debouncedhandleUpdateFunctionArgs = debounce(
handleUpdateFunctionArgs,
500,
const [value, setValue] = useState<string>(
functionValue !== undefined ? String(functionValue) : '',
);

const debouncedhandleUpdateFunctionArgs = useMemo(
() => debounce(handleUpdateFunctionArgs, 500),
[handleUpdateFunctionArgs],
);

// update the logic when we start supporting functions for traces
@@ -89,13 +96,18 @@ export default function Function({
/>

{showInput && (
<Input
className="query-function-value"
<OverflowInputToolTip
autoFocus
defaultValue={functionValue}
value={value}
onChange={(event): void => {
const newVal = event.target.value;
setValue(newVal);
debouncedhandleUpdateFunctionArgs(funcData, index, event.target.value);
}}
tooltipPlacement="top"
minAutoWidth={70}
maxAutoWidth={150}
className="query-function-value"
/>
)}
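The switch from a bare `debounce(...)` call to a `useMemo`-wrapped one matters: without memoization a fresh debounced function is created on every render, so rapid keystrokes never share a timer and pending invocations don't coalesce. A minimal sketch of the pattern outside this component (the hook name is hypothetical):

```tsx
import { debounce } from 'lodash-es';
import { useMemo } from 'react';

function useDebouncedUpdate(
	update: (value: string) => void,
): (value: string) => void {
	// Recreate the debounced wrapper only when `update` itself changes,
	// so consecutive calls share one 500ms timer instead of one per render.
	return useMemo(() => debounce(update, 500), [update]);
}
```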
@@ -99,7 +99,7 @@
}

.query-function-value {
width: 55px;
width: 70px;
border-left: 0;
background: var(--bg-ink-200);
border-radius: 0;
@@ -13,12 +13,6 @@
.nav-item-active-marker {
background: #4e74f8;
}

.nav-item-data {
.nav-item-label {
color: var(--Vanilla-100, #fff);
}
}
}

&.disabled {
@@ -126,12 +120,6 @@
.nav-item-active-marker {
background: #4e74f8;
}

.nav-item-data {
.nav-item-label {
color: var(--bg-slate-500);
}
}
}

&:hover {
@@ -2,7 +2,7 @@
/* eslint-disable jsx-a11y/click-events-have-key-events */
import './NavItem.styles.scss';

import { Tag, Tooltip } from 'antd';
import { Tag } from 'antd';
import cx from 'classnames';
import { Pin, PinOff } from 'lucide-react';

@@ -74,25 +74,21 @@ export default function NavItem({
)}

{onTogglePin && !isPinned && (
<Tooltip title="Add to shortcuts" placement="right">
<Pin
size={12}
className="nav-item-pin-icon"
onClick={handleTogglePinClick}
color="var(--Vanilla-400, #c0c1c3)"
/>
</Tooltip>
<Pin
size={12}
className="nav-item-pin-icon"
onClick={handleTogglePinClick}
color="var(--Vanilla-400, #c0c1c3)"
/>
)}

{onTogglePin && isPinned && (
<Tooltip title="Remove from shortcuts" placement="right">
<PinOff
size={12}
className="nav-item-pin-icon"
onClick={handleTogglePinClick}
color="var(--Vanilla-400, #c0c1c3)"
/>
</Tooltip>
<PinOff
size={12}
className="nav-item-pin-icon"
onClick={handleTogglePinClick}
color="var(--Vanilla-400, #c0c1c3)"
/>
)}
</div>
</div>
@@ -24,7 +24,6 @@
.brand-container {
padding: 8px 18px;
max-width: 100%;
background: linear-gradient(0deg, rgba(11, 12, 14, 0) 0%, #0b0c0e 27%);
}

.brand-company-meta {
@@ -35,7 +34,6 @@
.brand {
display: flex;
align-items: center;
justify-content: center;

max-width: 100%;
overflow: hidden;
@@ -165,6 +163,7 @@
align-items: center;
justify-content: center;
padding: 8px;
margin-left: 2px;
gap: 8px;

width: 100%;
@@ -175,31 +174,6 @@
background: var(--Slate-500, #161922);

box-shadow: none !important;

color: var(--bg-vanilla-400, #c0c1c3);

svg {
color: var(--bg-vanilla-400, #c0c1c3);
}

.nav-item-label {
color: var(--bg-vanilla-400, #c0c1c3);
}

&:hover:not(:disabled) {
background: var(--bg-slate-400, #1d212d);
border-color: var(--bg-slate-400, #1d212d);

color: var(--bg-vanilla-100, #fff);

svg {
color: var(--bg-vanilla-100, #fff);
}

.nav-item-label {
color: var(--bg-vanilla-100, #fff);
}
}
}
}

@@ -267,7 +241,7 @@
align-items: center;
gap: 8px;

padding: 0 18px;
padding: 0 20px;

.nav-section-title-text {
display: none;
@@ -281,11 +255,6 @@
display: none;
cursor: pointer;
margin-left: auto;
transition: color 0.2s;

&:hover {
color: var(--bg-vanilla-100, #fff);
}
}
}

@@ -307,7 +276,7 @@
line-height: 14px; /* 150% */
letter-spacing: 0.4px;

padding: 6px 20px;
padding: 0 20px;
opacity: 0.6;

display: none;
@@ -463,33 +432,10 @@
.nav-wrapper {
.nav-top-section {
.shortcut-nav-items {
.nav-section-title {
display: flex;
justify-content: center;
padding: 0;

.nav-section-title-text {
display: none;
}

.nav-section-title-icon.reorder {
display: none;
}
}

.nav-section-title,
.nav-section-subtitle {
display: none;
}

.nav-items-section {
display: none;
}

.nav-title-section {
margin-top: 0;
margin-bottom: 0;
gap: 0;
}
}
}

@@ -548,40 +494,7 @@
background: var(--bg-slate-300);
}

&:not(.pinned) {
.nav-item {
.nav-item-data {
justify-content: center;
}
}

.shortcut-nav-items,
.more-nav-items {
.nav-section-title {
padding: 0 22px !important;
}
}

:hover,
&.dropdown-open {
.nav-item {
.nav-item-data {
flex-grow: 1;
justify-content: flex-start;
}
}

.shortcut-nav-items,
.more-nav-items {
.nav-section-title {
padding: 0 18px !important;
}
}
}
}

&:not(.pinned):hover,
&.dropdown-open {
&:hover {
flex: 0 0 240px;
max-width: 240px;
min-width: 240px;
@@ -620,11 +533,6 @@
.nav-section-title-icon {
&.reorder {
display: flex;
transition: color 0.2s;

&:hover {
color: var(--bg-vanilla-100, #fff);
}
}
}
}
@@ -784,11 +692,6 @@
.nav-section-title-icon {
&.reorder {
display: flex;
transition: color 0.2s;

&:hover {
color: var(--bg-vanilla-100, #fff);
}
}
}
}
@@ -961,12 +864,6 @@
line-height: normal;
letter-spacing: 0.14px;
}

&:hover:not(.ant-dropdown-menu-item-disabled) {
.ant-dropdown-menu-title-content {
color: var(--bg-vanilla-100, #fff);
}
}
}
}
}
@@ -1006,10 +903,6 @@
.ant-dropdown-menu-item-disabled {
opacity: 0.7;
}

.ant-dropdown-menu {
width: 100% !important; // todo:sagar check with shuvam once
}
}

.settings-dropdown,
@@ -1019,12 +912,6 @@
}
}

.secondary-nav-items {
.nav-item-data {
margin-left: 10px !important;
}
}

.reorder-shortcut-nav-items-modal {
width: 384px !important;

@@ -1141,6 +1028,7 @@
display: flex;
align-items: center;
border-radius: 2px;
border-radius: 2px;
background: var(--Robin-500, #4e74f8) !important;
color: var(--bg-vanilla-100) !important;
font-family: Inter;
@@ -1176,10 +1064,6 @@
}
}

.help-support-dropdown li.ant-dropdown-menu-item-divider {
background-color: var(--Slate-500, #161922) !important;
}

.lightMode {
.sideNav {
background: var(--bg-vanilla-100);
@@ -1211,32 +1095,8 @@
.get-started-nav-items {
.get-started-btn {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-200);
color: var(--bg-slate-50, #62687c);

svg {
color: var(--bg-slate-50, #62687c);
}

.nav-item-label {
color: var(--bg-ink-400, #62687c);
}

// Hover state (light mode)
&:hover:not(:disabled) {
background: var(--bg-vanilla-300);
border-color: var(--bg-vanilla-300);

color: var(--bg-slate-500, #161922);

svg {
color: var(--bg-slate-500, #161922);
}

.nav-item-label {
color: var(--bg-slate-500, #161922);
}
}
background: var(--bg-vanilla-100);
color: var(--bg-ink-400);
}
}

@@ -1248,25 +1108,7 @@
}
}

.brand-container {
background: linear-gradient(0deg, rgba(255, 255, 255, 0) 0%, #fff 27%);
}

.nav-wrapper {
.nav-top-section {
.shortcut-nav-items {
.nav-section-title {
.nav-section-title-icon {
&.reorder {
&:hover {
color: var(--bg-slate-400, #1d212d);
}
}
}
}
}
}

.secondary-nav-items {
border-top: 1px solid var(--bg-vanilla-300);

@@ -1281,43 +1123,8 @@
}
}

&.pinned {
.nav-wrapper {
.nav-top-section {
.shortcut-nav-items {
.nav-section-title {
.nav-section-title-icon {
&.reorder {
&:hover {
color: var(--bg-slate-400, #1d212d);
}
}
}
}
}
}
}

&:not(.pinned):hover,
&.dropdown-open {
&:hover {
background: var(--bg-vanilla-100);

.nav-wrapper {
.nav-top-section {
.shortcut-nav-items {
.nav-section-title {
.nav-section-title-icon {
&.reorder {
&:hover {
color: var(--bg-slate-400, #1d212d);
}
}
}
}
}
}
}
}
}

@@ -1327,12 +1134,6 @@
.ant-dropdown-menu-title-content {
color: var(--bg-ink-400);
}

&:hover:not(.ant-dropdown-menu-item-disabled) {
.ant-dropdown-menu-title-content {
color: var(--bg-ink-500);
}
}
}
}
}
@@ -1409,10 +1210,6 @@
color: var(--bg-ink-400);
}
}

.help-support-dropdown li.ant-dropdown-menu-item-divider {
background-color: var(--bg-vanilla-300) !important;
}
}

.version-tooltip-overlay {
@@ -155,15 +155,18 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
DefaultHelpSupportDropdownMenuItems,
);

const [pinnedMenuItems, setPinnedMenuItems] = useState<SidebarItem[]>([]);

const [tempPinnedMenuItems, setTempPinnedMenuItems] = useState<SidebarItem[]>(
[],
);

const [secondaryMenuItems, setSecondaryMenuItems] = useState<SidebarItem[]>(
[],
);

const [hasScroll, setHasScroll] = useState(false);
const navTopSectionRef = useRef<HTMLDivElement>(null);
const [isDropdownOpen, setIsDropdownOpen] = useState(false);
const prevSidebarOpenRef = useRef<boolean>(isPinned);
const userManuallyCollapsedRef = useRef<boolean>(false);

const checkScroll = useCallback((): void => {
if (navTopSectionRef.current) {
@@ -212,72 +215,63 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
const isAdmin = user.role === USER_ROLES.ADMIN;
const isEditor = user.role === USER_ROLES.EDITOR;

// Compute initial pinned items and secondary menu items synchronously to avoid flash
const computedPinnedMenuItems = useMemo(() => {
const navShortcutsPreference = userPreferences?.find(
useEffect(() => {
const navShortcuts = (userPreferences?.find(
(preference) => preference.name === USER_PREFERENCES.NAV_SHORTCUTS,
);
const navShortcuts = (navShortcutsPreference?.value as unknown) as
| string[]
| undefined;
)?.value as unknown) as string[];

// Check if preference exists (user has set it before, even if empty)
const preferenceExists = navShortcutsPreference !== undefined;
const shouldShowIntegrations =
(isCloudUser || isEnterpriseSelfHostedUser) && (isAdmin || isEditor);

if (preferenceExists && isArray(navShortcuts)) {
return navShortcuts
if (navShortcuts && isArray(navShortcuts) && navShortcuts.length > 0) {
// nav shortcuts is array of strings
const pinnedItems = navShortcuts
.map((shortcut) =>
defaultMoreMenuItems.find((item) => item.itemKey === shortcut),
)
.filter((item): item is SidebarItem => item !== undefined);

// Set pinned items in the order they were stored
setPinnedMenuItems(pinnedItems);

setSecondaryMenuItems(
defaultMoreMenuItems.map((item) => ({
...item,
isPinned: pinnedItems.some((pinned) => pinned.itemKey === item.itemKey),
isEnabled:
item.key === ROUTES.INTEGRATIONS
? shouldShowIntegrations
: item.isEnabled,
})),
);
} else {
// Set default pinned items
const defaultPinnedItems = defaultMoreMenuItems.filter(
(item) => item.isPinned,
);
setPinnedMenuItems(defaultPinnedItems);

setSecondaryMenuItems(
defaultMoreMenuItems.map((item) => ({
...item,
isPinned: defaultPinnedItems.some(
(pinned) => pinned.itemKey === item.itemKey,
),
isEnabled:
item.key === ROUTES.INTEGRATIONS
? shouldShowIntegrations
: item.isEnabled,
})),
);
}

// Preference doesn't exist or userPreferences not loaded yet
// If userPreferences is null, return empty to avoid showing defaults before preferences load
if (userPreferences === null) {
return [];
}

// Preference doesn't exist - use defaults for first-time users
return defaultMoreMenuItems.filter((item) => item.isPinned);
}, [userPreferences]);

const computedSecondaryMenuItems = useMemo(() => {
const shouldShowIntegrationsValue =
(isCloudUser || isEnterpriseSelfHostedUser) && (isAdmin || isEditor);

return defaultMoreMenuItems.map((item) => ({
...item,
isPinned: computedPinnedMenuItems.some(
(pinned) => pinned.itemKey === item.itemKey,
),
isEnabled:
item.key === ROUTES.INTEGRATIONS
? shouldShowIntegrationsValue
: item.isEnabled,
}));
}, [
computedPinnedMenuItems,
userPreferences,
isCloudUser,
isEnterpriseSelfHostedUser,
isAdmin,
isEditor,
]);

const [pinnedMenuItems, setPinnedMenuItems] = useState<SidebarItem[]>(
computedPinnedMenuItems,
);
const [secondaryMenuItems, setSecondaryMenuItems] = useState<SidebarItem[]>(
computedSecondaryMenuItems,
);

// Sync state when computed values change (when userPreferences loads or updates)
// This ensures we respect user preferences without showing defaults first
useEffect(() => {
setPinnedMenuItems(computedPinnedMenuItems);
setSecondaryMenuItems(computedSecondaryMenuItems);
}, [computedPinnedMenuItems, computedSecondaryMenuItems]);

const isOnboardingV3Enabled = featureFlags?.find(
(flag) => flag.name === FeatureKeys.ONBOARDING_V3,
)?.active;
@@ -305,8 +299,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
setShowVersionUpdateNotification,
] = useState(false);

const [isMoreMenuCollapsed, setIsMoreMenuCollapsed] = useState(!isPinned);
const [isHovered, setIsHovered] = useState(false);
const [isMoreMenuCollapsed, setIsMoreMenuCollapsed] = useState(false);

const [
isReorderShortcutNavItemsModalOpen,
@@ -332,17 +325,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
.map((item) => item.itemKey)
.filter(Boolean) as string[];

// Update context immediately (optimistically) so computed values reflect the change
updateUserPreferenceInContext({
name: USER_PREFERENCES.NAV_SHORTCUTS,
description: USER_PREFERENCES.NAV_SHORTCUTS,
valueType: 'array',
defaultValue: false,
allowedValues: [],
allowedScopes: ['user'],
value: navShortcuts,
});

updateUserPreferenceMutation(
{
name: USER_PREFERENCES.NAV_SHORTCUTS,
@@ -351,7 +333,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
{
onSuccess: (response) => {
if (response.data) {
// Update context again on success to ensure consistency
updateUserPreferenceInContext({
name: USER_PREFERENCES.NAV_SHORTCUTS,
description: USER_PREFERENCES.NAV_SHORTCUTS,
@@ -385,13 +366,13 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
if (isCurrentlyPinned) {
return prevItems.filter((i) => i.key !== item.key);
}
return [...prevItems, item];
return [item, ...prevItems];
});

// Get the updated pinned menu items for preference update
const updatedPinnedItems = pinnedMenuItems.some((i) => i.key === item.key)
? pinnedMenuItems.filter((i) => i.key !== item.key)
: [...pinnedMenuItems, item];
: [item, ...pinnedMenuItems];

// Update user preference with the ordered list of item keys
updateNavShortcutsPreference(updatedPinnedItems);
@@ -423,21 +404,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
}
}, [isReorderShortcutNavItemsModalOpen, pinnedMenuItems]);

useEffect(() => {
const isSidebarOpen = isPinned || isHovered;
const wasSidebarOpen = prevSidebarOpenRef.current;

if (!isSidebarOpen) {
// Sidebar is collapsed - always collapse more menu and reset manual collapse flag
setIsMoreMenuCollapsed(true);
userManuallyCollapsedRef.current = false;
} else if (!wasSidebarOpen && !userManuallyCollapsedRef.current) {
// Sidebar just opened (transitioned from collapsed) - auto-expand only if user didn't manually collapse
setIsMoreMenuCollapsed(false);
}
prevSidebarOpenRef.current = isSidebarOpen;
}, [isPinned, isHovered]);

const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();

const isWorkspaceBlocked = trialInfo?.workSpaceBlock || false;
@@ -626,7 +592,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
},
{
type: 'group',
label: "WHAT'S NEW",
label: "WHAT's NEW",
},
...dropdownItems,
{
@@ -884,15 +850,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {

return (
<div className={cx('sidenav-container', isPinned && 'pinned')}>
<div
className={cx(
'sideNav',
isPinned && 'pinned',
isDropdownOpen && 'dropdown-open',
)}
onMouseEnter={(): void => setIsHovered(true)}
onMouseLeave={(): void => setIsHovered(false)}
>
<div className={cx('sideNav', isPinned && 'pinned')}>
<div className="brand-container">
<div className="brand">
<div className="brand-company-meta">
@@ -990,48 +948,44 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
{renderNavItems(primaryMenuItems)}
</div>

{(pinnedMenuItems.length > 0 || isPinned || isHovered) && (
<div className="shortcut-nav-items">
<div className="nav-title-section">
<div className="nav-section-title">
<div className="nav-section-title-icon">
<MousePointerClick size={16} />
</div>

<div className="nav-section-title-text">SHORTCUTS</div>

{pinnedMenuItems.length > 1 && (
<Tooltip title="Manage shortcuts" placement="right">
<div
className="nav-section-title-icon reorder"
onClick={(): void => {
logEvent('Sidebar V2: Manage shortcuts clicked', {});
setIsReorderShortcutNavItemsModalOpen(true);
}}
>
<Logs size={16} />
</div>
</Tooltip>
)}
<div className="shortcut-nav-items">
<div className="nav-title-section">
<div className="nav-section-title">
<div className="nav-section-title-icon">
<MousePointerClick size={16} />
</div>

{pinnedMenuItems.length === 0 && (
<div className="nav-section-subtitle">
You have not added any shortcuts yet.
</div>
)}
<div className="nav-section-title-text">SHORTCUTS</div>

{pinnedMenuItems.length > 0 && (
<div className="nav-items-section">
{renderNavItems(
pinnedMenuItems.filter((item) => item.isEnabled),
true,
)}
{pinnedMenuItems.length > 1 && (
<div
className="nav-section-title-icon reorder"
onClick={(): void => {
logEvent('Sidebar V2: Manage shortcuts clicked', {});
setIsReorderShortcutNavItemsModalOpen(true);
}}
>
<Logs size={16} />
</div>
)}
</div>

{pinnedMenuItems.length === 0 && (
<div className="nav-section-subtitle">
You have not added any shortcuts yet.
</div>
)}

{pinnedMenuItems.length > 0 && (
<div className="nav-items-section">
{renderNavItems(
pinnedMenuItems.filter((item) => item.isEnabled),
true,
)}
</div>
)}
</div>
)}
</div>

{moreMenuItems.length > 0 && (
<div
@@ -1044,21 +998,10 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
<div
className="nav-section-title"
onClick={(): void => {
// Only allow toggling when sidebar is pinned or hovered
if (!isPinned && !isHovered) {
return;
}
const newCollapsedState = !isMoreMenuCollapsed;
logEvent('Sidebar V2: More menu clicked', {
action: isMoreMenuCollapsed ? 'expand' : 'collapse',
});
setIsMoreMenuCollapsed(newCollapsedState);
// Track if user manually collapsed it
if (newCollapsedState) {
userManuallyCollapsedRef.current = true;
} else {
userManuallyCollapsedRef.current = false;
}
setIsMoreMenuCollapsed(!isMoreMenuCollapsed);
}}
>
<div className="nav-section-title-icon">
@@ -1108,7 +1051,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
placement="topLeft"
overlayClassName="nav-dropdown-overlay help-support-dropdown"
trigger={['click']}
onOpenChange={(open): void => setIsDropdownOpen(open)}
>
<div className="nav-item">
<div className="nav-item-data" data-testid="help-support-nav-item">
@@ -1129,7 +1071,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
placement="topLeft"
overlayClassName="nav-dropdown-overlay settings-dropdown"
trigger={['click']}
onOpenChange={(open): void => setIsDropdownOpen(open)}
>
<div className="nav-item">
<div className="nav-item-data" data-testid="settings-nav-item">
@@ -1,3 +1,13 @@
import AlertRuleProvider from 'providers/Alert';

import AlertDetails from './AlertDetails';

export default AlertDetails;
function AlertDetailsPage(): JSX.Element {
return (
<AlertRuleProvider>
<AlertDetails />
</AlertRuleProvider>
);
}

export default AlertDetailsPage;
@@ -33,19 +33,6 @@
height: calc(100vh - 48px);
border-right: 1px solid var(--Slate-500, #161922);
background: var(--Ink-500, #0b0c0e);

.nav-item {
.nav-item-data {
max-width: none;
margin: 0;
}

&.active {
.nav-item-data .nav-item-label {
color: var(--bg-vanilla-100, #fff);
}
}
}
}

.settings-page-content {
@@ -94,14 +81,6 @@
.settings-page-sidenav {
border-right: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);

.nav-item {
&.active {
.nav-item-data .nav-item-label {
color: var(--bg-ink-500);
}
}
}
}

.settings-page-content {
@@ -12,7 +12,7 @@ import { SidebarItem } from 'container/SideNav/sideNav.types';
import useComponentPermission from 'hooks/useComponentPermission';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import history from 'lib/history';
import { Cog } from 'lucide-react';
import { Wrench } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
@@ -236,7 +236,7 @@ function SettingsPage(): JSX.Element {
className="settings-page-header-title"
data-testid="settings-page-title"
>
<Cog size={16} />
<Wrench size={16} />
Settings
</div>
</header>
pkg/cache/memorycache/provider.go (vendored, 6 changes)
@@ -105,6 +105,12 @@ func (provider *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey s
return err
}

// To make sure ristretto does not go into no-op
if ttl < 0 {
provider.settings.Logger().WarnContext(ctx, "ttl is less than 0, setting it to 0")
ttl = 0
}

if cloneable, ok := data.(cachetypes.Cloneable); ok {
span.SetAttributes(attribute.Bool("memory.cloneable", true))
span.SetAttributes(attribute.Int64("memory.cost", 1))
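My reading of why the clamp is needed, consistent with the "no-op" comment above: ristretto treats a TTL of zero as "never expire" but rejects entries with a negative TTL outright, so an already-expired duration would silently drop every `Set`. A minimal sketch of the guard in isolation:

```go
// Sketch: normalize a caller-supplied TTL before handing it to ristretto.
// Assumption (not shown in this diff): ristretto's SetWithTTL returns false
// for ttl < 0, while ttl == 0 means "no expiration" - hence the clamp.
func normalizeTTL(ttl time.Duration) time.Duration {
	if ttl < 0 {
		return 0 // fall back to "no expiration" instead of a silent no-op
	}
	return ttl
}
```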
@@ -137,3 +137,27 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {

render.Success(rw, http.StatusOK, metadata)
}

func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}

var in metricsexplorertypes.MetricAttributesRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}

orgID := valuer.MustNewUUID(claims.OrgID)

out, err := h.module.GetMetricAttributes(req.Context(), orgID, &in)
if err != nil {
render.Error(rw, err)
return
}

render.Success(rw, http.StatusOK, out)
}
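The payload types themselves are not part of this diff; a sketch of their shape as implied by the call sites (`req.MetricName`, optional `req.Start`/`req.End` millisecond bounds, and `rows.Scan(&attr.Key, &attr.Values, &attr.ValueCount)` in the module below). The field names come from the diff; the JSON tags are assumptions:

```go
// Sketch of the metricsexplorertypes payloads as implied by this PR;
// json tags here are assumptions, not copied from the source.
type MetricAttributesRequest struct {
	MetricName string `json:"metricName"`
	Start      *int64 `json:"start,omitempty"` // unix millis, optional
	End        *int64 `json:"end,omitempty"`   // unix millis, optional
}

type MetricAttribute struct {
	Key        string   `json:"key"`
	Values     []string `json:"values"`
	ValueCount uint64   `json:"valueCount"`
}

type MetricAttributesResponse struct {
	Attributes []MetricAttribute `json:"attributes"`
	TotalKeys  int64             `json:"totalKeys"`
}
```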
@@ -190,6 +190,22 @@ func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, re
return nil
}

func (m *module) GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error) {
if err := req.Validate(); err != nil {
return nil, err
}

attributes, err := m.fetchMetricAttributes(ctx, req.MetricName, req.Start, req.End)
if err != nil {
return nil, err
}

return &metricsexplorertypes.MetricAttributesResponse{
Attributes: attributes,
TotalKeys: int64(len(attributes)),
}, nil
}

func (m *module) fetchMetadataFromCache(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, []string) {
hits := make(map[string]*metricsexplorertypes.MetricMetadata)
misses := make([]string, 0)
@@ -771,3 +787,53 @@ func (m *module) computeSamplesTreemap(ctx context.Context, req *metricsexplorer

return entries, nil
}

func (m *module) fetchMetricAttributes(ctx context.Context, metricName string, start, end *int64) ([]metricsexplorertypes.MetricAttribute, error) {
// Build query using sqlbuilder
sb := sqlbuilder.NewSelectBuilder()
sb.Select(
"attr_name AS key",
"groupUniqArray(1000)(attr_string_value) AS values",
"uniq(attr_string_value) AS valueCount",
)
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
sb.Where(sb.E("metric_name", metricName))
sb.Where("NOT startsWith(attr_name, '__')")

// Add time range filtering if provided
if start != nil {
// Filter by start time: attributes that were active at or after start time
sb.Where(sb.GE("last_reported_unix_milli", *start))
}
if end != nil {
// Filter by end time: attributes that were active at or before end time
sb.Where(sb.LE("first_reported_unix_milli", *end))
}

sb.GroupBy("attr_name")
sb.OrderBy("valueCount DESC")

query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch metric attributes")
}
defer rows.Close()

attributes := make([]metricsexplorertypes.MetricAttribute, 0)
for rows.Next() {
var attr metricsexplorertypes.MetricAttribute
if err := rows.Scan(&attr.Key, &attr.Values, &attr.ValueCount); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metric attribute row")
}
attributes = append(attributes, attr)
}

if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metric attribute rows")
}

return attributes, nil
}
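For reference, a standalone sketch of the statement this builder produces. The table name is a placeholder (the real one comes from the `telemetrymetrics` constants), and the literal arguments are illustrative:

```go
package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	// Rebuild the same statement as fetchMetricAttributes against a
	// placeholder table name.
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select(
		"attr_name AS key",
		"groupUniqArray(1000)(attr_string_value) AS values",
		"uniq(attr_string_value) AS valueCount",
	)
	sb.From("signoz_metrics.attributes_metadata") // placeholder
	sb.Where(sb.E("metric_name", "http_requests_total"))
	sb.Where("NOT startsWith(attr_name, '__')")
	sb.Where(sb.GE("last_reported_unix_milli", int64(1700000000000)))
	sb.Where(sb.LE("first_reported_unix_milli", int64(1700003600000)))
	sb.GroupBy("attr_name")
	sb.OrderBy("valueCount DESC")

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(query, args)
	// Roughly:
	// SELECT attr_name AS key, groupUniqArray(1000)(attr_string_value) AS values,
	//        uniq(attr_string_value) AS valueCount
	// FROM signoz_metrics.attributes_metadata
	// WHERE metric_name = ? AND NOT startsWith(attr_name, '__')
	//   AND last_reported_unix_milli >= ? AND first_reported_unix_milli <= ?
	// GROUP BY attr_name ORDER BY valueCount DESC
}
```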
@@ -13,6 +13,7 @@ type Handler interface {
GetStats(http.ResponseWriter, *http.Request)
GetTreemap(http.ResponseWriter, *http.Request)
GetMetricMetadata(http.ResponseWriter, *http.Request)
GetMetricAttributes(http.ResponseWriter, *http.Request)
UpdateMetricMetadata(http.ResponseWriter, *http.Request)
}

@@ -22,4 +23,5 @@ type Module interface {
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error)
}
@@ -100,8 +100,10 @@ func newProvider(
traceAggExprRewriter,
)

logsKeyEvolutionMetadata := telemetrylogs.NewKeyEvolutionMetadata(telemetryStore, cache, settings.Logger)

// Create log statement builder
logFieldMapper := telemetrylogs.NewFieldMapper()
logFieldMapper := telemetrylogs.NewFieldMapper(logsKeyEvolutionMetadata)
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
@@ -668,10 +668,11 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
am.ViewAccess(ah.UpdateMetricsMetadata)).
Methods(http.MethodPost)
// v2 endpoints
router.HandleFunc("/api/v2/metrics/stats", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetStats)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/treemap", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetTreemap)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetMetricMetadata)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.UpdateMetricMetadata)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/stats", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetStats)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/treemap", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetTreemap)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/attributes", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricAttributes)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricMetadata)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
}

func Intersection(a, b []int) (c []int) {
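From the frontend, the new route is a plain view-access POST. A hedged client sketch: the request field casing follows the assumed JSON tags sketched earlier, and auth/session handling is left to whatever the app's existing fetch layer does:

```ts
// Hypothetical client call for the new endpoint; field casing is an
// assumption, not confirmed by this diff.
async function getMetricAttributes(
	metricName: string,
	start?: number, // unix millis
	end?: number, // unix millis
): Promise<unknown> {
	const response = await fetch('/api/v2/metrics/attributes', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ metricName, start, end }),
	});
	if (!response.ok) {
		throw new Error(`metrics/attributes failed: ${response.status}`);
	}
	return response.json();
}
```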
@@ -3,16 +3,15 @@ package metricsexplorer
import (
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"net/http"
"strconv"

"github.com/gorilla/mux"

"github.com/SigNoz/signoz/pkg/query-service/constants"

v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"

"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/model/metrics_explorer"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)

func ParseFilterKeySuggestions(r *http.Request) (*metrics_explorer.FilterKeyRequest, *model.ApiError) {
@@ -51,6 +51,8 @@ func NewAggExprRewriter(
// and the args if the parametric aggregation function is used.
func (r *aggExprRewriter) Rewrite(
ctx context.Context,
startNs uint64,
endNs uint64,
expr string,
rateInterval uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -77,7 +79,12 @@ func (r *aggExprRewriter) Rewrite(
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
}

visitor := newExprVisitor(r.logger, keys,
visitor := newExprVisitor(
ctx,
startNs,
endNs,
r.logger,
keys,
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
@@ -98,6 +105,8 @@ func (r *aggExprRewriter) Rewrite(
// RewriteMulti rewrites a slice of expressions.
func (r *aggExprRewriter) RewriteMulti(
ctx context.Context,
startNs uint64,
endNs uint64,
exprs []string,
rateInterval uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -106,7 +115,7 @@ func (r *aggExprRewriter) RewriteMulti(
var errs []error
var chArgsList [][]any
for i, e := range exprs {
w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
w, chArgs, err := r.Rewrite(ctx, startNs, endNs, e, rateInterval, keys)
if err != nil {
errs = append(errs, err)
out[i] = e
@@ -123,6 +132,9 @@ func (r *aggExprRewriter) RewriteMulti(

// exprVisitor walks FunctionExpr nodes and applies the mappers.
type exprVisitor struct {
ctx context.Context
startNs uint64
endNs uint64
chparser.DefaultASTVisitor
logger *slog.Logger
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
@@ -137,6 +149,9 @@ type exprVisitor struct {
}

func newExprVisitor(
ctx context.Context,
startNs uint64,
endNs uint64,
logger *slog.Logger,
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
@@ -146,6 +161,9 @@ func newExprVisitor(
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
ctx: ctx,
startNs: startNs,
endNs: endNs,
logger: logger,
fieldKeys: fieldKeys,
fullTextColumn: fullTextColumn,
@@ -190,7 +208,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -200,7 +218,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
FullTextColumn: v.fullTextColumn,
JsonBodyPrefix: v.jsonBodyPrefix,
JsonKeyToKey: v.jsonKeyToKey,
}, 0, 0,
}, v.startNs, v.endNs,
)
if err != nil {
return err
@@ -220,7 +238,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
@@ -238,7 +256,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i, arg := range args {
orig := arg.String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return err
}
@@ -19,6 +19,8 @@ import (

func CollisionHandledFinalExpr(
ctx context.Context,
startNs uint64,
endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
fm qbtypes.FieldMapper,
cb qbtypes.ConditionBuilder,
@@ -45,7 +47,7 @@ func CollisionHandledFinalExpr(

addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, startNs, endNs)
if err != nil {
return err
}
@@ -58,7 +60,7 @@ func CollisionHandledFinalExpr(
return nil
}

colName, err := fm.FieldFor(ctx, field)
colName, err := fm.FieldFor(ctx, startNs, endNs, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -93,7 +95,7 @@ func CollisionHandledFinalExpr(
if err != nil {
return "", nil, err
}
colName, _ = fm.FieldFor(ctx, key)
colName, _ = fm.FieldFor(ctx, startNs, endNs, key)
colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
stmts = append(stmts, colName)
}
@@ -48,8 +48,8 @@ func (b *defaultConditionBuilder) ConditionFor(
op qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
startNs uint64,
endNs uint64,
) (string, error) {

if key.FieldContext != telemetrytypes.FieldContextResource {
@@ -68,7 +68,7 @@ func (b *defaultConditionBuilder) ConditionFor(
keyIdxFilter := sb.Like(column.Name, keyIndexFilter(key))
valueForIndexFilter := valueForIndexFilter(op, key, value)

fieldName, err := b.fm.FieldFor(ctx, key)
fieldName, err := b.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}
@@ -47,6 +47,7 @@ func (m *defaultFieldMapper) ColumnFor(

func (m *defaultFieldMapper) FieldFor(
ctx context.Context,
tsStart, tsEnd uint64,
key *telemetrytypes.TelemetryFieldKey,
) (string, error) {
column, err := m.getColumn(ctx, key)
@@ -61,10 +62,11 @@ func (m *defaultFieldMapper) FieldFor(

func (m *defaultFieldMapper) ColumnExpressionFor(
ctx context.Context,
tsStart, tsEnd uint64,
key *telemetrytypes.TelemetryFieldKey,
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
colName, err := m.FieldFor(ctx, key)
colName, err := m.FieldFor(ctx, tsStart, tsEnd, key)
if err != nil {
return "", err
}
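This signature change threads the query's time range through field resolution, presumably (given the new `KeyEvolutionMetadata` wiring earlier in this PR) so a mapper can resolve a key against whichever column version was active in that window. The interface shape implied by the updated call sites, as a sketch; methods beyond `FieldFor` are not shown in this diff:

```go
// Shape of the time-aware mapper as implied by the updated call sites.
// The bounds are named startNs/endNs (nanoseconds) at the rewriter call
// sites and tsStart/tsEnd here; the unit is an inference, not confirmed.
type FieldMapper interface {
	FieldFor(
		ctx context.Context,
		tsStart, tsEnd uint64, // query window the key must resolve within
		key *telemetrytypes.TelemetryFieldKey,
	) (string, error)
}
```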
@@ -35,37 +35,37 @@ import (
)

type Handlers struct {
Organization organization.Handler
Preference preference.Handler
User user.Handler
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
AuthDomain authdomain.Handler
Session session.Handler
SpanPercentile spanpercentile.Handler
Services services.Handler
Metrics metricsexplorer.Handler
Organization organization.Handler
Preference preference.Handler
User user.Handler
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
AuthDomain authdomain.Handler
Session session.Handler
SpanPercentile spanpercentile.Handler
Services services.Handler
MetricsExplorer metricsexplorer.Handler
}

func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing) Handlers {
return Handlers{
Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter),
Preference: implpreference.NewHandler(modules.Preference),
User: impluser.NewHandler(modules.User, modules.UserGetter),
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings, querier, licensing),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
AuthDomain: implauthdomain.NewHandler(modules.AuthDomain),
Session: implsession.NewHandler(modules.Session),
Services: implservices.NewHandler(modules.Services),
Metrics: implmetricsexplorer.NewHandler(modules.Metrics),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter),
Preference: implpreference.NewHandler(modules.Preference),
User: impluser.NewHandler(modules.User, modules.UserGetter),
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings, querier, licensing),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),
AuthDomain: implauthdomain.NewHandler(modules.AuthDomain),
Session: implsession.NewHandler(modules.Session),
Services: implservices.NewHandler(modules.Services),
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
}
}
@@ -47,22 +47,22 @@ import (
 )

 type Modules struct {
-	OrgGetter      organization.Getter
-	OrgSetter      organization.Setter
-	Preference     preference.Module
-	User           user.Module
-	UserGetter     user.Getter
-	SavedView      savedview.Module
-	Apdex          apdex.Module
-	Dashboard      dashboard.Module
-	QuickFilter    quickfilter.Module
-	TraceFunnel    tracefunnel.Module
-	RawDataExport  rawdataexport.Module
-	AuthDomain     authdomain.Module
-	Session        session.Module
-	Services       services.Module
-	SpanPercentile spanpercentile.Module
-	Metrics        metricsexplorer.Module
+	OrgGetter       organization.Getter
+	OrgSetter       organization.Setter
+	Preference      preference.Module
+	User            user.Module
+	UserGetter      user.Getter
+	SavedView       savedview.Module
+	Apdex           apdex.Module
+	Dashboard       dashboard.Module
+	QuickFilter     quickfilter.Module
+	TraceFunnel     tracefunnel.Module
+	RawDataExport   rawdataexport.Module
+	AuthDomain      authdomain.Module
+	Session         session.Module
+	Services        services.Module
+	SpanPercentile  spanpercentile.Module
+	MetricsExplorer metricsexplorer.Module
 }

 func NewModules(
@@ -86,21 +86,21 @@ func NewModules(
 	userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))

 	return Modules{
-		OrgGetter:      orgGetter,
-		OrgSetter:      orgSetter,
-		Preference:     implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
-		SavedView:      implsavedview.NewModule(sqlstore),
-		Apdex:          implapdex.NewModule(sqlstore),
-		Dashboard:      impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil)),
-		User:           user,
-		UserGetter:     userGetter,
-		QuickFilter:    quickfilter,
-		TraceFunnel:    impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
-		RawDataExport:  implrawdataexport.NewModule(querier),
-		AuthDomain:     implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs),
-		Session:        implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
-		SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
-		Services:       implservices.NewModule(querier, telemetryStore),
-		Metrics:        implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings),
+		OrgGetter:       orgGetter,
+		OrgSetter:       orgSetter,
+		Preference:      implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
+		SavedView:       implsavedview.NewModule(sqlstore),
+		Apdex:           implapdex.NewModule(sqlstore),
+		Dashboard:       impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil)),
+		User:            user,
+		UserGetter:      userGetter,
+		QuickFilter:     quickfilter,
+		TraceFunnel:     impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
+		RawDataExport:   implrawdataexport.NewModule(querier),
+		AuthDomain:      implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs),
+		Session:         implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
+		SpanPercentile:  implspanpercentile.NewModule(querier, providerSettings),
+		Services:        implservices.NewModule(querier, telemetryStore),
+		MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings),
 	}
 }
@@ -26,6 +26,7 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

 func (c *conditionBuilder) conditionFor(
 	ctx context.Context,
+	startNs, endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	operator qbtypes.FilterOperator,
 	value any,
@@ -47,7 +48,7 @@ func (c *conditionBuilder) conditionFor(
 		return "", err
 	}

-	tblFieldName, err := c.fm.FieldFor(ctx, key)
+	tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
 	if err != nil {
 		return "", err
 	}
@@ -226,10 +227,10 @@ func (c *conditionBuilder) ConditionFor(
 	operator qbtypes.FilterOperator,
 	value any,
 	sb *sqlbuilder.SelectBuilder,
-	_ uint64,
-	_ uint64,
+	startNs uint64,
+	endNs uint64,
 ) (string, error) {
-	condition, err := c.conditionFor(ctx, key, operator, value, sb)
+	condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
 	if err != nil {
 		return "", err
 	}
@@ -237,12 +238,12 @@ func (c *conditionBuilder) ConditionFor(
 	if operator.AddDefaultExistsFilter() {
 		// skip adding exists filter for intrinsic fields
 		// with an exception for body json search
-		field, _ := c.fm.FieldFor(ctx, key)
+		field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
 		if slices.Contains(maps.Keys(IntrinsicFields), field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
 			return condition, nil
 		}

-		existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
+		existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
 		if err != nil {
 			return "", err
 		}
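Worth noting: the primary predicate and the auto-added exists predicate above now resolve columns against the same (startNs, endNs) window. A toy restatement of that invariant, with mocked types (nothing here is the real builder):

```go
package sketch

import "context"

// mapper is a stand-in for qbtypes.FieldMapper, reduced to the one method
// this invariant cares about.
type mapper interface {
	FieldFor(ctx context.Context, startNs, endNs uint64, key string) (string, error)
}

// buildCondition mirrors the shape of conditionFor: both column lookups see
// the same window, so a query straddling a column migration cannot compare
// one physical column while exists-checking another.
func buildCondition(ctx context.Context, fm mapper, startNs, endNs uint64, key string) (string, error) {
	col, err := fm.FieldFor(ctx, startNs, endNs, key)
	if err != nil {
		return "", err
	}
	existsCol, err := fm.FieldFor(ctx, startNs, endNs, key) // same window, same column
	if err != nil {
		return "", err
	}
	return col + " = ? AND " + existsCol + " IS NOT NULL", nil
}
```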
@@ -6,6 +6,7 @@ import (

 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
 	"github.com/huandu/go-sqlbuilder"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -372,7 +373,8 @@ func TestConditionFor(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	conditionBuilder := NewConditionBuilder(fm)

 	for _, tc := range testCases {
@@ -425,7 +427,8 @@ func TestConditionForMultipleKeys(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	conditionBuilder := NewConditionBuilder(fm)

 	for _, tc := range testCases {
@@ -664,7 +667,8 @@ func TestConditionForJSONBodySearch(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	conditionBuilder := NewConditionBuilder(fm)

 	for _, tc := range testCases {
@@ -4,11 +4,14 @@ import (
 	"context"
 	"fmt"
 	"strings"
+	"time"

 	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
 	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/huandu/go-sqlbuilder"

 	"golang.org/x/exp/maps"
@@ -55,10 +58,13 @@ var (
 )

 type fieldMapper struct {
+	evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore
 }

-func NewFieldMapper() qbtypes.FieldMapper {
-	return &fieldMapper{}
+func NewFieldMapper(evolutionMetadataStore telemetrytypes.KeyEvolutionMetadataStore) qbtypes.FieldMapper {
+	return &fieldMapper{
+		evolutionMetadataStore: evolutionMetadataStore,
+	}
 }

 func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
@@ -97,7 +103,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
 	return nil, qbtypes.ErrColumnNotFound
 }

-func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
+func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
 	column, err := m.getColumn(ctx, key)
 	if err != nil {
 		return "", err
@@ -109,19 +115,35 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
 		if key.FieldContext != telemetrytypes.FieldContextResource {
 			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
 		}
-		oldColumn := logsV2Columns["resources_string"]
-		oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)

 		// have to add ::String as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
 		// once the ClickHouse dependency is updated, we need to check if we can remove it.
+		baseColumn := logsV2Columns["resources_string"]
+		tsStartTime := time.Unix(0, int64(tsStart))
+
+		// Extract orgId from context
+		var orgID valuer.UUID
+		if claims, err := authtypes.ClaimsFromContext(ctx); err == nil {
+			orgID, err = valuer.NewUUID(claims.OrgID)
+			if err != nil {
+				return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid orgId %s", claims.OrgID)
+			}
+		}
+
+		// get all evolutions for the column
+		evolutions := m.evolutionMetadataStore.Get(ctx, orgID, baseColumn.Name)
+
+		// restricted for now to just one entry, where we know the change from map to json
+		if len(evolutions) > 0 && evolutions[0].ReleaseTime.Before(tsStartTime) {
+			return fmt.Sprintf("%s.`%s`::String", column.Name, key.Name), nil
+		}
+
 		if key.Materialized {
-			oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
+			oldKeyName := telemetrytypes.FieldKeyToMaterializedColumnName(key)
 			oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
 			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
 		} else {
-			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
+			attrVal := fmt.Sprintf("%s['%s']", baseColumn.Name, key.Name)
+			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, baseColumn.Name, key.Name, attrVal), nil
 		}

 	case schema.ColumnTypeString,
 		schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
 		schema.ColumnTypeUInt64,
@@ -166,11 +188,12 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet

 func (m *fieldMapper) ColumnExpressionFor(
 	ctx context.Context,
+	tsStart, tsEnd uint64,
 	field *telemetrytypes.TelemetryFieldKey,
 	keys map[string][]*telemetrytypes.TelemetryFieldKey,
 ) (string, error) {

-	colName, err := m.FieldFor(ctx, field)
+	colName, err := m.FieldFor(ctx, tsStart, tsEnd, field)
 	if errors.Is(err, qbtypes.ErrColumnNotFound) {
 		// the key didn't have the right context to be added to the query
 		// we try to use the context we know of
@@ -180,7 +203,7 @@ func (m *fieldMapper) ColumnExpressionFor(
 		if _, ok := logsV2Columns[field.Name]; ok {
 			// if it is, attach the column name directly
 			field.FieldContext = telemetrytypes.FieldContextLog
-			colName, _ = m.FieldFor(ctx, field)
+			colName, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
 		} else {
 			// - the context is not provided
 			// - there are not keys for the field
@@ -198,12 +221,12 @@ func (m *fieldMapper) ColumnExpressionFor(
 		}
 	} else if len(keysForField) == 1 {
 		// we have a single key for the field, use it
-		colName, _ = m.FieldFor(ctx, keysForField[0])
+		colName, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
 	} else {
 		// select any non-empty value from the keys
 		args := []string{}
 		for _, key := range keysForField {
-			colName, _ = m.FieldFor(ctx, key)
+			colName, _ = m.FieldFor(ctx, tsStart, tsEnd, key)
 			args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
 		}
 		colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
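Condensed, the rule FieldFor now applies for resource attributes is: if the first evolution entry for resources_string was released strictly before the query's start time, every row in the window already carries the new JSON resource column, so the mapper emits the plain resource.`key`::String expression; otherwise it keeps the multiIf fallback that reads both columns. A standalone restatement of just that decision (Evolution and chooseExpr are illustrative stand-ins, not the real types):

```go
package main

import (
	"fmt"
	"time"
)

// Evolution stands in for telemetrytypes.KeyEvolutionMetadataKey.
type Evolution struct{ ReleaseTime time.Time }

func chooseExpr(evolutions []Evolution, tsStart uint64, key string) string {
	start := time.Unix(0, int64(tsStart))
	// Only the first entry is consulted, matching the diff's "one known
	// map -> JSON transition per base column" restriction. Before() is a
	// strict inequality, so a window starting exactly at the release time
	// still takes the fallback.
	if len(evolutions) > 0 && evolutions[0].ReleaseTime.Before(start) {
		return fmt.Sprintf("resource.`%s`::String", key)
	}
	return fmt.Sprintf(
		"multiIf(resource.`%s` IS NOT NULL, resource.`%s`::String, mapContains(resources_string, '%s'), resources_string['%s'], NULL)",
		key, key, key, key)
}

func main() {
	release := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	evs := []Evolution{{ReleaseTime: release}}

	after := uint64(release.Add(24 * time.Hour).UnixNano())
	at := uint64(release.UnixNano())

	fmt.Println(chooseExpr(evs, after, "service.name")) // JSON-only expression
	fmt.Println(chooseExpr(evs, at, "service.name"))    // fallback expression
}
```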
@@ -3,10 +3,14 @@ package telemetrylogs
 import (
 	"context"
 	"testing"
+	"time"

 	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
+	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -164,7 +168,8 @@ func TestGetColumn(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(mockStore)

 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
@@ -229,7 +234,7 @@ func TestGetFieldKeyName(t *testing.T) {
 			expectedError: nil,
 		},
 		{
-			name: "Map column type - resource attribute",
+			name: "Map column type - resource attribute - json",
 			key: telemetrytypes.TelemetryFieldKey{
 				Name:         "service.name",
 				FieldContext: telemetrytypes.FieldContextResource,
@@ -238,7 +243,7 @@ func TestGetFieldKeyName(t *testing.T) {
 			expectedError: nil,
 		},
 		{
-			name: "Map column type - resource attribute - Materialized",
+			name: "Map column type - resource attribute - Materialized - json",
 			key: telemetrytypes.TelemetryFieldKey{
 				Name:         "service.name",
 				FieldContext: telemetrytypes.FieldContextResource,
@@ -261,8 +266,97 @@ func TestGetFieldKeyName(t *testing.T) {

 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
-			fm := NewFieldMapper()
-			result, err := fm.FieldFor(ctx, &tc.key)
+			mockStore := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+			fm := NewFieldMapper(mockStore)
+			result, err := fm.FieldFor(ctx, 0, 0, &tc.key)

 			if tc.expectedError != nil {
 				assert.Equal(t, tc.expectedError, err)
 			} else {
 				require.NoError(t, err)
 				assert.Equal(t, tc.expectedResult, result)
 			}
 		})
 	}
 }

+func TestFieldForWithEvolutionMetadata(t *testing.T) {
+	ctx := context.Background()
+	orgId := valuer.GenerateUUID()
+	ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
+		OrgID: orgId.String(),
+	})
+
+	// Create a test release time
+	releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
+	releaseTimeNano := uint64(releaseTime.UnixNano())
+
+	// Common test key
+	serviceNameKey := telemetrytypes.TelemetryFieldKey{
+		Name:         "service.name",
+		FieldContext: telemetrytypes.FieldContextResource,
+	}
+
+	// Common expected results
+	jsonOnlyResult := "resource.`service.name`::String"
+	fallbackResult := "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)"
+
+	// Set up stores once
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	setupResourcesStringEvolutionMetadata(ctx, storeWithMetadata, orgId, releaseTime)
+	storeWithoutMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+
+	testCases := []struct {
+		name           string
+		tsStart        uint64
+		tsEnd          uint64
+		key            telemetrytypes.TelemetryFieldKey
+		mockStore      *telemetrytypestest.MockKeyEvolutionMetadataStore
+		expectedResult string
+		expectedError  error
+	}{
+		{
+			name:           "Resource attribute - tsStart before release time (use fallback with multiIf)",
+			tsStart:        releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
+			tsEnd:          releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			key:            serviceNameKey,
+			mockStore:      storeWithMetadata,
+			expectedResult: fallbackResult,
+			expectedError:  nil,
+		},
+		{
+			name:           "Resource attribute - tsStart after release time (use new json column)",
+			tsStart:        releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			tsEnd:          releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
+			key:            serviceNameKey,
+			mockStore:      storeWithMetadata,
+			expectedResult: jsonOnlyResult,
+			expectedError:  nil,
+		},
+		{
+			name:           "Resource attribute - no evolution metadata (use fallback with multiIf)",
+			tsStart:        releaseTimeNano,
+			tsEnd:          releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			key:            serviceNameKey,
+			mockStore:      storeWithoutMetadata,
+			expectedResult: fallbackResult,
+			expectedError:  nil,
+		},
+		{
+			name:           "Resource attribute - tsStart exactly at release time (use fallback with multiIf)",
+			tsStart:        releaseTimeNano,
+			tsEnd:          releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			key:            serviceNameKey,
+			mockStore:      storeWithMetadata,
+			expectedResult: fallbackResult,
+			expectedError:  nil,
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			fm := NewFieldMapper(tc.mockStore)
+			result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
+
+			if tc.expectedError != nil {
+				assert.Equal(t, tc.expectedError, err)
@@ -5,12 +5,14 @@ import (

 	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
 	"github.com/stretchr/testify/require"
 )

 // TestLikeAndILikeWithoutWildcards_Warns tests that LIKE/ILIKE without wildcards add warnings and include the docs URL
 func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)

 	keys := buildCompleteFieldKeyMap()
@@ -34,7 +36,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

 	for _, expr := range tests {
 		t.Run(expr, func(t *testing.T) {
 			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
 			require.NoError(t, err)
 			require.NotNil(t, clause)

@@ -47,7 +49,8 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

 // TestLikeAndILikeWithWildcards_NoWarn tests that LIKE/ILIKE with wildcards do not add warnings
 func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)

 	keys := buildCompleteFieldKeyMap()
@@ -71,7 +74,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {

 	for _, expr := range tests {
 		t.Run(expr, func(t *testing.T) {
 			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
 			require.NoError(t, err)
 			require.NotNil(t, clause)
@@ -7,13 +7,15 @@ import (
 	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
 	"github.com/huandu/go-sqlbuilder"
 	"github.com/stretchr/testify/require"
 )

 // TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
 func TestFilterExprLogsBodyJSON(t *testing.T) {
-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)

 	// Define a comprehensive set of field keys to support all test cases
@@ -163,7 +165,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
 	for _, tc := range testCases {
 		t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

 			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

 			if tc.shouldPass {
 				if err != nil {
@@ -9,13 +9,15 @@ import (
 	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
 	"github.com/huandu/go-sqlbuilder"
 	"github.com/stretchr/testify/require"
 )

 // TestFilterExprLogs tests a comprehensive set of query patterns for logs search
 func TestFilterExprLogs(t *testing.T) {
-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)

 	// Define a comprehensive set of field keys to support all test cases
@@ -2423,7 +2425,8 @@ func TestFilterExprLogs(t *testing.T) {

 // TestFilterExprLogsConflictNegation tests query patterns with conflicting negations for logs search
 func TestFilterExprLogsConflictNegation(t *testing.T) {
-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)

 	// Define a comprehensive set of field keys to support all test cases
pkg/telemetrylogs/key_evolution_metadata.go (163 lines, normal file)
@@ -0,0 +1,163 @@
package telemetrylogs

import (
	"context"
	"encoding/json"
	"fmt"
	"log/slog"
	"time"

	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/cachetypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/huandu/go-sqlbuilder"
)

const (
	// KeyEvolutionMetadataTableName is the table name for key evolution metadata
	KeyEvolutionMetadataTableName = "distributed_column_key_evolution_metadata"
	// KeyEvolutionMetadataDBName is the database name for key evolution metadata
	KeyEvolutionMetadataDBName = "signoz_logs"
	// KeyEvolutionMetadataCacheKeyPrefix is the prefix for cache keys
	KeyEvolutionMetadataCacheKeyPrefix = "key_evolution_metadata:"
)

// CachedKeyEvolutionMetadata is a cacheable type for storing key evolution metadata
type CachedKeyEvolutionMetadata struct {
	Keys []*telemetrytypes.KeyEvolutionMetadataKey `json:"keys"`
}

var _ cachetypes.Cacheable = (*CachedKeyEvolutionMetadata)(nil)

func (c *CachedKeyEvolutionMetadata) MarshalBinary() ([]byte, error) {
	return json.Marshal(c)
}

func (c *CachedKeyEvolutionMetadata) UnmarshalBinary(data []byte) error {
	return json.Unmarshal(data, c)
}

// Each key can have multiple evolution entries, allowing for multiple column transitions over time.
// The cache is organized by orgId, then by key name.
type KeyEvolutionMetadata struct {
	cache          cache.Cache
	telemetryStore telemetrystore.TelemetryStore
	logger         *slog.Logger
}

func NewKeyEvolutionMetadata(telemetryStore telemetrystore.TelemetryStore, cache cache.Cache, logger *slog.Logger) *KeyEvolutionMetadata {
	return &KeyEvolutionMetadata{
		cache:          cache,
		telemetryStore: telemetryStore,
		logger:         logger,
	}
}

func (k *KeyEvolutionMetadata) fetchFromClickHouse(ctx context.Context, orgID valuer.UUID) {
	store := k.telemetryStore
	logger := k.logger

	ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()

	if store.ClickhouseDB() == nil {
		logger.WarnContext(ctx, "ClickHouse connection not available for key evolution metadata fetch")
		return
	}

	// Build query to fetch all key evolution metadata
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select(
		"base_column",
		"base_column_type",
		"new_column",
		"new_column_type",
		"release_time",
	)
	sb.From(fmt.Sprintf("%s.%s", KeyEvolutionMetadataDBName, KeyEvolutionMetadataTableName))
	sb.OrderBy("base_column", "release_time")

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

	rows, err := store.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		logger.WarnContext(ctx, "Failed to fetch key evolution metadata from ClickHouse", "error", err)
		return
	}
	defer rows.Close()

	// Group metadata by base_column
	metadataByKey := make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)

	for rows.Next() {
		var (
			baseColumn     string
			baseColumnType string
			newColumn      string
			newColumnType  string
			releaseTime    uint64
		)

		if err := rows.Scan(&baseColumn, &baseColumnType, &newColumn, &newColumnType, &releaseTime); err != nil {
			logger.WarnContext(ctx, "Failed to scan key evolution metadata row", "error", err)
			continue
		}

		key := &telemetrytypes.KeyEvolutionMetadataKey{
			BaseColumn:     baseColumn,
			BaseColumnType: baseColumnType,
			NewColumn:      newColumn,
			NewColumnType:  newColumnType,
			ReleaseTime:    time.Unix(0, int64(releaseTime)),
		}

		metadataByKey[baseColumn] = append(metadataByKey[baseColumn], key)
	}

	if err := rows.Err(); err != nil {
		logger.WarnContext(ctx, "Error iterating key evolution metadata rows", "error", err)
		return
	}

	// Store each key's metadata in cache
	for keyName, keys := range metadataByKey {
		cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
		cachedData := &CachedKeyEvolutionMetadata{Keys: keys}
		if err := k.cache.Set(ctx, orgID, cacheKey, cachedData, 24*time.Hour); err != nil {
			logger.WarnContext(ctx, "Failed to set key evolution metadata in cache", "key", keyName, "error", err)
		}
	}

	logger.DebugContext(ctx, "Successfully fetched key evolution metadata from ClickHouse", "count", len(metadataByKey))
}

// Add adds a metadata key for the given key name and orgId.
// This is primarily for testing purposes. In production, data should come from ClickHouse.
func (k *KeyEvolutionMetadata) Add(ctx context.Context, orgId valuer.UUID, keyName string, key *telemetrytypes.KeyEvolutionMetadataKey) {
	k.logger.WarnContext(ctx, "Add is not implemented for key evolution metadata")
}

// Get retrieves all metadata keys for the given key name and orgId from cache.
// Returns an empty slice if the key is not found in cache.
func (k *KeyEvolutionMetadata) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
	cacheKey := KeyEvolutionMetadataCacheKeyPrefix + keyName
	var cachedData CachedKeyEvolutionMetadata
	if err := k.cache.Get(ctx, orgId, cacheKey, &cachedData); err != nil {
		// Cache miss - fetch from ClickHouse and try again
		k.fetchFromClickHouse(ctx, orgId)

		// Check cache again after fetching
		if err := k.cache.Get(ctx, orgId, cacheKey, &cachedData); err != nil {
			return nil
		}
	}

	// Return a copy to prevent external modification
	result := make([]*telemetrytypes.KeyEvolutionMetadataKey, len(cachedData.Keys))
	copy(result, cachedData.Keys)
	return result
}
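Get is effectively a read-through cache: a miss triggers a single ClickHouse scan that warms the cache for every base column in the org, then the lookup is retried once. A hedged sketch of how a caller would consult it (resolveUsesJSONColumn is a hypothetical helper; only the KeyEvolutionMetadata API comes from the file above):

```go
package telemetrylogs

import (
	"context"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// resolveUsesJSONColumn is a hypothetical caller; only (*KeyEvolutionMetadata).Get
// is taken from the new file above.
func resolveUsesJSONColumn(ctx context.Context, meta *KeyEvolutionMetadata, orgID valuer.UUID, tsStart uint64) bool {
	// read-through: a cache miss triggers fetchFromClickHouse and one retry;
	// a double miss returns nil and the caller falls back to the old column
	evolutions := meta.Get(ctx, orgID, "resources_string")
	start := time.Unix(0, int64(tsStart))
	// true means the whole query window postdates the map -> JSON release,
	// so reading only the new JSON column is safe
	return len(evolutions) > 0 && evolutions[0].ReleaseTime.Before(start)
}
```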
@@ -247,7 +247,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
 			continue
 		}
 		// get column expression for the field - use array index directly to avoid pointer to loop variable
-		colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
+		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
 		if err != nil {
 			return nil, err
 		}
@@ -267,7 +267,7 @@ func (b *logQueryStatementBuilder) buildListQuery(

 	// Add order by
 	for _, orderBy := range query.Order {
-		colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
+		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
 		if err != nil {
 			return nil, err
 		}
@@ -333,7 +333,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
 	// Keep original column expressions so we can build the tuple
 	fieldNames := make([]string, 0, len(query.GroupBy))
 	for _, gb := range query.GroupBy {
-		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
+		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
 		if err != nil {
 			return nil, err
 		}
@@ -347,7 +347,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
 	allAggChArgs := make([]any, 0)
 	for i, agg := range query.Aggregations {
 		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
-			ctx, agg.Expression,
+			ctx, start, end, agg.Expression,
 			uint64(query.StepInterval.Seconds()),
 			keys,
 		)
@@ -479,7 +479,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
 	var allGroupByArgs []any

 	for _, gb := range query.GroupBy {
-		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
+		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
 		if err != nil {
 			return nil, err
 		}
@@ -496,7 +496,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
 	for idx := range query.Aggregations {
 		aggExpr := query.Aggregations[idx]
 		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
-			ctx, aggExpr.Expression,
+			ctx, start, end, aggExpr.Expression,
 			rateInterval,
 			keys,
 		)
@@ -592,7 +592,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
 		JsonBodyPrefix: b.jsonBodyPrefix,
 		JsonKeyToKey:   b.jsonKeyToKey,
 		Variables:      variables,
 	}, start, end)

 	if err != nil {
 		return nil, err
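The common thread across these hunks: the (start, end) pair the statement builder already received for time filtering is now also forwarded into every column-resolution and aggregation-rewrite call. A hypothetical helper capturing the pattern (columnsFor is not in the diff; the FieldMapper method signature is):

```go
package telemetrylogs

import (
	"context"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

// columnsFor is a hypothetical helper, not part of the diff: it resolves a
// list of fields against one shared window so SELECT, GROUP BY and ORDER BY
// all agree on the physical columns.
func columnsFor(ctx context.Context, fm qbtypes.FieldMapper, start, end uint64,
	fields []telemetrytypes.TelemetryFieldKey,
	keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, error) {
	out := make([]string, 0, len(fields))
	for i := range fields {
		// index into the slice rather than ranging by value, mirroring the
		// "avoid pointer to loop variable" comment in buildListQuery above
		expr, err := fm.ColumnExpressionFor(ctx, start, end, &fields[i], keys)
		if err != nil {
			return nil, err
		}
		out = append(out, expr)
	}
	return out, nil
}
```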
@@ -8,9 +8,11 @@ import (
 	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
+	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/stretchr/testify/require"
 )

@@ -38,7 +40,14 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
 }

 func TestStatementBuilderTimeSeries(t *testing.T) {
+
+	// Create a test release time
+	releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
+	releaseTimeNano := uint64(releaseTime.UnixNano())
+
 	cases := []struct {
+		startTs     uint64
+		endTs       uint64
 		name        string
 		requestType qbtypes.RequestType
 		query       qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
@@ -46,14 +55,16 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
 		expectedErr error
 	}{
 		{
-			name:        "Time series with limit",
+			startTs:     releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			endTs:       releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
+			name:        "Time series with limit and count distinct on service.name",
 			requestType: qbtypes.RequestTypeTimeSeries,
 			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
 				Signal:       telemetrytypes.SignalLogs,
 				StepInterval: qbtypes.Step{Duration: 30 * time.Second},
 				Aggregations: []qbtypes.LogAggregation{
 					{
-						Expression: "count()",
+						Expression: "count_distinct(service.name)",
 					},
 				},
 				Filter: &qbtypes.Filter{
@@ -69,20 +80,22 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
 				},
 			},
 			expected: qbtypes.Statement{
-				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
-				Args:  []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
+				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
+				Args:  []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
 			},
 			expectedErr: nil,
 		},
 		{
-			name:        "Time series with OR b/w resource attr and attribute filter",
+			startTs:     releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
+			endTs:       releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
+			name:        "Time series with OR b/w resource attr and attribute filter and count distinct on service.name",
 			requestType: qbtypes.RequestTypeTimeSeries,
 			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
 				Signal:       telemetrytypes.SignalTraces,
 				StepInterval: qbtypes.Step{Duration: 30 * time.Second},
 				Aggregations: []qbtypes.LogAggregation{
 					{
-						Expression: "count()",
+						Expression: "count_distinct(service.name)",
 					},
 				},
 				Filter: &qbtypes.Filter{
@@ -98,12 +111,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
 				},
 			},
 			expected: qbtypes.Statement{
-				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
-				Args:  []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
+				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
+				Args:  []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1705224600), uint64(1705485600), "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600), 10, "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600)},
 			},
 			expectedErr: nil,
 		},
 		{
+			startTs:     releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			endTs:       releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
 			name:        "Time series with limit + custom order by",
 			requestType: qbtypes.RequestTypeTimeSeries,
 			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -137,12 +152,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
 			},
 			expected: qbtypes.Statement{
-				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
-				Args:  []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
+				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
+				Args:  []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
 			},
 			expectedErr: nil,
 		},
 		{
+			startTs:     releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			endTs:       releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
 			name:        "Time series with group by on materialized column",
 			requestType: qbtypes.RequestTypeTimeSeries,
 			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -169,10 +186,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
 			},
 			expected: qbtypes.Statement{
 				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `materialized.key.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`materialized.key.name`) GLOBAL IN (SELECT `materialized.key.name` FROM __limit_cte) GROUP BY ts, `materialized.key.name`",
-				Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
+				Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
 			},
 		},
 		{
+			startTs:     releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
+			endTs:       releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
 			name:        "Time series with materialised column using or with regex operator",
 			requestType: qbtypes.RequestTypeTimeSeries,
 			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -190,13 +209,21 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
 			},
 			expected: qbtypes.Statement{
 				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
-				Args: []any{uint64(1747945619), uint64(1747983448), "redis.*", true, "memcached", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
+				Args: []any{uint64(1705397400), uint64(1705485600), "redis.*", true, "memcached", true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
 			},
 			expectedErr: nil,
 		},
 	}

-	fm := NewFieldMapper()
+	ctx := context.Background()
+	orgId := valuer.GenerateUUID()
+	ctx = authtypes.NewContextWithClaims(ctx, authtypes.Claims{
+		OrgID: orgId.String(),
+	})
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	setupResourcesStringEvolutionMetadata(ctx, storeWithMetadata, orgId, releaseTime)
+
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)
 	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
 	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -220,7 +247,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
 	for _, c := range cases {
 		t.Run(c.name, func(t *testing.T) {

-			q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
+			q, err := statementBuilder.Build(ctx, c.startTs, c.endTs, c.requestType, c.query, nil)

 			if c.expectedErr != nil {
 				require.Error(t, err)
@@ -317,7 +344,8 @@ func TestStatementBuilderListQuery(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)
 	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
 	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -426,7 +454,8 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)
 	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
 	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -500,7 +529,8 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)
 	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
 	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
@@ -596,7 +626,8 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
 		},
 	}

-	fm := NewFieldMapper()
+	storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
+	fm := NewFieldMapper(storeWithMetadata)
 	cb := NewConditionBuilder(fm)
 	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
 	mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
@@ -1,9 +1,13 @@
package telemetrylogs

import (
"context"
"strings"
+"time"

"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
+"github.com/SigNoz/signoz/pkg/valuer"
)

// Helper function to limit string length for display

@@ -951,3 +955,20 @@ func buildCompleteFieldKeyMapCollision() map[string][]*telemetrytypes.TelemetryF
}
return keysMap
}

+// buildKeyEvolutionMetadataForResourcesString returns key evolution metadata for resources_string.
+// This can be used to populate a mock key evolution metadata store in tests.
+func buildKeyEvolutionMetadataForResourcesString(releaseTime time.Time) *telemetrytypes.KeyEvolutionMetadataKey {
+return &telemetrytypes.KeyEvolutionMetadataKey{
+BaseColumn: "resources_string",
+BaseColumnType: "Map(LowCardinality(String), String)",
+NewColumn: "resource",
+NewColumnType: "JSON(max_dynamic_paths=100)",
+ReleaseTime: releaseTime,
+}
+}
+
+// setupResourcesStringEvolutionMetadata sets up resources_string evolution metadata in the mock store.
+func setupResourcesStringEvolutionMetadata(ctx context.Context, m *telemetrytypestest.MockKeyEvolutionMetadataStore, orgId valuer.UUID, releaseTime time.Time) {
+m.Add(ctx, orgId, "resources_string", buildKeyEvolutionMetadataForResourcesString(releaseTime))
+}
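Read together, the scattered test hunks above amount to one setup change per statement-builder test: build a mock key-evolution store, register the resources_string cutover for a test org, hand the populated store to the field mapper, and drive Build with per-case startTs/endTs windows instead of hardcoded timestamps. A condensed, non-runnable excerpt of that flow, stitched from the hunks above (orgId, releaseTime, statementBuilder, and c are defined elsewhere in the tests):

// Condensed from the test setup in this diff; releaseTime is the simulated
// cutover instant that each case's startTs/endTs window is positioned around.
ctx := authtypes.NewContextWithClaims(context.Background(), authtypes.Claims{
	OrgID: orgId.String(),
})
storeWithMetadata := telemetrytypestest.NewMockKeyEvolutionMetadataStore()
setupResourcesStringEvolutionMetadata(ctx, storeWithMetadata, orgId, releaseTime)

fm := NewFieldMapper(storeWithMetadata)
cb := NewConditionBuilder(fm)

// Each case now carries its own time window.
q, err := statementBuilder.Build(ctx, c.startTs, c.endTs, c.requestType, c.query, nil)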
@@ -25,8 +25,7 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
-_ uint64,
-_ uint64,
+tsStart, tsEnd uint64,
) (string, error) {

switch operator {

@@ -45,7 +44,7 @@ func (c *conditionBuilder) ConditionFor(
return "", nil
}

-tblFieldName, err := c.fm.FieldFor(ctx, key)
+tblFieldName, err := c.fm.FieldFor(ctx, tsStart, tsEnd, key)
if err != nil {
// if we don't have a table field name, we can't build a condition for related values
return "", nil
@@ -53,7 +53,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)

if tc.expectedError != nil {
@@ -51,7 +51,7 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet
return column, nil
}

-func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
+func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err

@@ -69,11 +69,12 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr

func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
+startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

-colName, err := m.FieldFor(ctx, field)
+colName, err := m.FieldFor(ctx, startNs, endNs, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of

@@ -83,7 +84,7 @@ func (m *fieldMapper) ColumnExpressionFor(
if _, ok := attributeMetadataColumns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextSpan
-colName, _ = m.FieldFor(ctx, field)
+colName, _ = m.FieldFor(ctx, startNs, endNs, field)
} else {
// - the context is not provided
// - there are not keys for the field

@@ -101,12 +102,12 @@ func (m *fieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
-colName, _ = m.FieldFor(ctx, keysForField[0])
+colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
-colName, _ = m.FieldFor(ctx, key)
+colName, _ = m.FieldFor(ctx, startNs, endNs, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
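For a field that resolves to more than one candidate column, the mapper above builds a ClickHouse multiIf that returns the first non-empty value. A minimal, self-contained sketch of the same construction; the candidate column names are hypothetical placeholders, not values from this PR:

package main

import (
	"fmt"
	"strings"
)

// buildCoalesceExpr mirrors the multiIf construction in ColumnExpressionFor:
// for each candidate column it emits "toString(col) != '', toString(col)"
// and wraps the pairs in multiIf(..., NULL) so the first non-empty value wins.
func buildCoalesceExpr(cols []string) string {
	args := []string{}
	for _, col := range cols {
		args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", col, col))
	}
	return fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
}

func main() {
	// Hypothetical candidate columns for the same logical key.
	expr := buildCoalesceExpr([]string{
		"attributes_string['host.name']",
		"resources_string['host.name']",
	})
	fmt.Println(expr)
	// multiIf(toString(attributes_string['host.name']) != '', toString(attributes_string['host.name']),
	//         toString(resources_string['host.name']) != '', toString(resources_string['host.name']), NULL)
}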
@@ -145,6 +145,8 @@ func TestGetFieldKeyName(t *testing.T) {

testCases := []struct {
name string
+tsStart uint64
+tsEnd uint64
key telemetrytypes.TelemetryFieldKey
expectedResult string
expectedError error

@@ -203,7 +205,7 @@ func TestGetFieldKeyName(t *testing.T) {

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
-result, err := fm.FieldFor(ctx, &tc.key)
+result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)
@@ -942,18 +942,18 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldDataType: fieldValueSelector.FieldDataType,
}

-selectColumn, err := t.fm.FieldFor(ctx, key)
+selectColumn, err := t.fm.FieldFor(ctx, 0, 0, key)

if err != nil {
// we don't have a explicit column to select from the related metadata table
// so we will select either from resource_attributes or attributes table
// in that order
-resourceColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
+resourceColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
-attributeColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
+attributeColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,

@@ -978,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {

@@ -1002,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel

// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}

// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
@@ -120,7 +120,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
stepSec,
))
for _, g := range query.GroupBy {
-col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
+col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", []any{}, err
}

@@ -148,7 +148,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", []any{}, err
}

@@ -200,7 +200,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
))

for _, g := range query.GroupBy {
-col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
+col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}

@@ -231,7 +231,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
}

@@ -270,7 +270,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
stepSec,
))
for _, g := range query.GroupBy {
-col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
+col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}

@@ -295,7 +295,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
}
@@ -23,6 +23,8 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

func (c *conditionBuilder) conditionFor(
ctx context.Context,
+startNs uint64,
+endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,

@@ -39,7 +41,7 @@ func (c *conditionBuilder) conditionFor(
value = querybuilder.FormatValueForContains(value)
}

-tblFieldName, err := c.fm.FieldFor(ctx, key)
+tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}

@@ -139,10 +141,10 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
-_ uint64,
-_ uint64,
+startNs uint64,
+endNs uint64,
) (string, error) {
-condition, err := c.conditionFor(ctx, key, operator, value, sb)
+condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
if err != nil {
return "", err
}
@@ -65,7 +65,7 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
return nil, qbtypes.ErrColumnNotFound
}

-func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
+func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err

@@ -92,11 +92,12 @@ func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.Telemet

func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
+startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

-colName, err := m.FieldFor(ctx, field)
+colName, err := m.FieldFor(ctx, startNs, endNs, field)
if err != nil {
return "", err
}

@@ -207,7 +207,7 @@ func TestGetFieldKeyName(t *testing.T) {

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
-result, err := fm.FieldFor(ctx, &tc.key)
+result, err := fm.FieldFor(ctx, 0, 0, &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

@@ -359,7 +359,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(

sb.Select("fingerprint")
for _, g := range query.GroupBy {
-col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
+col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}
@@ -29,6 +29,8 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

func (c *conditionBuilder) conditionFor(
ctx context.Context,
+startNs uint64,
+endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,

@@ -52,7 +54,7 @@ func (c *conditionBuilder) conditionFor(
}

// then ask the mapper for the actual SQL reference
-tblFieldName, err := c.fm.FieldFor(ctx, key)
+tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}

@@ -227,20 +229,20 @@ func (c *conditionBuilder) ConditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
startNs uint64,
-_ uint64,
+endNs uint64,
) (string, error) {
if c.isSpanScopeField(key.Name) {
return c.buildSpanScopeCondition(key, operator, value, startNs)
}

-condition, err := c.conditionFor(ctx, key, operator, value, sb)
+condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
if err != nil {
return "", err
}

if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
-field, _ := c.fm.FieldFor(ctx, key)
+field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
if slices.Contains(maps.Keys(IntrinsicFields), field) ||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
slices.Contains(maps.Keys(CalculatedFields), field) ||

@@ -248,7 +250,7 @@ func (c *conditionBuilder) ConditionFor(
return condition, nil
}

-existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
+existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
if err != nil {
return "", err
}
@@ -225,6 +225,7 @@ func (m *defaultFieldMapper) ColumnFor(
// otherwise it returns qbtypes.ErrColumnNotFound
func (m *defaultFieldMapper) FieldFor(
ctx context.Context,
+startNs, endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
) (string, error) {
// Special handling for span scope fields

@@ -303,11 +304,12 @@ func (m *defaultFieldMapper) FieldFor(
// if it exists otherwise it returns qbtypes.ErrColumnNotFound
func (m *defaultFieldMapper) ColumnExpressionFor(
ctx context.Context,
+startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

-colName, err := m.FieldFor(ctx, field)
+colName, err := m.FieldFor(ctx, startNs, endNs, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of

@@ -317,7 +319,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
if _, ok := indexV3Columns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextSpan
-colName, _ = m.FieldFor(ctx, field)
+colName, _ = m.FieldFor(ctx, startNs, endNs, field)
} else {
// - the context is not provided
// - there are not keys for the field

@@ -335,12 +337,12 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
-colName, _ = m.FieldFor(ctx, keysForField[0])
+colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
-colName, _ = m.FieldFor(ctx, key)
+colName, _ = m.FieldFor(ctx, startNs, endNs, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
@@ -92,7 +92,7 @@ func TestGetFieldKeyName(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fm := NewFieldMapper()
-result, err := fm.FieldFor(ctx, &tc.key)
+result, err := fm.FieldFor(ctx, 0, 0, &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)
@@ -293,7 +293,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(

// TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs?
for _, field := range selectedFields {
-colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
+colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &field, keys)
if err != nil {
return nil, err
}

@@ -311,7 +311,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(

// Add order by
for _, orderBy := range query.Order {
-colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
+colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}

@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
-expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
+expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}

@@ -509,7 +509,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
allAggChArgs := make([]any, 0)
for i, agg := range query.Aggregations {
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
-ctx, agg.Expression,
+ctx, start, end, agg.Expression,
uint64(query.StepInterval.Seconds()),
keys,
)

@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(

var allGroupByArgs []any
for _, gb := range query.GroupBy {
-expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
+expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}

@@ -654,7 +654,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
for idx := range query.Aggregations {
aggExpr := query.Aggregations[idx]
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
-ctx, aggExpr.Expression,
+ctx, start, end, aggExpr.Expression,
rateInterval,
keys,
)

@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
}, start, end)

if err != nil {
return nil, err
@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
ConditionBuilder: b.stmtBuilder.cb,
FieldKeys: keys,
SkipResourceFilter: true,
}, b.start, b.end,
)
if err != nil {
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)

@@ -450,7 +450,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
if selectedFields[field.Name] {
continue
}
-colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &field, keys)
+colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &field, keys)
if err != nil {
b.stmtBuilder.logger.WarnContext(ctx, "failed to map select field",
"field", field.Name, "error", err)

@@ -465,7 +465,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
// Add order by support using ColumnExpressionFor
orderApplied := false
for _, orderBy := range b.operator.Order {
-colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
+colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}

@@ -547,6 +547,8 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
+b.start,
+b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,

@@ -572,6 +574,8 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
+b.start,
+b.end,
agg.Expression,
uint64(b.operator.StepInterval.Seconds()),
keys,

@@ -657,6 +661,8 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
+b.start,
+b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,

@@ -684,6 +690,8 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
+b.start,
+b.end,
agg.Expression,
rateInterval,
keys,

@@ -797,6 +805,8 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
for _, gb := range b.operator.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(
ctx,
+b.start,
+b.end,
&gb.TelemetryFieldKey,
b.stmtBuilder.fm,
b.stmtBuilder.cb,

@@ -822,6 +832,8 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
for i, agg := range b.operator.Aggregations {
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
ctx,
+b.start,
+b.end,
agg.Expression,
uint64((b.end-b.start)/querybuilder.NsToSeconds),
keys,
@@ -93,6 +93,16 @@ func newConfig() factory.Config {
}

+func (c Config) Validate() error {
+// Ensure that lifetime idle is not negative
+if c.Lifetime.Idle < 0 {
+return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "lifetime::idle must not be negative")
+}
+
+// Ensure that lifetime max is not negative
+if c.Lifetime.Max < 0 {
+return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "lifetime::max must not be negative")
+}
+
+// Ensure that rotation interval is smaller than lifetime idle
+if c.Rotation.Interval >= c.Lifetime.Idle {
+return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "rotation::interval must be smaller than lifetime::idle")
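A quick illustration of how this validation behaves. The mirror types and duration values below are assumptions made for the sketch; the real Config lives in the SigNoz package and uses its errors package rather than fmt:

package main

import (
	"fmt"
	"time"
)

// Minimal mirror of the config shape implied by the Validate checks above.
type Config struct {
	Lifetime struct{ Idle, Max time.Duration }
	Rotation struct{ Interval time.Duration }
}

func (c Config) Validate() error {
	if c.Lifetime.Idle < 0 {
		return fmt.Errorf("lifetime::idle must not be negative")
	}
	if c.Lifetime.Max < 0 {
		return fmt.Errorf("lifetime::max must not be negative")
	}
	// Rotation must happen before an idle session can expire.
	if c.Rotation.Interval >= c.Lifetime.Idle {
		return fmt.Errorf("rotation::interval must be smaller than lifetime::idle")
	}
	return nil
}

func main() {
	var c Config
	c.Lifetime.Idle = 30 * time.Minute
	c.Lifetime.Max = 12 * time.Hour
	c.Rotation.Interval = 45 * time.Minute
	fmt.Println(c.Validate()) // rotation::interval must be smaller than lifetime::idle
}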
@@ -263,7 +263,7 @@ func (provider *provider) getOrSetIdentity(ctx context.Context, orgID, userID va
return nil, err
}

-err = provider.cache.Set(ctx, orgID, identityCacheKey(identity.UserID), identity, -1)
+err = provider.cache.Set(ctx, orgID, identityCacheKey(identity.UserID), identity, 0)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to cache identity", "error", err)
}

@@ -410,7 +410,7 @@ func (provider *provider) setToken(ctx context.Context, token *authtypes.Token,
}

func (provider *provider) setIdentity(ctx context.Context, identity *authtypes.Identity) error {
-err := provider.cache.Set(ctx, emptyOrgID, identityCacheKey(identity.UserID), identity, -1)
+err := provider.cache.Set(ctx, emptyOrgID, identityCacheKey(identity.UserID), identity, 0)
if err != nil {
return err
}

@@ -434,7 +434,7 @@ func (provider *provider) getOrGetSetIdentity(ctx context.Context, userID valuer
return nil, err
}

-err = provider.cache.Set(ctx, emptyOrgID, identityCacheKey(userID), identity, -1)
+err = provider.cache.Set(ctx, emptyOrgID, identityCacheKey(userID), identity, 0)
if err != nil {
return nil, err
}
@@ -220,3 +220,53 @@ type TreemapResponse struct {
TimeSeries []TreemapEntry `json:"timeseries"`
Samples []TreemapEntry `json:"samples"`
}

+// MetricAttributesRequest represents the payload for the metric attributes endpoint.
+type MetricAttributesRequest struct {
+MetricName string `json:"metricName"`
+Start *int64 `json:"start,omitempty"`
+End *int64 `json:"end,omitempty"`
+}
+
+// Validate ensures MetricAttributesRequest contains acceptable values.
+func (req *MetricAttributesRequest) Validate() error {
+if req == nil {
+return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
+}
+
+if req.MetricName == "" {
+return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required")
+}
+
+if req.Start != nil && req.End != nil {
+if *req.Start >= *req.End {
+return errors.NewInvalidInputf(errors.CodeInvalidInput, "start (%d) must be less than end (%d)", *req.Start, *req.End)
+}
+}
+
+return nil
+}
+
+// UnmarshalJSON validates input immediately after decoding.
+func (req *MetricAttributesRequest) UnmarshalJSON(data []byte) error {
+type raw MetricAttributesRequest
+var decoded raw
+if err := json.Unmarshal(data, &decoded); err != nil {
+return err
+}
+*req = MetricAttributesRequest(decoded)
+return req.Validate()
+}
+
+// MetricAttribute represents a single attribute with its values and count.
+type MetricAttribute struct {
+Key string `json:"key"`
+Values []string `json:"values"`
+ValueCount uint64 `json:"valueCount"`
+}
+
+// MetricAttributesResponse is the output structure for the metric attributes endpoint.
+type MetricAttributesResponse struct {
+Attributes []MetricAttribute `json:"attributes"`
+TotalKeys int64 `json:"totalKeys"`
+}
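Because UnmarshalJSON calls Validate after decoding, any handler that decodes the body gets validation for free. A minimal sketch of that behaviour, with plain errors standing in for the SigNoz errors package (which is not shown in this diff); the json tags match the type above:

package main

import (
	"encoding/json"
	"fmt"
)

// Mirror of the request type above, trimmed for the illustration.
type MetricAttributesRequest struct {
	MetricName string `json:"metricName"`
	Start      *int64 `json:"start,omitempty"`
	End        *int64 `json:"end,omitempty"`
}

func (req *MetricAttributesRequest) Validate() error {
	if req.MetricName == "" {
		return fmt.Errorf("metric_name is required")
	}
	if req.Start != nil && req.End != nil && *req.Start >= *req.End {
		return fmt.Errorf("start (%d) must be less than end (%d)", *req.Start, *req.End)
	}
	return nil
}

// The local alias type drops the custom UnmarshalJSON method,
// so the inner decode cannot recurse back into this function.
func (req *MetricAttributesRequest) UnmarshalJSON(data []byte) error {
	type raw MetricAttributesRequest
	var decoded raw
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	*req = MetricAttributesRequest(decoded)
	return req.Validate()
}

func main() {
	var req MetricAttributesRequest
	// Fails validation inside Unmarshal: start is not less than end.
	err := json.Unmarshal([]byte(`{"metricName":"http_requests","start":200,"end":100}`), &req)
	fmt.Println(err) // start (200) must be less than end (100)
}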
@@ -21,11 +21,11 @@ type JsonKeyToFieldFunc func(context.Context, *telemetrytypes.TelemetryFieldKey,
// FieldMapper maps the telemetry field key to the table field name.
type FieldMapper interface {
// FieldFor returns the field name for the given key.
-FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error)
+FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error)
// ColumnFor returns the column for the given key.
ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error)
// ColumnExpressionFor returns the column expression for the given key.
-ColumnExpressionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
+ColumnExpressionFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
}

// ConditionBuilder builds the condition for the filter.

@@ -37,8 +37,8 @@ type ConditionBuilder interface {

type AggExprRewriter interface {
// Rewrite rewrites the aggregation expression to be used in the query.
-Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
-RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
+Rewrite(ctx context.Context, startNs, endNs uint64, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
+RewriteMulti(ctx context.Context, startNs, endNs uint64, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
}

type Statement struct {
pkg/types/telemetrytypes/evolution_metadata.go (new file, 21 lines)
@@ -0,0 +1,21 @@
package telemetrytypes

import (
"context"
"time"

"github.com/SigNoz/signoz/pkg/valuer"
)

type KeyEvolutionMetadataKey struct {
BaseColumn string
BaseColumnType string
NewColumn string
NewColumnType string
ReleaseTime time.Time
}

type KeyEvolutionMetadataStore interface {
Get(ctx context.Context, orgId valuer.UUID, keyName string) []*KeyEvolutionMetadataKey
Add(ctx context.Context, orgId valuer.UUID, keyName string, key *KeyEvolutionMetadataKey)
}
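This store is what lets a field mapper pick between the old and new physical columns from the query window: a window entirely after ReleaseTime can be served by the new column, a window entirely before it only has data in the base column, and a straddling window needs both. A rough, self-contained sketch of that decision; the helper and its mirror type are hypothetical, and the mapper logic in this PR may differ:

package main

import (
	"fmt"
	"time"
)

// Local mirror of KeyEvolutionMetadataKey, trimmed to the fields this
// illustration needs; the real type is defined in the new file above.
type keyEvolution struct {
	BaseColumn  string
	NewColumn   string
	ReleaseTime time.Time
}

// pickColumns illustrates how evolution metadata could drive column
// selection from the startNs/endNs bounds now threaded through FieldFor.
func pickColumns(meta keyEvolution, startNs, endNs uint64) []string {
	releaseNs := uint64(meta.ReleaseTime.UnixNano())
	switch {
	case startNs >= releaseNs:
		// Whole window is after the cutover: the new column suffices.
		return []string{meta.NewColumn}
	case endNs < releaseNs:
		// Whole window predates the cutover: only the base column has data.
		return []string{meta.BaseColumn}
	default:
		// Window straddles the release: both columns are in play.
		return []string{meta.BaseColumn, meta.NewColumn}
	}
}

func main() {
	release := time.Date(2024, 1, 16, 0, 0, 0, 0, time.UTC) // placeholder instant
	meta := keyEvolution{BaseColumn: "resources_string", NewColumn: "resource", ReleaseTime: release}
	start := uint64(release.Add(24 * time.Hour).UnixNano())
	end := start + uint64((24 * time.Hour).Nanoseconds())
	fmt.Println(pickColumns(meta, start, end)) // [resource]
}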
@@ -0,0 +1,51 @@
package telemetrytypestest

import (
"context"

"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

// MockKeyEvolutionMetadataStore implements the KeyEvolutionMetadataStore interface for testing purposes
type MockKeyEvolutionMetadataStore struct {
metadata map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey // orgId -> keyName -> metadata
}

// NewMockKeyEvolutionMetadataStore creates a new instance of MockKeyEvolutionMetadataStore with initialized maps
func NewMockKeyEvolutionMetadataStore() *MockKeyEvolutionMetadataStore {
return &MockKeyEvolutionMetadataStore{
metadata: make(map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey),
}
}

// Get retrieves all metadata keys for the given key name and orgId.
// Returns an empty slice if the key is not found.
func (m *MockKeyEvolutionMetadataStore) Get(ctx context.Context, orgId valuer.UUID, keyName string) []*telemetrytypes.KeyEvolutionMetadataKey {
if m.metadata == nil {
return nil
}
orgMetadata, orgExists := m.metadata[orgId.String()]
if !orgExists {
return nil
}
keys, exists := orgMetadata[keyName]
if !exists {
return nil
}
// Return a copy to prevent external modification
result := make([]*telemetrytypes.KeyEvolutionMetadataKey, len(keys))
copy(result, keys)
return result
}

// Add adds a metadata key for the given key name and orgId
func (m *MockKeyEvolutionMetadataStore) Add(ctx context.Context, orgId valuer.UUID, keyName string, key *telemetrytypes.KeyEvolutionMetadataKey) {
if m.metadata == nil {
m.metadata = make(map[string]map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
}
if m.metadata[orgId.String()] == nil {
m.metadata[orgId.String()] = make(map[string][]*telemetrytypes.KeyEvolutionMetadataKey)
}
m.metadata[orgId.String()][keyName] = append(m.metadata[orgId.String()][keyName], key)
}
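A usage sketch for the mock, written as a test against the repo's packages; the test name, org value, and release time are placeholders, not code from this PR:

package telemetrytypestest_test

import (
	"context"
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func TestMockKeyEvolutionMetadataStore(t *testing.T) {
	ctx := context.Background()
	orgId := valuer.GenerateUUID()
	store := telemetrytypestest.NewMockKeyEvolutionMetadataStore()

	// Register a cutover for one org and key.
	store.Add(ctx, orgId, "resources_string", &telemetrytypes.KeyEvolutionMetadataKey{
		BaseColumn:  "resources_string",
		NewColumn:   "resource",
		ReleaseTime: time.Now(),
	})

	got := store.Get(ctx, orgId, "resources_string")
	if len(got) != 1 || got[0].NewColumn != "resource" {
		t.Fatalf("unexpected metadata: %+v", got)
	}

	// Lookups for other orgs or unknown keys come back empty.
	if store.Get(ctx, valuer.GenerateUUID(), "resources_string") != nil {
		t.Fatal("expected no metadata for a different org")
	}
}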