Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-27 09:22:12 +00:00)

Compare commits: 2 commits, chore/test ... testing-qb
| Author | SHA1 | Date |
|---|---|---|
|  | 8d03e35d84 |  |
|  | f267ed8ad1 |  |
@@ -5,8 +5,6 @@
   <br>SigNoz
 </h1>
-
-ok
 
 <p align="center">All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit <a href="https://signoz.io" target="_blank">signoz.io</a> for the full documentation, tutorials, and guide.</p>
 
 <p align="center">
@@ -224,6 +224,3 @@ statsreporter:
   enabled: true
   # The interval at which the stats are collected.
   interval: 6h
   collect:
     # Whether to collect identities and traits (emails).
     identities: true
@@ -119,7 +119,6 @@ export const updateFunnelSteps = async (
 export interface ValidateFunnelPayload {
 	start_time: number;
 	end_time: number;
-	steps: FunnelStepData[];
 }
 
 export interface ValidateFunnelResponse {
@@ -133,11 +132,12 @@ export interface ValidateFunnelResponse {
 }
 
 export const validateFunnelSteps = async (
+	funnelId: string,
 	payload: ValidateFunnelPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<ValidateFunnelResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/validate`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
 		payload,
 		{ signal },
 	);
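For reference, a minimal sketch of a call site after this change, assuming the `validateFunnelSteps` signature shown above; the funnel id and timestamps are illustrative values, not taken from the repo:

```typescript
import { validateFunnelSteps, ValidateFunnelPayload } from 'api/traceFunnels';

async function runValidation(): Promise<void> {
	// Steps no longer travel in the body; only the time range does.
	const payload: ValidateFunnelPayload = {
		start_time: 1713734400000, // ms epoch, example value
		end_time: 1713738000000,
	};
	// funnelId is now the first argument and is interpolated into the URL:
	// POST {FUNNELS_BASE_PATH}/{funnelId}/analytics/validate
	const response = await validateFunnelSteps('my-funnel-id', payload);
	console.log('validate response:', response);
}
```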
@@ -185,7 +185,6 @@ export interface FunnelOverviewPayload {
 	end_time: number;
 	step_start?: number;
 	step_end?: number;
-	steps: FunnelStepData[];
 }
 
 export interface FunnelOverviewResponse {
@@ -203,11 +202,12 @@ export interface FunnelOverviewResponse {
 }
 
 export const getFunnelOverview = async (
+	funnelId: string,
 	payload: FunnelOverviewPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<FunnelOverviewResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/overview`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
 		payload,
 		{
 			signal,
@@ -235,11 +235,12 @@ export interface SlowTraceData {
 }
 
 export const getFunnelSlowTraces = async (
+	funnelId: string,
 	payload: FunnelOverviewPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<SlowTraceData> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/slow-traces`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/slow-traces`,
 		payload,
 		{
 			signal,
@@ -272,7 +273,7 @@ export const getFunnelErrorTraces = async (
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<ErrorTraceData> | ErrorResponse> => {
 	const response: AxiosResponse = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/error-traces`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/error-traces`,
 		payload,
 		{
 			signal,
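All of the analytics fetchers in this file follow the same shape after the change. A hedged sketch of one call site, assuming the `getFunnelOverview` signature shown above (id and timestamps are illustrative):

```typescript
import { getFunnelOverview, FunnelOverviewPayload } from 'api/traceFunnels';

async function fetchOverview(): Promise<void> {
	// step_start/step_end select a step transition; values are examples.
	const payload: FunnelOverviewPayload = {
		start_time: 1713734400000,
		end_time: 1713738000000,
		step_start: 1,
		step_end: 2,
	};
	// The funnel id moves out of the body and into the URL:
	// POST {FUNNELS_BASE_PATH}/{funnelId}/analytics/overview
	const overview = await getFunnelOverview('my-funnel-id', payload);
	console.log('overview response:', overview);
}
```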
@@ -290,7 +291,6 @@ export const getFunnelErrorTraces = async (
 export interface FunnelStepsPayload {
 	start_time: number;
 	end_time: number;
-	steps: FunnelStepData[];
 }
 
 export interface FunnelStepGraphMetrics {
@@ -307,11 +307,12 @@ export interface FunnelStepsResponse {
 }
 
 export const getFunnelSteps = async (
+	funnelId: string,
 	payload: FunnelStepsPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<FunnelStepsResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/steps`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps`,
 		payload,
 		{ signal },
 	);
@@ -329,7 +330,6 @@ export interface FunnelStepsOverviewPayload {
 	end_time: number;
 	step_start?: number;
 	step_end?: number;
-	steps: FunnelStepData[];
 }
 
 export interface FunnelStepsOverviewResponse {
@@ -341,11 +341,12 @@ export interface FunnelStepsOverviewResponse {
 }
 
 export const getFunnelStepsOverview = async (
+	funnelId: string,
 	payload: FunnelStepsOverviewPayload,
 	signal?: AbortSignal,
 ): Promise<SuccessResponse<FunnelStepsOverviewResponse> | ErrorResponse> => {
 	const response = await axios.post(
-		`${FUNNELS_BASE_PATH}/analytics/steps/overview`,
+		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps/overview`,
 		payload,
 		{ signal },
 	);
@@ -30,5 +30,5 @@ export enum LOCALSTORAGE {
 	SHOW_EXCEPTIONS_QUICK_FILTERS = 'SHOW_EXCEPTIONS_QUICK_FILTERS',
 	BANNER_DISMISSED = 'BANNER_DISMISSED',
 	QUICK_FILTERS_SETTINGS_ANNOUNCEMENT = 'QUICK_FILTERS_SETTINGS_ANNOUNCEMENT',
-	FUNNEL_STEPS = 'FUNNEL_STEPS',
+	UNEXECUTED_FUNNELS = 'UNEXECUTED_FUNNELS',
 }
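The new key is consumed further down in this changeset (in `FunnelContext`): funnels that have never been run are tracked in localStorage so the UI can show a "Run funnel" call-to-action. A reduced sketch mirroring that usage:

```typescript
import { LOCALSTORAGE } from 'constants/localStorage';
import { useLocalStorage } from 'hooks/useLocalStorage';

// Sketch of the pattern from the FunnelContext hunk below: a funnel is
// considered "executed" unless its id sits in the UNEXECUTED_FUNNELS list.
function useHasFunnelBeenExecuted(funnelId: string): boolean {
	const [unexecutedFunnels] = useLocalStorage<string[]>(
		LOCALSTORAGE.UNEXECUTED_FUNNELS,
		[],
	);
	return !unexecutedFunnels.includes(funnelId);
}
```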
@@ -1,43 +0,0 @@
-import { render, screen } from '@testing-library/react';
-
-import HostsEmptyOrIncorrectMetrics from '../HostsEmptyOrIncorrectMetrics';
-
-describe('HostsEmptyOrIncorrectMetrics', () => {
-	it('shows no data message when noData is true', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData incorrectData={false} />);
-		expect(
-			screen.getByText('No host metrics data received yet.'),
-		).toBeInTheDocument();
-		expect(
-			screen.getByText(/Infrastructure monitoring requires the/),
-		).toBeInTheDocument();
-	});
-
-	it('shows incorrect data message when incorrectData is true', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData />);
-		expect(
-			screen.getByText(
-				'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
-			),
-		).toBeInTheDocument();
-	});
-
-	it('does not show no data message when noData is false', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
-		expect(
-			screen.queryByText('No host metrics data received yet.'),
-		).not.toBeInTheDocument();
-		expect(
-			screen.queryByText(/Infrastructure monitoring requires the/),
-		).not.toBeInTheDocument();
-	});
-
-	it('does not show incorrect data message when incorrectData is false', () => {
-		render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
-		expect(
-			screen.queryByText(
-				'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
-			),
-		).not.toBeInTheDocument();
-	});
-});
@@ -1,166 +0,0 @@
-/* eslint-disable react/button-has-type */
-import { render } from '@testing-library/react';
-import ROUTES from 'constants/routes';
-import * as useGetHostListHooks from 'hooks/infraMonitoring/useGetHostList';
-import * as appContextHooks from 'providers/App/App';
-import * as timezoneHooks from 'providers/Timezone';
-import { QueryClient, QueryClientProvider } from 'react-query';
-import { Provider } from 'react-redux';
-import { MemoryRouter } from 'react-router-dom';
-import store from 'store';
-import { LicenseEvent } from 'types/api/licensesV3/getActive';
-
-import HostsList from '../HostsList';
-
-jest.mock('lib/getMinMax', () => ({
-	__esModule: true,
-	default: jest.fn().mockImplementation(() => ({
-		minTime: 1713734400000,
-		maxTime: 1713738000000,
-		isValidTimeFormat: jest.fn().mockReturnValue(true),
-	})),
-}));
-jest.mock('components/CustomTimePicker/CustomTimePicker', () => ({
-	__esModule: true,
-	default: ({ onSelect, selectedTime, selectedValue }: any): JSX.Element => (
-		<div data-testid="custom-time-picker">
-			<button onClick={(): void => onSelect('custom')}>
-				{selectedTime} - {selectedValue}
-			</button>
-		</div>
-	),
-}));
-
-const queryClient = new QueryClient();
-
-jest.mock('uplot', () => {
-	const paths = {
-		spline: jest.fn(),
-		bars: jest.fn(),
-	};
-	const uplotMock = jest.fn(() => ({
-		paths,
-	}));
-	return {
-		paths,
-		default: uplotMock,
-	};
-});
-jest.mock('react-redux', () => ({
-	...jest.requireActual('react-redux'),
-	useSelector: (): any => ({
-		globalTime: {
-			selectedTime: {
-				startTime: 1713734400000,
-				endTime: 1713738000000,
-			},
-			maxTime: 1713738000000,
-			minTime: 1713734400000,
-		},
-	}),
-}));
-jest.mock('react-router-dom', () => ({
-	...jest.requireActual('react-router-dom'),
-	useLocation: jest.fn().mockReturnValue({
-		pathname: ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
-	}),
-}));
-jest.mock('react-router-dom-v5-compat', () => {
-	const actual = jest.requireActual('react-router-dom-v5-compat');
-	return {
-		...actual,
-		useSearchParams: jest
-			.fn()
-			.mockReturnValue([
-				{ get: jest.fn(), entries: jest.fn().mockReturnValue([]) },
-				jest.fn(),
-			]),
-		useNavigationType: (): any => 'PUSH',
-	};
-});
-jest.mock('hooks/useSafeNavigate', () => ({
-	useSafeNavigate: (): any => ({
-		safeNavigate: jest.fn(),
-	}),
-}));
-
-jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
-	timezone: {
-		offset: 0,
-	},
-	browserTimezone: {
-		offset: 0,
-	},
-} as any);
-jest.spyOn(useGetHostListHooks, 'useGetHostList').mockReturnValue({
-	data: {
-		payload: {
-			data: {
-				records: [
-					{
-						hostName: 'test-host',
-						active: true,
-						cpu: 0.75,
-						memory: 0.65,
-						wait: 0.03,
-					},
-				],
-				isSendingK8SAgentMetrics: false,
-				sentAnyHostMetricsData: true,
-			},
-		},
-	},
-	isLoading: false,
-	isError: false,
-} as any);
-jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
-	user: {
-		role: 'admin',
-	},
-	activeLicenseV3: {
-		event_queue: {
-			created_at: '0',
-			event: LicenseEvent.NO_EVENT,
-			scheduled_at: '0',
-			status: '',
-			updated_at: '0',
-		},
-		license: {
-			license_key: 'test-license-key',
-			license_type: 'trial',
-			org_id: 'test-org-id',
-			plan_id: 'test-plan-id',
-			plan_name: 'test-plan-name',
-			plan_type: 'trial',
-			plan_version: 'test-plan-version',
-		},
-	},
-} as any);
-
-describe('HostsList', () => {
-	it('renders hosts list table', () => {
-		const { container } = render(
-			<QueryClientProvider client={queryClient}>
-				<MemoryRouter>
-					<Provider store={store}>
-						<HostsList />
-					</Provider>
-				</MemoryRouter>
-			</QueryClientProvider>,
-		);
-		expect(container.querySelector('.hosts-list-table')).toBeInTheDocument();
-	});
-
-	it('renders filters', () => {
-		const { container } = render(
-			<QueryClientProvider client={queryClient}>
-				<MemoryRouter>
-					<Provider store={store}>
-						<HostsList />
-					</Provider>
-				</MemoryRouter>
-			</QueryClientProvider>,
-		);
-		expect(container.querySelector('.filters')).toBeInTheDocument();
-	});
-});
@@ -1,37 +0,0 @@
-import { render, screen } from '@testing-library/react';
-
-import HostsListControls from '../HostsListControls';
-
-jest.mock('container/QueryBuilder/filters/QueryBuilderSearch', () => ({
-	__esModule: true,
-	default: (): JSX.Element => (
-		<div data-testid="query-builder-search">Search</div>
-	),
-}));
-
-jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
-	__esModule: true,
-	default: (): JSX.Element => (
-		<div data-testid="date-time-selection">Date Time</div>
-	),
-}));
-
-describe('HostsListControls', () => {
-	const mockHandleFiltersChange = jest.fn();
-	const mockFilters = {
-		items: [],
-		op: 'AND',
-	};
-
-	it('renders search and date time filters', () => {
-		render(
-			<HostsListControls
-				handleFiltersChange={mockHandleFiltersChange}
-				filters={mockFilters}
-			/>,
-		);
-
-		expect(screen.getByTestId('query-builder-search')).toBeInTheDocument();
-		expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
-	});
-});
@@ -1,139 +0,0 @@
-/* eslint-disable react/jsx-props-no-spreading */
-import { render, screen } from '@testing-library/react';
-
-import HostsListTable from '../HostsListTable';
-
-jest.mock('uplot', () => {
-	const paths = {
-		spline: jest.fn(),
-		bars: jest.fn(),
-	};
-	const uplotMock = jest.fn(() => ({
-		paths,
-	}));
-	return {
-		paths,
-		default: uplotMock,
-	};
-});
-
-const EMPTY_STATE_CONTAINER_CLASS = '.hosts-empty-state-container';
-
-describe('HostsListTable', () => {
-	const mockHost = {
-		hostName: 'test-host-1',
-		active: true,
-		cpu: 0.75,
-		memory: 0.65,
-		wait: 0.03,
-		load15: 1.5,
-		os: 'linux',
-	};
-
-	const mockTableData = {
-		payload: {
-			data: {
-				hosts: [mockHost],
-			},
-		},
-	};
-	const mockOnHostClick = jest.fn();
-	const mockSetCurrentPage = jest.fn();
-	const mockSetOrderBy = jest.fn();
-	const mockSetPageSize = jest.fn();
-	const mockProps = {
-		isLoading: false,
-		isError: false,
-		isFetching: false,
-		tableData: mockTableData,
-		hostMetricsData: [mockHost],
-		filters: {
-			items: [],
-			op: 'AND',
-		},
-		onHostClick: mockOnHostClick,
-		currentPage: 1,
-		setCurrentPage: mockSetCurrentPage,
-		pageSize: 10,
-		setOrderBy: mockSetOrderBy,
-		setPageSize: mockSetPageSize,
-	} as any;
-
-	it('renders loading state if isLoading is true', () => {
-		const { container } = render(<HostsListTable {...mockProps} isLoading />);
-		expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
-	});
-
-	it('renders loading state if isFetching is true', () => {
-		const { container } = render(<HostsListTable {...mockProps} isFetching />);
-		expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
-	});
-
-	it('renders error state if isError is true', () => {
-		render(<HostsListTable {...mockProps} isError />);
-		expect(screen.getByText('Something went wrong')).toBeTruthy();
-	});
-
-	it('renders empty state if no hosts are found', () => {
-		const { container } = render(<HostsListTable {...mockProps} />);
-		expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
-	});
-
-	it('renders empty state if sentAnyHostMetricsData is false', () => {
-		const { container } = render(
-			<HostsListTable
-				{...mockProps}
-				tableData={{
-					...mockTableData,
-					payload: {
-						...mockTableData.payload,
-						data: {
-							...mockTableData.payload.data,
-							sentAnyHostMetricsData: false,
-						},
-					},
-				}}
-			/>,
-		);
-		expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
-	});
-
-	it('renders empty state if isSendingIncorrectK8SAgentMetrics is true', () => {
-		const { container } = render(
-			<HostsListTable
-				{...mockProps}
-				tableData={{
-					...mockTableData,
-					payload: {
-						...mockTableData.payload,
-						data: {
-							...mockTableData.payload.data,
-							isSendingIncorrectK8SAgentMetrics: true,
-						},
-					},
-				}}
-			/>,
-		);
-		expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
-	});
-
-	it('renders table data', () => {
-		const { container } = render(
-			<HostsListTable
-				{...mockProps}
-				tableData={{
-					...mockTableData,
-					payload: {
-						...mockTableData.payload,
-						data: {
-							...mockTableData.payload.data,
-							isSendingIncorrectK8SAgentMetrics: false,
-							sentAnyHostMetricsData: true,
-						},
-					},
-				}}
-			/>,
-		);
-		expect(container.querySelector('.hosts-list-table')).toBeTruthy();
-	});
-});
@@ -1,104 +0,0 @@
-import { render } from '@testing-library/react';
-
-import { formatDataForTable, GetHostsQuickFiltersConfig } from '../utils';
-
-const PROGRESS_BAR_CLASS = '.progress-bar';
-
-jest.mock('uplot', () => {
-	const paths = {
-		spline: jest.fn(),
-		bars: jest.fn(),
-	};
-	const uplotMock = jest.fn(() => ({
-		paths,
-	}));
-	return {
-		paths,
-		default: uplotMock,
-	};
-});
-
-describe('InfraMonitoringHosts utils', () => {
-	describe('formatDataForTable', () => {
-		it('should format host data correctly', () => {
-			const mockData = [
-				{
-					hostName: 'test-host',
-					active: true,
-					cpu: 0.95,
-					memory: 0.85,
-					wait: 0.05,
-					load15: 2.5,
-					os: 'linux',
-				},
-			] as any;
-
-			const result = formatDataForTable(mockData);
-
-			expect(result[0].hostName).toBe('test-host');
-			expect(result[0].wait).toBe('5%');
-			expect(result[0].load15).toBe(2.5);
-
-			// Test active tag rendering
-			const activeTag = render(result[0].active as JSX.Element);
-			expect(activeTag.container.textContent).toBe('ACTIVE');
-			expect(activeTag.container.querySelector('.active')).toBeTruthy();
-
-			// Test CPU progress bar
-			const cpuProgress = render(result[0].cpu as JSX.Element);
-			const cpuProgressBar = cpuProgress.container.querySelector(
-				PROGRESS_BAR_CLASS,
-			);
-			expect(cpuProgressBar).toBeTruthy();
-
-			// Test memory progress bar
-			const memoryProgress = render(result[0].memory as JSX.Element);
-			const memoryProgressBar = memoryProgress.container.querySelector(
-				PROGRESS_BAR_CLASS,
-			);
-			expect(memoryProgressBar).toBeTruthy();
-		});
-
-		it('should handle inactive hosts', () => {
-			const mockData = [
-				{
-					hostName: 'test-host',
-					active: false,
-					cpu: 0.3,
-					memory: 0.4,
-					wait: 0.02,
-					load15: 1.2,
-					os: 'linux',
-					cpuTimeSeries: [],
-					memoryTimeSeries: [],
-					waitTimeSeries: [],
-					load15TimeSeries: [],
-				},
-			] as any;
-
-			const result = formatDataForTable(mockData);
-
-			const inactiveTag = render(result[0].active as JSX.Element);
-			expect(inactiveTag.container.textContent).toBe('INACTIVE');
-			expect(inactiveTag.container.querySelector('.inactive')).toBeTruthy();
-		});
-	});
-
-	describe('GetHostsQuickFiltersConfig', () => {
-		it('should return correct config when dotMetricsEnabled is true', () => {
-			const result = GetHostsQuickFiltersConfig(true);
-
-			expect(result[0].attributeKey.key).toBe('host.name');
-			expect(result[1].attributeKey.key).toBe('os.type');
-			expect(result[0].aggregateAttribute).toBe('system.cpu.load_average.15m');
-		});
-
-		it('should return correct config when dotMetricsEnabled is false', () => {
-			const result = GetHostsQuickFiltersConfig(false);
-
-			expect(result[0].attributeKey.key).toBe('host_name');
-			expect(result[1].attributeKey.key).toBe('os_type');
-			expect(result[0].aggregateAttribute).toBe('system_cpu_load_average_15m');
-		});
-	});
-});
@@ -611,7 +611,9 @@ export const errorPercentage = ({
 	{
 		id: '',
 		key: {
-			key: dotMetricsEnabled ? WidgetKeys.StatusCode : WidgetKeys.StatusCodeNorm,
+			key: dotMetricsEnabled
+				? WidgetKeys.Service_name
+				: WidgetKeys.StatusCodeNorm,
 			dataType: DataTypes.Int64,
 			isColumn: false,
 			type: MetricsType.Tag,
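The `dotMetricsEnabled` ternary follows a convention that recurs across this changeset (the deleted quick-filters test above asserts the same split for `host.name` vs `host_name`): dotted OTel-style attribute names when dot metrics are enabled, underscore-normalized names otherwise. A reduced sketch of the pattern; the string literals are illustrative, not the actual `WidgetKeys` constants:

```typescript
// Hypothetical stand-in for the WidgetKeys lookup used above.
const dotMetricsEnabled = true; // assumption for the example

const statusCodeKey = dotMetricsEnabled
	? 'http.status_code' // dotted form (WidgetKeys.StatusCode-style)
	: 'http_status_code'; // normalized form (WidgetKeys.StatusCodeNorm-style)

console.log('query attribute key:', statusCodeKey);
```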
@@ -241,15 +241,6 @@
 		&-title {
 			color: var(--bg-ink-500);
 		}
-
-		&-footer {
-			border-top-color: var(--bg-vanilla-300);
-			background: var(--bg-vanilla-100);
-			.add-span-to-funnel-modal__discard-button {
-				background: var(--bg-vanilla-200);
-				color: var(--bg-ink-500);
-			}
-		}
 	}
 }
@@ -72,6 +72,7 @@ function FunnelDetailsView({
 	funnel={funnel}
 	isTraceDetailsPage
 	span={span}
+	disableAutoSave
 	triggerAutoSave={triggerAutoSave}
 	showNotifications={showNotifications}
 />
@@ -142,19 +143,13 @@ function AddSpanToFunnelModal({
 	const handleSaveFunnel = (): void => {
 		setTriggerSave(true);
 		// Reset trigger after a brief moment to allow the save to be processed
-		setTimeout(() => {
-			setTriggerSave(false);
-			onClose();
-		}, 100);
+		setTimeout(() => setTriggerSave(false), 100);
 	};
 
 	const handleDiscard = (): void => {
 		setTriggerDiscard(true);
 		// Reset trigger after a brief moment
-		setTimeout(() => {
-			setTriggerDiscard(false);
-			onClose();
-		}, 100);
+		setTimeout(() => setTriggerDiscard(false), 100);
 	};
 
 	const renderListView = (): JSX.Element => (
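These handlers use a boolean-pulse pattern: the parent flips a flag so a child effect acts once, then resets the flag shortly after so the same action can fire again later (with this change, the modal stays open instead of closing on the pulse). A minimal sketch of the pattern, with hypothetical names:

```typescript
import { useEffect, useState } from 'react';

// Returns a trigger function; `onSave` is the child-side reaction.
function useSaveTrigger(onSave: () => void): () => void {
	const [triggerSave, setTriggerSave] = useState(false);

	useEffect(() => {
		// Child effect: react once each time the flag is pulsed to true.
		if (triggerSave) onSave();
	}, [triggerSave, onSave]);

	return (): void => {
		setTriggerSave(true);
		// Reset after a brief moment, mirroring the handlers above.
		setTimeout(() => setTriggerSave(false), 100);
	};
}
```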
@@ -244,6 +239,9 @@ function AddSpanToFunnelModal({
 	footer={
 		activeView === ModalView.DETAILS
 			? [
+					<Button key="close" onClick={onClose}>
+						Close
+					</Button>,
 					<Button
 						type="default"
 						key="discard"
@@ -1,13 +1,10 @@
-import { LOCALSTORAGE } from 'constants/localStorage';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import useDebounce from 'hooks/useDebounce';
-import { useLocalStorage } from 'hooks/useLocalStorage';
 import { useNotifications } from 'hooks/useNotifications';
 import { isEqual } from 'lodash-es';
 import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
-import { useCallback, useEffect, useRef, useState } from 'react';
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import { useQueryClient } from 'react-query';
-import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { FunnelData, FunnelStepData } from 'types/api/traceFunnels';
 
 import { useUpdateFunnelSteps } from './useFunnels';
@@ -16,30 +13,22 @@ interface UseFunnelConfiguration {
 	isPopoverOpen: boolean;
 	setIsPopoverOpen: (isPopoverOpen: boolean) => void;
 	steps: FunnelStepData[];
-	isSaving: boolean;
 }
 
-// Add this helper function
-export const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
+const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
 	if (steps.some((step) => !step.filters)) return steps;
 
 	return steps.map((step) => ({
 		...step,
 		filters: {
 			...step.filters,
-			items: step.filters.items.map((item) => {
-				const {
-					id: unusedId,
-					isIndexed,
-					...keyObj
-				} = item.key as BaseAutocompleteData;
-				return {
-					id: '',
-					key: keyObj,
-					value: item.value,
-					op: item.op,
-				};
-			}),
+			items: step.filters.items.map((item) => ({
+				id: '',
+				key: item.key,
+				value: item.value,
+				op: item.op,
+			})),
 		},
 	}));
 };
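A worked example of what `normalizeSteps` does to a step's filter items, based on the new implementation above: item ids are blanked so two step lists can be compared structurally with `isEqual`. The data is illustrative:

```typescript
// Input step (shape per FunnelStepData.filters in the hunk above):
const step = {
	filters: {
		op: 'AND',
		items: [
			{ id: 'abc-123', key: { key: 'service.name' }, value: 'cartservice', op: '=' },
		],
	},
};

// After normalizeSteps, the item id no longer distinguishes the lists:
// { id: '', key: { key: 'service.name' }, value: 'cartservice', op: '=' }
// so isEqual(normalizeSteps(a), normalizeSteps(b)) compares only the
// meaningful fields (key, value, op).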
@@ -47,22 +36,22 @@ export const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
 // eslint-disable-next-line sonarjs/cognitive-complexity
 export default function useFunnelConfiguration({
 	funnel,
 	disableAutoSave = false,
 	triggerAutoSave = false,
 	showNotifications = false,
 }: {
 	funnel: FunnelData;
 	disableAutoSave?: boolean;
 	triggerAutoSave?: boolean;
 	showNotifications?: boolean;
 }): UseFunnelConfiguration {
 	const { notifications } = useNotifications();
 	const queryClient = useQueryClient();
 	const {
 		steps,
 		lastUpdatedSteps,
 		setLastUpdatedSteps,
 		initialSteps,
 		hasIncompleteStepFields,
 		handleRestoreSteps,
 		selectedTime,
 		setIsUpdatingFunnel,
 		handleRunFunnel,
 	} = useFunnelContext();
 
 	// State management
@@ -70,6 +59,10 @@ export default function useFunnelConfiguration({
 
 	const debouncedSteps = useDebounce(steps, 200);
 
+	const [lastValidatedSteps, setLastValidatedSteps] = useState<FunnelStepData[]>(
+		initialSteps,
+	);
+
 	// Mutation hooks
 	const updateStepsMutation = useUpdateFunnelSteps(
 		funnel.funnel_id,
@@ -78,15 +71,6 @@ export default function useFunnelConfiguration({
 
 	// Derived state
 	const lastSavedStepsStateRef = useRef<FunnelStepData[]>(steps);
-	const hasRestoredFromLocalStorage = useRef(false);
-
-	// localStorage hook for funnel steps
-	const localStorageKey = `${LOCALSTORAGE.FUNNEL_STEPS}_${funnel.funnel_id}`;
-	const [
-		localStorageSavedSteps,
-		setLocalStorageSavedSteps,
-		clearLocalStorageSavedSteps,
-	] = useLocalStorage<FunnelStepData[] | null>(localStorageKey, null);
 
 	const hasStepsChanged = useCallback(() => {
 		const normalizedLastSavedSteps = normalizeSteps(
@@ -96,34 +80,6 @@ export default function useFunnelConfiguration({
 		return !isEqual(normalizedDebouncedSteps, normalizedLastSavedSteps);
 	}, [debouncedSteps]);
 
-	// Handle localStorage for funnel steps
-	useEffect(() => {
-		// Restore from localStorage on first run if
-		if (!hasRestoredFromLocalStorage.current) {
-			const savedSteps = localStorageSavedSteps;
-			if (savedSteps) {
-				handleRestoreSteps(savedSteps);
-				hasRestoredFromLocalStorage.current = true;
-				return;
-			}
-		}
-
-		// Save steps to localStorage
-		if (hasStepsChanged()) {
-			setLocalStorageSavedSteps(debouncedSteps);
-		}
-	}, [
-		debouncedSteps,
-		funnel.funnel_id,
-		hasStepsChanged,
-		handleRestoreSteps,
-		localStorageSavedSteps,
-		setLocalStorageSavedSteps,
-		queryClient,
-		selectedTime,
-		lastUpdatedSteps,
-	]);
-
 	const hasFunnelStepDefinitionsChanged = useCallback(
 		(prevSteps: FunnelStepData[], nextSteps: FunnelStepData[]): boolean => {
 			if (prevSteps.length !== nextSteps.length) return true;
@@ -141,6 +97,15 @@ export default function useFunnelConfiguration({
 		[],
 	);
 
+	const hasFunnelLatencyTypeChanged = useCallback(
+		(prevSteps: FunnelStepData[], nextSteps: FunnelStepData[]): boolean =>
+			prevSteps.some((step, index) => {
+				const nextStep = nextSteps[index];
+				return step.latency_type !== nextStep.latency_type;
+			}),
+		[],
+	);
+
 	// Mutation payload preparation
 	const getUpdatePayload = useCallback(
 		() => ({
@@ -151,19 +116,33 @@ export default function useFunnelConfiguration({
 		[funnel.funnel_id, debouncedSteps],
 	);
 
-	const queryClient = useQueryClient();
-	const { selectedTime } = useFunnelContext();
+	const validateStepsQueryKey = useMemo(
+		() => [REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS, funnel.funnel_id, selectedTime],
+		[funnel.funnel_id, selectedTime],
+	);
 
 	// eslint-disable-next-line sonarjs/cognitive-complexity
 	useEffect(() => {
-		if (triggerAutoSave && !isEqual(debouncedSteps, lastUpdatedSteps)) {
-			setIsUpdatingFunnel(true);
+		// Determine if we should save based on the mode
+		let shouldSave = false;
+
+		if (disableAutoSave) {
+			// Manual save mode: only save when explicitly triggered
+			shouldSave = triggerAutoSave;
+		} else {
+			// Auto-save mode: save when steps have changed and no incomplete fields
+			shouldSave = hasStepsChanged() && !hasIncompleteStepFields;
+		}
+
+		if (shouldSave && !isEqual(debouncedSteps, lastValidatedSteps)) {
 			updateStepsMutation.mutate(getUpdatePayload(), {
 				onSuccess: (data) => {
 					const updatedFunnelSteps = data?.payload?.steps;
 
 					if (!updatedFunnelSteps) return;
 
-					// Clear localStorage since steps are saved successfully
-					clearLocalStorageSavedSteps();
-
 					queryClient.setQueryData(
 						[REACT_QUERY_KEY.GET_FUNNEL_DETAILS, funnel.funnel_id],
 						(oldData: any) => {
@@ -184,9 +163,17 @@ export default function useFunnelConfiguration({
 						(step) => step.service_name === '' || step.span_name === '',
 					);
 
+					if (hasFunnelLatencyTypeChanged(lastValidatedSteps, debouncedSteps)) {
+						handleRunFunnel();
+						setLastValidatedSteps(debouncedSteps);
+					}
 					// Only validate if funnel steps definitions
-					if (!hasIncompleteStepFields) {
-						setLastUpdatedSteps(debouncedSteps);
+					else if (
+						!hasIncompleteStepFields &&
+						hasFunnelStepDefinitionsChanged(lastValidatedSteps, debouncedSteps)
+					) {
+						queryClient.refetchQueries(validateStepsQueryKey);
+						setLastValidatedSteps(debouncedSteps);
 					}
 
 					// Show success notification only when requested
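A note on why `refetchQueries(validateStepsQueryKey)` reaches the right query: react-query (v3, which this codebase uses) treats a query key as a prefix filter, so refetching `[VALIDATE_FUNNEL_STEPS, funnelId, selectedTime]` matches the query registered by `useValidateFunnelSteps` under the same key. A reduced sketch with illustrative key values:

```typescript
import { QueryClient } from 'react-query';

const queryClient = new QueryClient();

// Illustrative key; in the app this is built from REACT_QUERY_KEY constants.
const validateStepsQueryKey = ['VALIDATE_FUNNEL_STEPS', 'funnel-1', '30m'];

// Triggers a refetch of every active query whose key starts with this prefix.
queryClient.refetchQueries(validateStepsQueryKey);
```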
@@ -229,18 +216,17 @@ export default function useFunnelConfiguration({
 		getUpdatePayload,
 		hasFunnelStepDefinitionsChanged,
 		hasStepsChanged,
-		lastUpdatedSteps,
+		lastValidatedSteps,
 		queryClient,
+		validateStepsQueryKey,
 		triggerAutoSave,
 		showNotifications,
-		localStorageSavedSteps,
-		clearLocalStorageSavedSteps,
+		disableAutoSave,
 	]);
 
 	return {
 		isPopoverOpen,
 		setIsPopoverOpen,
 		steps,
-		isSaving: updateStepsMutation.isLoading,
 	};
 }
@@ -20,11 +20,10 @@ export function useFunnelMetrics({
 	metricsData: MetricItem[];
 	conversionRate: number;
 } {
-	const { startTime, endTime, steps } = useFunnelContext();
+	const { startTime, endTime } = useFunnelContext();
 	const payload = {
 		start_time: startTime,
 		end_time: endTime,
-		steps,
 	};
 
 	const {
@@ -82,7 +81,6 @@ export function useFunnelStepsMetrics({
 		end_time: endTime,
 		step_start: stepStart,
 		step_end: stepEnd,
-		steps,
 	};
 
 	const {
@@ -7,7 +7,6 @@ import {
 	FunnelOverviewResponse,
 	FunnelStepsOverviewPayload,
 	FunnelStepsOverviewResponse,
-	FunnelStepsPayload,
 	FunnelStepsResponse,
 	getFunnelById,
 	getFunnelErrorTraces,
@@ -38,7 +37,6 @@ import {
 	CreateFunnelPayload,
 	CreateFunnelResponse,
 	FunnelData,
-	FunnelStepData,
 } from 'types/api/traceFunnels';
 
 export const useFunnelsList = (): UseQueryResult<
@@ -119,14 +117,12 @@ export const useValidateFunnelSteps = ({
 	startTime,
 	endTime,
 	enabled,
-	steps,
 }: {
 	funnelId: string;
 	selectedTime: string;
 	startTime: number;
 	endTime: number;
 	enabled: boolean;
-	steps: FunnelStepData[];
 }): UseQueryResult<
 	SuccessResponse<ValidateFunnelResponse> | ErrorResponse,
 	Error
@@ -134,19 +130,11 @@ export const useValidateFunnelSteps = ({
 	useQuery({
 		queryFn: ({ signal }) =>
 			validateFunnelSteps(
-				{ start_time: startTime, end_time: endTime, steps },
+				funnelId,
+				{ start_time: startTime, end_time: endTime },
 				signal,
 			),
-		queryKey: [
-			REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS,
-			funnelId,
-			selectedTime,
-			steps.map((step) => {
-				// eslint-disable-next-line @typescript-eslint/naming-convention
-				const { latency_type, ...rest } = step;
-				return rest;
-			}),
-		],
+		queryKey: [REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS, funnelId, selectedTime],
 		enabled,
 		staleTime: 0,
 	});
@@ -180,17 +168,18 @@ export const useFunnelOverview = (
 	const {
 		selectedTime,
 		validTracesCount,
-		isUpdatingFunnel,
+		hasFunnelBeenExecuted,
 	} = useFunnelContext();
 	return useQuery({
-		queryFn: ({ signal }) => getFunnelOverview(payload, signal),
+		queryFn: ({ signal }) => getFunnelOverview(funnelId, payload, signal),
 		queryKey: [
 			REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW,
 			funnelId,
 			selectedTime,
-			payload.steps,
 			payload.step_start ?? '',
 			payload.step_end ?? '',
 		],
-		enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
+		enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
 	});
 };
@@ -201,19 +190,18 @@ export const useFunnelSlowTraces = (
 	const {
 		selectedTime,
 		validTracesCount,
-		isUpdatingFunnel,
+		hasFunnelBeenExecuted,
 	} = useFunnelContext();
 	return useQuery<SuccessResponse<SlowTraceData> | ErrorResponse, Error>({
-		queryFn: ({ signal }) => getFunnelSlowTraces(payload, signal),
+		queryFn: ({ signal }) => getFunnelSlowTraces(funnelId, payload, signal),
 		queryKey: [
 			REACT_QUERY_KEY.GET_FUNNEL_SLOW_TRACES,
 			funnelId,
 			selectedTime,
 			payload.step_start ?? '',
 			payload.step_end ?? '',
-			payload.steps,
 		],
-		enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
+		enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
 	});
 };
@@ -224,7 +212,7 @@ export const useFunnelErrorTraces = (
 	const {
 		selectedTime,
 		validTracesCount,
-		isUpdatingFunnel,
+		hasFunnelBeenExecuted,
 	} = useFunnelContext();
 	return useQuery({
 		queryFn: ({ signal }) => getFunnelErrorTraces(funnelId, payload, signal),
@@ -234,31 +222,35 @@ export const useFunnelErrorTraces = (
 		selectedTime,
 		payload.step_start ?? '',
 		payload.step_end ?? '',
-		payload.steps,
 	],
-	enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
+	enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
 	});
 };
 
 export function useFunnelStepsGraphData(
 	funnelId: string,
-	payload: FunnelStepsPayload,
 ): UseQueryResult<SuccessResponse<FunnelStepsResponse> | ErrorResponse, Error> {
 	const {
+		startTime,
+		endTime,
 		selectedTime,
 		validTracesCount,
-		isUpdatingFunnel,
+		hasFunnelBeenExecuted,
 	} = useFunnelContext();
 
 	return useQuery({
-		queryFn: ({ signal }) => getFunnelSteps(payload, signal),
+		queryFn: ({ signal }) =>
+			getFunnelSteps(
+				funnelId,
+				{ start_time: startTime, end_time: endTime },
+				signal,
+			),
 		queryKey: [
 			REACT_QUERY_KEY.GET_FUNNEL_STEPS_GRAPH_DATA,
 			funnelId,
 			selectedTime,
-			payload.steps,
 		],
-		enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
+		enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
 	});
 }
@@ -272,18 +264,17 @@ export const useFunnelStepsOverview = (
 	const {
 		selectedTime,
 		validTracesCount,
-		isUpdatingFunnel,
+		hasFunnelBeenExecuted,
 	} = useFunnelContext();
 	return useQuery({
-		queryFn: ({ signal }) => getFunnelStepsOverview(payload, signal),
+		queryFn: ({ signal }) => getFunnelStepsOverview(funnelId, payload, signal),
 		queryKey: [
 			REACT_QUERY_KEY.GET_FUNNEL_STEPS_OVERVIEW,
 			funnelId,
 			selectedTime,
 			payload.step_start ?? '',
 			payload.step_end ?? '',
-			payload.steps,
 		],
-		enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
+		enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
 	});
 };
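The same `enabled` gate recurs across all of these hooks: nothing fetches until the funnel exists, has matching traces, and has been explicitly run at least once. A reduced sketch of the pattern; the query key and endpoint are illustrative, not the app's:

```typescript
import { useQuery, UseQueryResult } from 'react-query';

function useGatedFunnelQuery(
	funnelId: string,
	validTracesCount: number,
	hasFunnelBeenExecuted: boolean,
): UseQueryResult<unknown, unknown> {
	return useQuery({
		queryKey: ['funnel-analytics', funnelId],
		// Illustrative fetcher; the real hooks call the api/traceFunnels helpers.
		queryFn: () => fetch(`/api/funnels/${funnelId}`).then((r) => r.json()),
		// The query never fires until every precondition holds, which replaces
		// the previous `!isUpdatingFunnel` guard.
		enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
	});
}
```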
@@ -7,7 +7,6 @@ import NewWidget from 'container/NewWidget';
 import { useSafeNavigate } from 'hooks/useSafeNavigate';
 import useUrlQuery from 'hooks/useUrlQuery';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
-import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
 import { useEffect, useState } from 'react';
 import { generatePath, useLocation, useParams } from 'react-router-dom';
 import { Widgets } from 'types/api/dashboard/getAll';
@@ -53,13 +52,11 @@ function DashboardWidget(): JSX.Element | null {
 	}
 
 	return (
-		<PreferenceContextProvider>
-			<NewWidget
-				yAxisUnit={selectedWidget?.yAxisUnit}
-				selectedGraph={selectedGraph}
-				fillSpans={selectedWidget?.fillSpans}
-			/>
-		</PreferenceContextProvider>
+		<NewWidget
+			yAxisUnit={selectedWidget?.yAxisUnit}
+			selectedGraph={selectedGraph}
+			fillSpans={selectedWidget?.fillSpans}
+		/>
 	);
 }
@@ -3,14 +3,9 @@ import ROUTES from 'constants/routes';
 import InfraMonitoringHosts from 'container/InfraMonitoringHosts';
 import InfraMonitoringK8s from 'container/InfraMonitoringK8s';
 import { Inbox } from 'lucide-react';
-import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
 
 export const Hosts: TabRoutes = {
-	Component: (): JSX.Element => (
-		<PreferenceContextProvider>
-			<InfraMonitoringHosts />
-		</PreferenceContextProvider>
-	),
+	Component: InfraMonitoringHosts,
 	name: (
 		<div className="tab-item">
 			<Inbox size={16} /> Hosts
@@ -21,11 +16,7 @@ export const Hosts: TabRoutes = {
 };
 
 export const Kubernetes: TabRoutes = {
-	Component: (): JSX.Element => (
-		<PreferenceContextProvider>
-			<InfraMonitoringK8s />
-		</PreferenceContextProvider>
-	),
+	Component: InfraMonitoringK8s,
 	name: (
 		<div className="tab-item">
 			<Inbox size={16} /> Kubernetes
@@ -10,7 +10,6 @@ import LogsFilters from 'container/LogsFilters';
 import LogsSearchFilter from 'container/LogsSearchFilter';
 import LogsTable from 'container/LogsTable';
 import history from 'lib/history';
-import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
 import { useCallback, useMemo } from 'react';
 import { useDispatch, useSelector } from 'react-redux';
 import { useLocation } from 'react-router-dom';
@@ -83,71 +82,69 @@ function OldLogsExplorer(): JSX.Element {
 	};
 
 	return (
-		<PreferenceContextProvider>
-			<div className="old-logs-explorer">
-				<SpaceContainer
-					split={<Divider type="vertical" />}
-					align="center"
-					direction="horizontal"
-				>
-					<LogsSearchFilter />
-					<LogLiveTail />
-				</SpaceContainer>
-
-				<LogsAggregate />
-
-				<Row gutter={20} wrap={false}>
-					<LogsFilters />
-					<Col flex={1} className="logs-col-container">
-						<Row>
-							<Col flex={1}>
-								<Space align="baseline" direction="horizontal">
-									<Select
-										getPopupContainer={popupContainer}
-										style={defaultSelectStyle}
-										value={selectedViewModeOption}
-										onChange={onChangeVeiwMode}
-									>
-										{viewModeOptionList.map((option) => (
-											<Select.Option key={option.value}>{option.label}</Select.Option>
-										))}
-									</Select>
-
-									{isFormatButtonVisible && (
-										<Popover
-											getPopupContainer={popupContainer}
-											placement="right"
-											content={renderPopoverContent}
-										>
-											<Button>Format</Button>
-										</Popover>
-									)}
-
-									<Select
-										getPopupContainer={popupContainer}
-										style={defaultSelectStyle}
-										defaultValue={order}
-										onChange={handleChangeOrder}
-									>
-										{orderItems.map((item) => (
-											<Select.Option key={item.enum}>{item.name}</Select.Option>
-										))}
-									</Select>
-								</Space>
-							</Col>
-
-							<Col>
-								<LogControls />
-							</Col>
-						</Row>
-
-						<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
-					</Col>
-				</Row>
-
-				<LogDetailedView />
-			</div>
-		</PreferenceContextProvider>
+		<div className="old-logs-explorer">
+			<SpaceContainer
+				split={<Divider type="vertical" />}
+				align="center"
+				direction="horizontal"
+			>
+				<LogsSearchFilter />
+				<LogLiveTail />
+			</SpaceContainer>
+
+			<LogsAggregate />
+
+			<Row gutter={20} wrap={false}>
+				<LogsFilters />
+				<Col flex={1} className="logs-col-container">
+					<Row>
+						<Col flex={1}>
+							<Space align="baseline" direction="horizontal">
+								<Select
+									getPopupContainer={popupContainer}
+									style={defaultSelectStyle}
+									value={selectedViewModeOption}
+									onChange={onChangeVeiwMode}
+								>
+									{viewModeOptionList.map((option) => (
+										<Select.Option key={option.value}>{option.label}</Select.Option>
+									))}
+								</Select>
+
+								{isFormatButtonVisible && (
+									<Popover
+										getPopupContainer={popupContainer}
+										placement="right"
+										content={renderPopoverContent}
+									>
+										<Button>Format</Button>
+									</Popover>
+								)}
+
+								<Select
+									getPopupContainer={popupContainer}
+									style={defaultSelectStyle}
+									defaultValue={order}
+									onChange={handleChangeOrder}
+								>
+									{orderItems.map((item) => (
+										<Select.Option key={item.enum}>{item.name}</Select.Option>
+									))}
+								</Select>
+							</Space>
+						</Col>
+
+						<Col>
+							<LogControls />
+						</Col>
+					</Row>
+
+					<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
+				</Col>
+			</Row>
+
+			<LogDetailedView />
+		</div>
 	);
 }
@@ -4,7 +4,6 @@ import NotFound from 'components/NotFound';
 import Spinner from 'components/Spinner';
 import NewDashboard from 'container/NewDashboard';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
-import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
 import { useEffect } from 'react';
 import { ErrorType } from 'types/common';
@@ -36,11 +35,7 @@ function DashboardPage(): JSX.Element {
 		return <Spinner tip="Loading.." />;
 	}
 
-	return (
-		<PreferenceContextProvider>
-			<NewDashboard />
-		</PreferenceContextProvider>
-	);
+	return <NewDashboard />;
 }
 
 export default DashboardPage;
@@ -2,7 +2,6 @@ import './DeleteFunnelStep.styles.scss';
 
 import SignozModal from 'components/SignozModal/SignozModal';
 import { Trash2, X } from 'lucide-react';
-import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
 
 interface DeleteFunnelStepProps {
 	isOpen: boolean;
@@ -15,10 +14,8 @@ function DeleteFunnelStep({
 	onClose,
 	onStepRemove,
 }: DeleteFunnelStepProps): JSX.Element {
-	const { handleRunFunnel } = useFunnelContext();
 	const handleStepRemoval = (): void => {
 		onStepRemove();
-		handleRunFunnel();
 		onClose();
 	};
@@ -6,7 +6,6 @@ import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
 import useFunnelConfiguration from 'hooks/TracesFunnels/useFunnelConfiguration';
 import { PencilLine } from 'lucide-react';
 import FunnelItemPopover from 'pages/TracesFunnels/components/FunnelsList/FunnelItemPopover';
-import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
 import CopyToClipboard from 'periscope/components/CopyToClipboard';
 import { memo, useState } from 'react';
 import { Span } from 'types/api/trace/getTraceV2';
@@ -22,6 +21,7 @@ interface FunnelConfigurationProps {
 	funnel: FunnelData;
 	isTraceDetailsPage?: boolean;
 	span?: Span;
+	disableAutoSave?: boolean;
 	triggerAutoSave?: boolean;
 	showNotifications?: boolean;
 }
@@ -30,19 +30,15 @@ function FunnelConfiguration({
 	funnel,
 	isTraceDetailsPage,
 	span,
+	disableAutoSave,
 	triggerAutoSave,
 	showNotifications,
 }: FunnelConfigurationProps): JSX.Element {
-	const { triggerSave } = useFunnelContext();
-	const {
-		isPopoverOpen,
-		setIsPopoverOpen,
-		steps,
-		isSaving,
-	} = useFunnelConfiguration({
+	const { isPopoverOpen, setIsPopoverOpen, steps } = useFunnelConfiguration({
 		funnel,
-		triggerAutoSave: triggerAutoSave || triggerSave,
-		showNotifications: showNotifications || triggerSave,
+		disableAutoSave,
+		triggerAutoSave,
+		showNotifications,
 	});
 	const [isDescriptionModalOpen, setIsDescriptionModalOpen] = useState<boolean>(
 		false,
@@ -110,7 +106,7 @@ function FunnelConfiguration({
 
 	{!isTraceDetailsPage && (
 		<>
-			<StepsFooter stepsCount={steps.length} isSaving={isSaving || false} />
+			<StepsFooter stepsCount={steps.length} />
 			<AddFunnelDescriptionModal
 				isOpen={isDescriptionModalOpen}
 				onClose={handleDescriptionModalClose}
@@ -126,6 +122,7 @@ function FunnelConfiguration({
 FunnelConfiguration.defaultProps = {
 	isTraceDetailsPage: false,
 	span: undefined,
+	disableAutoSave: false,
 	triggerAutoSave: false,
 	showNotifications: false,
 };
@@ -9,7 +9,6 @@
 	color: var(--bg-vanilla-400);
 	border: 1px solid var(--bg-slate-500);
 	border-radius: 6px;
 	width: 100%;
 	.step-popover {
 		opacity: 0;
 		width: 22px;
@@ -40,6 +40,11 @@
 	letter-spacing: 0.12px;
 	border-radius: 2px;
 
+	&--sync {
+		border: 1px solid var(--bg-slate-400);
+		background: var(--bg-ink-300);
+		color: var(--bg-vanilla-400);
+	}
 	&--run {
 		background-color: var(--bg-robin-500);
 	}
@@ -1,14 +1,53 @@
 import './StepsFooter.styles.scss';
 
-import { Button, Skeleton } from 'antd';
+import { LoadingOutlined } from '@ant-design/icons';
+import { Button, Skeleton, Spin } from 'antd';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
-import { Check, Cone } from 'lucide-react';
+import { Cone, Play, RefreshCcw } from 'lucide-react';
 import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
-import { useIsMutating } from 'react-query';
+import { useMemo } from 'react';
+import { useIsFetching, useIsMutating } from 'react-query';
 
+const useFunnelResultsLoading = (): boolean => {
+	const { funnelId } = useFunnelContext();
+
+	const isFetchingFunnelOverview = useIsFetching({
+		queryKey: [REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW, funnelId],
+	});
+
+	const isFetchingStepsGraphData = useIsFetching({
+		queryKey: [REACT_QUERY_KEY.GET_FUNNEL_STEPS_GRAPH_DATA, funnelId],
+	});
+
+	const isFetchingErrorTraces = useIsFetching({
+		queryKey: [REACT_QUERY_KEY.GET_FUNNEL_ERROR_TRACES, funnelId],
+	});
+
+	const isFetchingSlowTraces = useIsFetching({
+		queryKey: [REACT_QUERY_KEY.GET_FUNNEL_SLOW_TRACES, funnelId],
+	});
+
+	return useMemo(() => {
+		if (!funnelId) {
+			return false;
+		}
+		return (
+			!!isFetchingFunnelOverview ||
+			!!isFetchingStepsGraphData ||
+			!!isFetchingErrorTraces ||
+			!!isFetchingSlowTraces
+		);
+	}, [
+		funnelId,
+		isFetchingFunnelOverview,
+		isFetchingStepsGraphData,
+		isFetchingErrorTraces,
+		isFetchingSlowTraces,
+	]);
+};
+
 interface StepsFooterProps {
 	stepsCount: number;
-	isSaving: boolean;
 }
 
 function ValidTracesCount(): JSX.Element {
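A note on the hook added above: `useIsFetching` returns the number of in-flight queries matching a key prefix, which is why each result is coerced with `!!` before OR-ing. A sketch of a consumer, assuming the hook above is in scope in the same file; the component and class name are hypothetical:

```typescript
function ResultsRefreshIndicator(): JSX.Element | null {
	// true while any funnel analytics query for this funnel is in flight
	const isFunnelResultsLoading = useFunnelResultsLoading();
	if (!isFunnelResultsLoading) {
		return null;
	}
	return <span className="steps-footer__refreshing">Refreshing results…</span>;
}
```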
@@ -54,13 +93,21 @@ function ValidTracesCount(): JSX.Element {
 	return <span className="steps-footer__valid-traces">Valid traces found</span>;
 }
 
-function StepsFooter({ stepsCount, isSaving }: StepsFooterProps): JSX.Element {
+function StepsFooter({ stepsCount }: StepsFooterProps): JSX.Element {
 	const {
 		hasIncompleteStepFields,
-		handleSaveFunnel,
-		hasUnsavedChanges,
 		validTracesCount,
+		handleRunFunnel,
+		hasFunnelBeenExecuted,
+		funnelId,
 	} = useFunnelContext();
 
+	const isFunnelResultsLoading = useFunnelResultsLoading();
+
+	const isFunnelUpdateMutating = useIsMutating([
+		REACT_QUERY_KEY.UPDATE_FUNNEL_STEPS,
+		funnelId,
+	]);
+
 	return (
 		<div className="steps-footer">
 			<div className="steps-footer__left">
@@ -70,16 +117,38 @@ function StepsFooter({ stepsCount }: StepsFooterProps): JSX.Element {
 				<ValidTracesCount />
 			</div>
 			<div className="steps-footer__right">
-				<Button
-					disabled={hasIncompleteStepFields || !hasUnsavedChanges}
-					onClick={handleSaveFunnel}
-					type="primary"
-					className="steps-footer__button steps-footer__button--run"
-					icon={<Check size={14} />}
-					loading={isSaving}
-				>
-					Save funnel
-				</Button>
+				{!!isFunnelUpdateMutating && (
+					<div className="steps-footer__button steps-footer__button--updating">
+						<Spin
+							indicator={<LoadingOutlined style={{ color: 'grey' }} />}
+							size="small"
+						/>
+						Updating
+					</div>
+				)}
+
+				{!hasFunnelBeenExecuted ? (
+					<Button
+						disabled={validTracesCount === 0}
+						onClick={handleRunFunnel}
+						type="primary"
+						className="steps-footer__button steps-footer__button--run"
+						icon={<Play size={16} />}
+					>
+						Run funnel
+					</Button>
+				) : (
+					<Button
+						type="text"
+						className="steps-footer__button steps-footer__button--sync"
+						icon={<RefreshCcw size={16} />}
+						onClick={handleRunFunnel}
+						loading={isFunnelResultsLoading}
+						disabled={validTracesCount === 0}
+					>
+						Refresh
+					</Button>
+				)}
 			</div>
 		</div>
 	);
@@ -29,20 +29,13 @@ Chart.register(
 );
 
 function FunnelGraph(): JSX.Element {
-	const { funnelId, startTime, endTime, steps } = useFunnelContext();
-
-	const payload = {
-		start_time: startTime,
-		end_time: endTime,
-		steps,
-	};
-
+	const { funnelId } = useFunnelContext();
 	const {
 		data: stepsData,
 		isLoading,
 		isFetching,
 		isError,
-	} = useFunnelStepsGraphData(funnelId, payload);
+	} = useFunnelStepsGraphData(funnelId);
 
 	const data = useMemo(() => stepsData?.payload?.data?.[0]?.data, [
 		stepsData?.payload?.data,
@@ -16,6 +16,7 @@ function FunnelResults(): JSX.Element {
 	isValidateStepsLoading,
 	hasIncompleteStepFields,
 	hasAllEmptyStepFields,
+	hasFunnelBeenExecuted,
 	funnelId,
 } = useFunnelContext();
@@ -46,6 +47,14 @@ function FunnelResults(): JSX.Element {
 			/>
 		);
 	}
+	if (!hasFunnelBeenExecuted) {
+		return (
+			<EmptyFunnelResults
+				title="Funnel has not been run yet."
+				description="Run the funnel to see the results"
+			/>
+		);
+	}
 
 	return (
 		<div className="funnel-results">
@@ -7,7 +7,6 @@ import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
 import { useMemo } from 'react';
 import { UseQueryResult } from 'react-query';
 import { ErrorResponse, SuccessResponse } from 'types/api';
-import { FunnelStepData } from 'types/api/traceFunnels';
 
 import FunnelTable from './FunnelTable';
 import { topTracesTableColumns } from './utils';
@@ -25,7 +24,6 @@ interface FunnelTopTracesTableProps {
 		SuccessResponse<SlowTraceData | ErrorTraceData> | ErrorResponse,
 		Error
 	>;
-	steps: FunnelStepData[];
 }
 
 function FunnelTopTracesTable({
@@ -34,7 +32,6 @@ function FunnelTopTracesTable({
 	stepBOrder,
 	title,
 	tooltip,
-	steps,
 	useQueryHook,
 }: FunnelTopTracesTableProps): JSX.Element {
 	const { startTime, endTime } = useFunnelContext();
@@ -44,9 +41,8 @@ function FunnelTopTracesTable({
 			end_time: endTime,
 			step_start: stepAOrder,
 			step_end: stepBOrder,
-			steps,
 		}),
-		[startTime, endTime, stepAOrder, stepBOrder, steps],
+		[startTime, endTime, stepAOrder, stepBOrder],
 	);
 
 	const { data: response, isLoading, isFetching } = useQueryHook(
@@ -6,7 +6,7 @@ import FunnelMetricsTable from './FunnelMetricsTable';
 function OverallMetrics(): JSX.Element {
 	const { funnelId } = useParams<{ funnelId: string }>();
 	const { isLoading, metricsData, conversionRate, isError } = useFunnelMetrics({
-		funnelId,
+		funnelId: funnelId || '',
 	});
 
 	return (
@@ -52,13 +52,11 @@ function StepsTransitionResults(): JSX.Element {
 			funnelId={funnelId}
 			stepAOrder={stepAOrder}
 			stepBOrder={stepBOrder}
-			steps={steps}
 		/>
 		<TopTracesWithErrors
 			funnelId={funnelId}
 			stepAOrder={stepAOrder}
 			stepBOrder={stepBOrder}
-			steps={steps}
 		/>
 	</div>
 </div>
@@ -1,5 +1,4 @@
 import { useFunnelSlowTraces } from 'hooks/TracesFunnels/useFunnels';
-import { FunnelStepData } from 'types/api/traceFunnels';
 
 import FunnelTopTracesTable from './FunnelTopTracesTable';
 
@@ -7,7 +6,6 @@ interface TopSlowestTracesProps {
 	funnelId: string;
 	stepAOrder: number;
 	stepBOrder: number;
-	steps: FunnelStepData[];
 }
 
 function TopSlowestTraces(props: TopSlowestTracesProps): JSX.Element {
@@ -1,5 +1,4 @@
 import { useFunnelErrorTraces } from 'hooks/TracesFunnels/useFunnels';
-import { FunnelStepData } from 'types/api/traceFunnels';
 
 import FunnelTopTracesTable from './FunnelTopTracesTable';
 
@@ -7,7 +6,6 @@ interface TopTracesWithErrorsProps {
 	funnelId: string;
 	stepAOrder: number;
 	stepBOrder: number;
-	steps: FunnelStepData[];
 }
 
 function TopTracesWithErrors(props: TopTracesWithErrorsProps): JSX.Element {
@@ -18,4 +18,10 @@ export const topTracesTableColumns = [
 		key: 'duration_ms',
 		render: (value: string): string => getYAxisFormattedValue(value, 'ms'),
 	},
+	{
+		title: 'SPAN COUNT',
+		dataIndex: 'span_count',
+		key: 'span_count',
+		render: (value: number): string => value.toString(),
+	},
 ];
@@ -14,6 +14,8 @@ export const initialStepsData: FunnelStepData[] = [
 		latency_pointer: 'start',
 		latency_type: undefined,
 		has_errors: false,
+		name: '',
+		description: '',
 	},
 	{
 		id: v4(),
@@ -27,6 +29,8 @@ export const initialStepsData: FunnelStepData[] = [
 		latency_pointer: 'start',
 		latency_type: LatencyOptions.P95,
 		has_errors: false,
+		name: '',
+		description: '',
 	},
 ];
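For orientation, a sketch of what a complete initial step looks like after this change, with the two new fields defaulting to empty strings. The shape is inferred from the hunks above and from fields referenced elsewhere in this changeset (`service_name`, `span_name`, `filters`); any field not shown in the diff is an assumption:

```typescript
import { v4 } from 'uuid';

// Hypothetical reconstruction of one FunnelStepData entry.
const emptyStep = {
	id: v4(),
	service_name: '',
	span_name: '',
	filters: { op: 'AND', items: [] },
	latency_pointer: 'start' as const,
	has_errors: false,
	name: '', // newly added field
	description: '', // newly added field
};
```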
@@ -1,15 +1,15 @@
 import logEvent from 'api/common/logEvent';
 import { ValidateFunnelResponse } from 'api/traceFunnels';
+import { LOCALSTORAGE } from 'constants/localStorage';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import { Time } from 'container/TopNav/DateTimeSelection/config';
 import {
 	CustomTimeType,
 	Time as TimeV2,
 } from 'container/TopNav/DateTimeSelectionV2/config';
-import { normalizeSteps } from 'hooks/TracesFunnels/useFunnelConfiguration';
 import { useValidateFunnelSteps } from 'hooks/TracesFunnels/useFunnels';
+import { useLocalStorage } from 'hooks/useLocalStorage';
 import getStartEndRangeTime from 'lib/getStartEndRangeTime';
-import { isEqual } from 'lodash-es';
 import { initialStepsData } from 'pages/TracesFunnelDetails/constants';
 import {
 	createContext,
@@ -41,9 +41,6 @@ interface FunnelContextType {
|
||||
handleStepChange: (index: number, newStep: Partial<FunnelStepData>) => void;
|
||||
handleStepRemoval: (index: number) => void;
|
||||
handleRunFunnel: () => void;
|
||||
handleSaveFunnel: () => void;
|
||||
triggerSave: boolean;
|
||||
hasUnsavedChanges: boolean;
|
||||
validationResponse:
|
||||
| SuccessResponse<ValidateFunnelResponse>
|
||||
| ErrorResponse
|
||||
@@ -57,10 +54,8 @@ interface FunnelContextType {
|
||||
spanName: string,
|
||||
) => void;
|
||||
handleRestoreSteps: (oldSteps: FunnelStepData[]) => void;
|
||||
isUpdatingFunnel: boolean;
|
||||
setIsUpdatingFunnel: Dispatch<SetStateAction<boolean>>;
|
||||
lastUpdatedSteps: FunnelStepData[];
|
||||
setLastUpdatedSteps: Dispatch<SetStateAction<FunnelStepData[]>>;
|
||||
hasFunnelBeenExecuted: boolean;
|
||||
setHasFunnelBeenExecuted: Dispatch<SetStateAction<boolean>>;
|
||||
}
|
||||
|
||||
const FunnelContext = createContext<FunnelContextType | undefined>(undefined);
|
||||
@@ -91,19 +86,6 @@ export function FunnelProvider({
|
||||
const funnel = data?.payload;
|
||||
const initialSteps = funnel?.steps?.length ? funnel.steps : initialStepsData;
|
||||
const [steps, setSteps] = useState<FunnelStepData[]>(initialSteps);
|
||||
const [triggerSave, setTriggerSave] = useState<boolean>(false);
|
||||
const [isUpdatingFunnel, setIsUpdatingFunnel] = useState<boolean>(false);
|
||||
const [lastUpdatedSteps, setLastUpdatedSteps] = useState<FunnelStepData[]>(
|
||||
initialSteps,
|
||||
);
|
||||
|
||||
// Check if there are unsaved changes by comparing with initial steps from API
|
||||
const hasUnsavedChanges = useMemo(() => {
|
||||
const normalizedCurrentSteps = normalizeSteps(steps);
|
||||
const normalizedInitialSteps = normalizeSteps(lastUpdatedSteps);
|
||||
return !isEqual(normalizedCurrentSteps, normalizedInitialSteps);
|
||||
}, [steps, lastUpdatedSteps]);
|
||||
|
||||
const { hasIncompleteStepFields, hasAllEmptyStepFields } = useMemo(
|
||||
() => ({
|
||||
hasAllEmptyStepFields: steps.every(
|
||||
@@ -116,6 +98,15 @@ export function FunnelProvider({
|
||||
[steps],
|
||||
);
|
||||
|
||||
const [unexecutedFunnels, setUnexecutedFunnels] = useLocalStorage<string[]>(
|
||||
LOCALSTORAGE.UNEXECUTED_FUNNELS,
|
||||
[],
|
||||
);
|
||||
|
||||
const [hasFunnelBeenExecuted, setHasFunnelBeenExecuted] = useState(
|
||||
!unexecutedFunnels.includes(funnelId),
|
||||
);
|
||||
|
||||
const {
|
||||
data: validationResponse,
|
||||
isLoading: isValidationLoading,
|
||||
@@ -125,13 +116,7 @@ export function FunnelProvider({
|
||||
selectedTime,
|
||||
startTime,
|
||||
endTime,
|
||||
enabled:
|
||||
!!funnelId &&
|
||||
!!selectedTime &&
|
||||
!!startTime &&
|
||||
!!endTime &&
|
||||
!hasIncompleteStepFields,
|
||||
steps,
|
||||
enabled: !!funnelId && !!selectedTime && !!startTime && !!endTime,
|
||||
});
|
||||
|
||||
const validTracesCount = useMemo(
|
||||
@@ -200,7 +185,11 @@ export function FunnelProvider({
|
||||
|
||||
const handleRunFunnel = useCallback(async (): Promise<void> => {
|
||||
if (validTracesCount === 0) return;
|
||||
if (!hasFunnelBeenExecuted) {
|
||||
setUnexecutedFunnels(unexecutedFunnels.filter((id) => id !== funnelId));
|
||||
|
||||
setHasFunnelBeenExecuted(true);
|
||||
}
|
||||
queryClient.refetchQueries([
|
||||
REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW,
|
||||
funnelId,
|
||||
@@ -226,13 +215,15 @@ export function FunnelProvider({
|
||||
funnelId,
|
||||
selectedTime,
|
||||
]);
|
||||
}, [funnelId, queryClient, selectedTime, validTracesCount]);
|
||||
|
||||
const handleSaveFunnel = useCallback(() => {
|
||||
setTriggerSave(true);
|
||||
// Reset the trigger after a brief moment to allow useFunnelConfiguration to pick it up
|
||||
setTimeout(() => setTriggerSave(false), 100);
|
||||
}, []);
|
||||
}, [
|
||||
funnelId,
|
||||
hasFunnelBeenExecuted,
|
||||
unexecutedFunnels,
|
||||
queryClient,
|
||||
selectedTime,
|
||||
setUnexecutedFunnels,
|
||||
validTracesCount,
|
||||
]);
|
||||
|
||||
const value = useMemo<FunnelContextType>(
|
||||
() => ({
|
||||
@@ -248,19 +239,14 @@ export function FunnelProvider({
|
||||
handleAddStep: addNewStep,
|
||||
handleStepRemoval,
|
||||
handleRunFunnel,
|
||||
handleSaveFunnel,
|
||||
triggerSave,
|
||||
validationResponse,
|
||||
isValidateStepsLoading: isValidationLoading || isValidationFetching,
|
||||
hasIncompleteStepFields,
|
||||
hasAllEmptyStepFields,
|
||||
handleReplaceStep,
|
||||
handleRestoreSteps,
|
||||
hasUnsavedChanges,
|
||||
setIsUpdatingFunnel,
|
||||
isUpdatingFunnel,
|
||||
lastUpdatedSteps,
|
||||
setLastUpdatedSteps,
|
||||
hasFunnelBeenExecuted,
|
||||
setHasFunnelBeenExecuted,
|
||||
}),
|
||||
[
|
||||
funnelId,
|
||||
@@ -274,8 +260,6 @@ export function FunnelProvider({
|
||||
addNewStep,
|
||||
handleStepRemoval,
|
||||
handleRunFunnel,
|
||||
handleSaveFunnel,
|
||||
triggerSave,
|
||||
validationResponse,
|
||||
isValidationLoading,
|
||||
isValidationFetching,
|
||||
@@ -283,11 +267,8 @@ export function FunnelProvider({
|
||||
hasAllEmptyStepFields,
|
||||
handleReplaceStep,
|
||||
handleRestoreSteps,
|
||||
hasUnsavedChanges,
|
||||
setIsUpdatingFunnel,
|
||||
isUpdatingFunnel,
|
||||
lastUpdatedSteps,
|
||||
setLastUpdatedSteps,
|
||||
hasFunnelBeenExecuted,
|
||||
setHasFunnelBeenExecuted,
|
||||
],
|
||||
);
|
||||
|
||||
|
||||
@@ -4,9 +4,11 @@ import { Input } from 'antd';
import logEvent from 'api/common/logEvent';
import { AxiosError } from 'axios';
import SignozModal from 'components/SignozModal/SignozModal';
import { LOCALSTORAGE } from 'constants/localStorage';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
import { useCreateFunnel } from 'hooks/TracesFunnels/useFunnels';
import { useLocalStorage } from 'hooks/useLocalStorage';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { Check, X } from 'lucide-react';
@@ -32,6 +34,11 @@ function CreateFunnel({
const { safeNavigate } = useSafeNavigate();
const { pathname } = useLocation();

const [unexecutedFunnels, setUnexecutedFunnels] = useLocalStorage<string[]>(
LOCALSTORAGE.UNEXECUTED_FUNNELS,
[],
);

const handleCreate = (): void => {
createFunnelMutation.mutate(
{
@@ -54,6 +61,9 @@
queryClient.invalidateQueries([REACT_QUERY_KEY.GET_FUNNELS_LIST]);

const funnelId = data?.payload?.funnel_id;
if (funnelId) {
setUnexecutedFunnels([...unexecutedFunnels, funnelId]);
}

onClose(funnelId);
if (funnelId && redirectToDetails) {

@@ -2,16 +2,13 @@ import '../RenameFunnel/RenameFunnel.styles.scss';
import './DeleteFunnel.styles.scss';

import SignozModal from 'components/SignozModal/SignozModal';
import { LOCALSTORAGE } from 'constants/localStorage';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
import { useDeleteFunnel } from 'hooks/TracesFunnels/useFunnels';
import { useLocalStorage } from 'hooks/useLocalStorage';
import { useNotifications } from 'hooks/useNotifications';
import { Trash2, X } from 'lucide-react';
import { useQueryClient } from 'react-query';
import { useHistory } from 'react-router-dom';
import { FunnelStepData } from 'types/api/traceFunnels';

interface DeleteFunnelProps {
isOpen: boolean;
@@ -32,13 +29,6 @@ function DeleteFunnel({

const history = useHistory();
const { pathname } = history.location;

// localStorage hook for funnel steps
const localStorageKey = `${LOCALSTORAGE.FUNNEL_STEPS}_${funnelId}`;
const [, , clearLocalStorageSavedSteps] = useLocalStorage<
FunnelStepData[] | null
>(localStorageKey, null);

const handleDelete = (): void => {
deleteFunnelMutation.mutate(
{
@@ -49,7 +39,6 @@
notifications.success({
message: 'Funnel deleted successfully',
});
clearLocalStorageSavedSteps();
onClose();

if (

@@ -8,7 +8,7 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.111.43
github.com/SigNoz/signoz-otel-collector v0.111.39
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/cespare/xxhash/v2 v2.3.0
@@ -69,8 +69,8 @@ require (
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.38.0
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0
golang.org/x/oauth2 v0.26.0
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842
golang.org/x/oauth2 v0.24.0
golang.org/x/sync v0.14.0
golang.org/x/text v0.25.0
google.golang.org/protobuf v1.36.0
@@ -125,7 +125,7 @@ require (
github.com/go-openapi/spec v0.21.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-openapi/validate v0.24.0 // indirect
github.com/goccy/go-json v0.10.4 // indirect
github.com/goccy/go-json v0.10.3 // indirect
github.com/gofrs/uuid v4.4.0+incompatible // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf // indirect
@@ -182,7 +182,7 @@ require (
github.com/oklog/ulid v1.3.1 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.111.0 // indirect
github.com/paulmach/orb v0.11.1 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c // indirect
@@ -267,7 +267,7 @@ require (
golang.org/x/net v0.40.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/time v0.8.0 // indirect
golang.org/x/tools v0.29.0 // indirect
golang.org/x/tools v0.28.0 // indirect
gonum.org/v1/gonum v0.15.1 // indirect
google.golang.org/api v0.213.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20241216192217-9240e9c98484 // indirect

@@ -100,8 +100,8 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/signoz-otel-collector v0.111.43 h1:upWUoxDl5kCE/WI5+di2oqA/wJi2NU/PRyN8zDR078c=
github.com/SigNoz/signoz-otel-collector v0.111.43/go.mod h1:iUGoKEaNQmLNptTwEz9o5kZ0ntbCMQsrV53Y2TDd1Qg=
github.com/SigNoz/signoz-otel-collector v0.111.39 h1:Dl8QqZNAsj2atxP572OzsszPK0XPpd3LLPNPRAUJ5wo=
github.com/SigNoz/signoz-otel-collector v0.111.39/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@@ -322,8 +322,8 @@ github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpG
github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-zookeeper/zk v1.0.4 h1:DPzxraQx7OrPyXq2phlGlNSIyWEsAox0RJmjTseMV6I=
github.com/go-zookeeper/zk v1.0.4/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA=
@@ -758,8 +758,8 @@ github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3v
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@@ -1138,8 +1138,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM=
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -1241,8 +1241,8 @@ golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE=
golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -1425,8 +1425,8 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

@@ -75,15 +75,17 @@ comparison
| key NOT CONTAINS value
;

// in(...) or in[...]
// in(...) or in[...] - now also supports variables
inClause
: IN LPAREN valueList RPAREN
| IN LBRACK valueList RBRACK
| IN variable // NEW: support for IN $var, IN {{var}}, IN [[var]]
;

notInClause
: NOT IN LPAREN valueList RPAREN
| NOT IN LBRACK valueList RBRACK
| NOT IN variable // NEW: support for NOT IN $var, etc.
;

// List of values for in(...) or in[...]
@@ -126,13 +128,21 @@ array

/*
* A 'value' can be a string literal (double or single-quoted),
// a numeric literal, boolean, or a "bare" token as needed.
// a numeric literal, boolean, a "bare" token, or a variable.
*/
value
: QUOTED_TEXT
| NUMBER
| BOOL
| KEY
| variable // NEW: variables can be used as values
;

// NEW: Variable rule to support different variable syntaxes
variable
: DOLLAR_VAR
| CURLY_VAR
| SQUARE_VAR
;

/*
@@ -190,6 +200,11 @@ BOOL
| [Ff][Aa][Ll][Ss][Ee]
;

// NEW: Variable token types
DOLLAR_VAR : '$' [a-zA-Z_] [a-zA-Z0-9._]* ;
CURLY_VAR : '{{' [ \t]* '.'? [a-zA-Z_] [a-zA-Z0-9._]* [ \t]* '}}' ;
SQUARE_VAR : '[[' [ \t]* '.'? [a-zA-Z_] [a-zA-Z0-9._]* [ \t]* ']]' ;

fragment SIGN : [+-] ;

// Numbers: optional sign, then digits, optional fractional part,

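To make the new syntax concrete, the three variable forms can now appear wherever a value or value list was previously required. Below is a minimal sketch of running such a filter through the regenerated lexer and parser. The lexer/parser constructor names follow standard ANTLR Go codegen for a grammar named FilterQuery, but the import path of the generated package and the `Query` entry-rule name are assumptions; only the grammar rules above come from this change.

package main

import (
	"fmt"

	"github.com/antlr4-go/antlr/v4"

	// Hypothetical import path for the generated FilterQuery package.
	grammar "github.com/SigNoz/signoz/pkg/parser/grammar"
)

func main() {
	// One query exercising all three new variable forms: $var, {{var}}, [[var]].
	input := antlr.NewInputStream(
		`service.name IN $services AND status_code NOT IN [[excluded_codes]] AND env = {{deployment_env}}`,
	)
	lexer := grammar.NewFilterQueryLexer(input)
	tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
	parser := grammar.NewFilterQueryParser(tokens)
	tree := parser.Query() // entry rule name is an assumption
	fmt.Println(tree.ToStringTree(nil, parser))
}
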
@@ -12,16 +12,4 @@ type Analytics interface {

// Sends analytics messages to an analytics backend.
Send(context.Context, ...analyticstypes.Message)

// Tracks an event on a group level. Input is group, event name, and attributes. The user is "stats_<org_id>".
TrackGroup(context.Context, string, string, map[string]any)

// Tracks an event on a user level and attributes it with the group. Input is group, user, event name, and attributes.
TrackUser(context.Context, string, string, string, map[string]any)

// Identifies a group. Input is group, traits.
IdentifyGroup(context.Context, string, map[string]any)

// Identifies a user. Input is group, user, traits.
IdentifyUser(context.Context, string, string, map[string]any)
}

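With the tracking and identify helpers removed from the interface, callers are now expected to build analyticstypes messages themselves and hand them to Send, as the dashboard and user modules later in this diff do. A minimal sketch of the replacement pattern follows; the event name and properties are illustrative, and the import path of the Analytics interface is an assumption, while Track, Context, NewPropertiesFromMap, and KeyGroupID all appear in this diff.

package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/analytics" // assumed import path for the Analytics interface
	"github.com/SigNoz/signoz/pkg/types/analyticstypes"
)

// trackDashboardCreated shows the Send-based pattern that replaces the
// removed TrackUser helper.
func trackDashboardCreated(ctx context.Context, client analytics.Analytics, userID, orgID string) {
	client.Send(ctx,
		analyticstypes.Track{
			UserId:     userID,
			Event:      "Dashboard Created",
			Properties: analyticstypes.NewPropertiesFromMap(map[string]any{"source": "api"}), // illustrative
			Context: &analyticstypes.Context{
				Extra: map[string]interface{}{
					analyticstypes.KeyGroupID: orgID, // associates the event with the org
				},
			},
		},
	)
}
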
@@ -24,18 +24,6 @@ func (provider *Provider) Start(_ context.Context) error {

func (provider *Provider) Send(ctx context.Context, messages ...analyticstypes.Message) {}

func (provider *Provider) TrackGroup(ctx context.Context, group, event string, attributes map[string]any) {
}

func (provider *Provider) TrackUser(ctx context.Context, group, user, event string, attributes map[string]any) {
}

func (provider *Provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
}

func (provider *Provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
}

func (provider *Provider) Stop(_ context.Context) error {
close(provider.stopC)
return nil

@@ -27,25 +27,7 @@ func (provider *provider) Start(_ context.Context) error {
return nil
}

func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.Message) {
// do nothing
}

func (provider *provider) TrackGroup(ctx context.Context, group, event string, attributes map[string]any) {
// do nothing
}

func (provider *provider) TrackUser(ctx context.Context, group, user, event string, attributes map[string]any) {
// do nothing
}

func (provider *provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
// do nothing
}

func (provider *provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
// do nothing
}
func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.Message) {}

func (provider *provider) Stop(_ context.Context) error {
close(provider.stopC)

@@ -50,100 +50,6 @@ func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.M
}
}

func (provider *provider) TrackGroup(ctx context.Context, group, event string, properties map[string]any) {
if properties == nil {
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping event", "group", group, "event", event)
return
}

err := provider.client.Enqueue(analyticstypes.Track{
UserId: "stats_" + group,
Event: event,
Properties: analyticstypes.NewPropertiesFromMap(properties),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: group,
},
},
})
if err != nil {
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
}
}

func (provider *provider) TrackUser(ctx context.Context, group, user, event string, properties map[string]any) {
if properties == nil {
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping event", "user", user, "group", group, "event", event)
return
}

err := provider.client.Enqueue(analyticstypes.Track{
UserId: user,
Event: event,
Properties: analyticstypes.NewPropertiesFromMap(properties),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: group,
},
},
})
if err != nil {
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
}
}

func (provider *provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
if traits == nil {
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping identify", "group", group)
return
}

// identify the user
err := provider.client.Enqueue(analyticstypes.Identify{
UserId: "stats_" + group,
Traits: analyticstypes.NewTraitsFromMap(traits),
})
if err != nil {
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
}

// identify the group using the stats user
err = provider.client.Enqueue(analyticstypes.Group{
UserId: "stats_" + group,
GroupId: group,
Traits: analyticstypes.NewTraitsFromMap(traits),
})
if err != nil {
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
}
}

func (provider *provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
if traits == nil {
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping identify", "user", user, "group", group)
return
}

// identify the user
err := provider.client.Enqueue(analyticstypes.Identify{
UserId: user,
Traits: analyticstypes.NewTraitsFromMap(traits),
})
if err != nil {
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
}

// associate the user with the group
err = provider.client.Enqueue(analyticstypes.Group{
UserId: user,
GroupId: group,
Traits: analyticstypes.NewTraits().Set("id", group), // A trait is required
})
if err != nil {
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
}
}

func (provider *provider) Stop(ctx context.Context) error {
if err := provider.client.Close(); err != nil {
provider.settings.Logger().WarnContext(ctx, "unable to close segment client", "err", err)

@@ -70,7 +70,7 @@ func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, er
}
}

name := r.URL.Query().Get("name")
name := r.URL.Query().Get("searchText")

req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
@@ -92,8 +92,10 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}

name := r.URL.Query().Get("name")
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("value")
value := r.URL.Query().Get("searchText")

// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))

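Both parsers above now read the search term from a single `searchText` query parameter (previously `name` for keys and `value` for values), while the value endpoint still takes the field name via `name` and an optional `existingQuery`. A sketch of a matching client request; the host and path are placeholders, only the parameter names come from the handlers above.

package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Hypothetical endpoint path; parameter names mirror parseFieldValueRequest.
	u := url.URL{Scheme: "http", Host: "localhost:8080", Path: "/api/v1/fields/values"}
	q := u.Query()
	q.Set("searchText", "frontend") // previously sent as "value" (or "name" for key lookups)
	q.Set("name", "service.name")
	q.Set("existingQuery", `service.name = "api"`)
	q.Set("limit", "50")
	u.RawQuery = q.Encode()

	resp, err := http.Get(u.String())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
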
@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -44,7 +45,19 @@ func (module *module) Create(ctx context.Context, orgID valuer.UUID, createdBy s
return nil, err
}

module.analytics.TrackUser(ctx, orgID.String(), creator.String(), "Dashboard Created", dashboardtypes.NewStatsFromStorableDashboards([]*dashboardtypes.StorableDashboard{storableDashboard}))
module.analytics.Send(ctx,
analyticstypes.Track{
UserId: creator.String(),
Event: "Dashboard Created",
Properties: analyticstypes.NewPropertiesFromMap(dashboardtypes.NewStatsFromStorableDashboards([]*dashboardtypes.StorableDashboard{storableDashboard})),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: orgID,
},
},
},
)

return dashboard, nil
}

@@ -1,31 +0,0 @@
package impluser

import (
"context"

"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)

type getter struct {
store types.UserStore
}

func NewGetter(store types.UserStore) user.Getter {
return &getter{store: store}
}

func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
gettableUsers, err := module.store.ListUsers(ctx, orgID.StringValue())
if err != nil {
return nil, err
}

users := make([]*types.User, len(gettableUsers))
for i, user := range gettableUsers {
users[i] = &user.User
}

return users, nil
}
@@ -326,7 +326,7 @@ func (h *handler) UpdateUser(w http.ResponseWriter, r *http.Request) {

user.UpdatedAt = time.Now()

updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user, claims.UserID)
updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user)
if err != nil {
render.Error(w, err)
return
@@ -347,7 +347,7 @@ func (h *handler) DeleteUser(w http.ResponseWriter, r *http.Request) {
return
}

if err := h.module.DeleteUser(ctx, claims.OrgID, id, claims.UserID); err != nil {
if err := h.module.DeleteUser(ctx, claims.OrgID, id); err != nil {
render.Error(w, err)
return
}

@@ -18,6 +18,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -134,9 +135,35 @@ func (m *Module) CreateUserWithPassword(ctx context.Context, user *types.User, p
return nil, err
}

traitsOrProperties := types.NewTraitsFromUser(user)
m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traitsOrProperties)
m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Created", traitsOrProperties)
m.analytics.Send(ctx,
analyticstypes.Identify{
UserId: user.ID.String(),
Traits: analyticstypes.
NewTraits().
SetName(user.DisplayName).
SetEmail(user.Email).
Set("role", user.Role).
SetCreatedAt(user.CreatedAt),
},
analyticstypes.Group{
UserId: user.ID.String(),
GroupId: user.OrgID,
},
analyticstypes.Track{
UserId: user.ID.String(),
Event: "User Created",
Properties: analyticstypes.NewPropertiesFromMap(map[string]any{
"role": user.Role,
"email": user.Email,
"name": user.DisplayName,
}),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: user.OrgID,
},
},
},
)

return user, nil
}
@@ -146,9 +173,35 @@ func (m *Module) CreateUser(ctx context.Context, user *types.User) error {
return err
}

traitsOrProperties := types.NewTraitsFromUser(user)
m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traitsOrProperties)
m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Created", traitsOrProperties)
m.analytics.Send(ctx,
analyticstypes.Identify{
UserId: user.ID.String(),
Traits: analyticstypes.
NewTraits().
SetName(user.DisplayName).
SetEmail(user.Email).
Set("role", user.Role).
SetCreatedAt(user.CreatedAt),
},
analyticstypes.Group{
UserId: user.ID.String(),
GroupId: user.OrgID,
},
analyticstypes.Track{
UserId: user.ID.String(),
Event: "User Created",
Properties: analyticstypes.NewPropertiesFromMap(map[string]any{
"role": user.Role,
"email": user.Email,
"name": user.DisplayName,
}),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: user.OrgID,
},
},
},
)

return nil
}
@@ -173,22 +226,11 @@ func (m *Module) ListUsers(ctx context.Context, orgID string) ([]*types.Gettable
return m.store.ListUsers(ctx, orgID)
}

func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error) {
user, err := m.store.UpdateUser(ctx, orgID, id, user)
if err != nil {
return nil, err
}

traits := types.NewTraitsFromUser(user)
m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traits)

traits["updated_by"] = updatedBy
m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Updated", traits)

return user, nil
func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error) {
return m.store.UpdateUser(ctx, orgID, id, user)
}

func (m *Module) DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error {
func (m *Module) DeleteUser(ctx context.Context, orgID string, id string) error {
user, err := m.store.GetUserByID(ctx, orgID, id)
if err != nil {
return err
@@ -208,15 +250,7 @@ func (m *Module) DeleteUser(ctx context.Context, orgID string, id string, delete
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
}

if err := m.store.DeleteUser(ctx, orgID, user.ID.StringValue()); err != nil {
return err
}

m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Deleted", map[string]any{
"deleted_by": deletedBy,
})

return nil
return m.store.DeleteUser(ctx, orgID, user.ID.StringValue())
}

func (m *Module) CreateResetPasswordToken(ctx context.Context, userID string) (*types.ResetPasswordRequest, error) {
@@ -610,16 +644,10 @@ func (m *Module) Register(ctx context.Context, req *types.PostableRegisterOrgAnd
}

func (m *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
stats := make(map[string]any)
count, err := m.store.CountByOrgID(ctx, orgID)
if err == nil {
stats["user.count"] = count
if err != nil {
return nil, err
}

count, err = m.store.CountAPIKeyByOrgID(ctx, orgID)
if err == nil {
stats["factor.api_key.count"] = count
}

return stats, nil
return map[string]any{"user.count": count}, nil
}

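The reworked Collect above fails fast and reports only user.count, rather than best-effort merging of the user and API-key counts. For context, a stats reporter drives collectors of this shape on an interval; the sketch below shows one plausible collection loop. The interface mirrors the statsreporter.StatsCollector embedded in the Module interface later in this diff, but the loop itself and its wiring are assumptions.

package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// StatsCollector mirrors the shape implied by the Collect method above.
type StatsCollector interface {
	Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error)
}

// collectAll merges the stats of every collector, skipping any that fail.
func collectAll(ctx context.Context, orgID valuer.UUID, collectors []StatsCollector) map[string]any {
	merged := map[string]any{}
	for _, c := range collectors {
		stats, err := c.Collect(ctx, orgID)
		if err != nil {
			// With the reworked Collect, a failing module contributes nothing.
			continue
		}
		for k, v := range stats {
			merged[k] = v
		}
	}
	return merged
}
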
@@ -826,21 +826,3 @@ func (store *store) CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64,

return int64(count), nil
}

func (store *store) CountAPIKeyByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) {
apiKey := new(types.StorableAPIKey)

count, err := store.
sqlstore.
BunDB().
NewSelect().
Model(apiKey).
Join("JOIN users ON users.id = storable_api_key.user_id").
Where("org_id = ?", orgID).
Count(ctx)
if err != nil {
return 0, err
}

return int64(count), nil
}

@@ -28,8 +28,8 @@ type Module interface {
GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error)
GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error)
ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error)
UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error)
DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error
UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error)
DeleteUser(ctx context.Context, orgID string, id string) error

// login
GetAuthenticatedUser(ctx context.Context, orgID, email, password, refreshToken string) (*types.User, error)
@@ -70,11 +70,6 @@ type Module interface {
statsreporter.StatsCollector
}

type Getter interface {
// Get gets the users based on the given id
ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
}

type Handler interface {
// invite
CreateInvite(http.ResponseWriter, *http.Request)

File diff suppressed because one or more lines are too long
@@ -26,11 +26,14 @@ HAS=25
HASANY=26
HASALL=27
BOOL=28
NUMBER=29
QUOTED_TEXT=30
KEY=31
WS=32
FREETEXT=33
DOLLAR_VAR=29
CURLY_VAR=30
SQUARE_VAR=31
NUMBER=32
QUOTED_TEXT=33
KEY=34
WS=35
FREETEXT=36
'('=1
')'=2
'['=3

File diff suppressed because one or more lines are too long
@@ -26,11 +26,14 @@ HAS=25
HASANY=26
HASALL=27
BOOL=28
NUMBER=29
QUOTED_TEXT=30
KEY=31
WS=32
FREETEXT=33
DOLLAR_VAR=29
CURLY_VAR=30
SQUARE_VAR=31
NUMBER=32
QUOTED_TEXT=33
KEY=34
WS=35
FREETEXT=36
'('=1
')'=2
'['=3

@@ -117,6 +117,12 @@ func (s *BaseFilterQueryListener) EnterValue(ctx *ValueContext) {}
// ExitValue is called when production value is exited.
func (s *BaseFilterQueryListener) ExitValue(ctx *ValueContext) {}

// EnterVariable is called when production variable is entered.
func (s *BaseFilterQueryListener) EnterVariable(ctx *VariableContext) {}

// ExitVariable is called when production variable is exited.
func (s *BaseFilterQueryListener) ExitVariable(ctx *VariableContext) {}

// EnterKey is called when production key is entered.
func (s *BaseFilterQueryListener) EnterKey(ctx *KeyContext) {}

@@ -72,6 +72,10 @@ func (v *BaseFilterQueryVisitor) VisitValue(ctx *ValueContext) interface{} {
return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitVariable(ctx *VariableContext) interface{} {
return v.VisitChildren(ctx)
}

func (v *BaseFilterQueryVisitor) VisitKey(ctx *KeyContext) interface{} {
return v.VisitChildren(ctx)
}

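Downstream code that builds queries from the parse tree would typically override the new VisitVariable hook to substitute bound values. A minimal sketch of that idea, written as if it lived alongside the generated parser package; the embedding pattern is standard ANTLR Go codegen, while the package name, bindings map, and trimming logic are assumptions.

package parser // assumed name of the generated parser package

import "strings"

// resolvingVisitor embeds the generated base visitor and overrides
// VisitVariable to look up hypothetical variable bindings.
type resolvingVisitor struct {
	BaseFilterQueryVisitor
	vars map[string]any // e.g. {"services": []any{"api", "web"}}
}

func (v *resolvingVisitor) VisitVariable(ctx *VariableContext) interface{} {
	// Strip the $ / {{ }} / [[ ]] decoration and padding, then look up the bare name.
	name := strings.Trim(ctx.GetText(), "$[]{}. \t")
	if value, ok := v.vars[name]; ok {
		return value
	}
	return nil // an unresolved variable could also be surfaced as an error
}
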
@@ -50,178 +50,213 @@ func filterquerylexerLexerInit() {
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"FREETEXT",
"HAS", "HASANY", "HASALL", "BOOL", "DOLLAR_VAR", "CURLY_VAR", "SQUARE_VAR",
"NUMBER", "QUOTED_TEXT", "KEY", "WS", "FREETEXT",
}
staticData.RuleNames = []string{
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT",
"SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS", "KEY", "WS", "DIGIT",
"FREETEXT",
"HAS", "HASANY", "HASALL", "BOOL", "DOLLAR_VAR", "CURLY_VAR", "SQUARE_VAR",
"SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS",
"KEY", "WS", "DIGIT", "FREETEXT",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 33, 334, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 0, 36, 404, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
File diff suppressed because one or more lines are too long
5, 61, 0, 0, 113, 24, 1, 0, 0, 0, 114, 115, 7, 0, 0, 0, 115, 116, 7, 1,
|
||||
0, 0, 116, 117, 7, 2, 0, 0, 117, 118, 7, 3, 0, 0, 118, 26, 1, 0, 0, 0,
|
||||
119, 120, 7, 4, 0, 0, 120, 121, 7, 5, 0, 0, 121, 123, 7, 6, 0, 0, 122,
|
||||
124, 7, 7, 0, 0, 123, 122, 1, 0, 0, 0, 124, 125, 1, 0, 0, 0, 125, 123,
|
||||
1, 0, 0, 0, 125, 126, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 128, 7, 0,
|
||||
0, 0, 128, 129, 7, 1, 0, 0, 129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0,
|
||||
131, 28, 1, 0, 0, 0, 132, 133, 7, 1, 0, 0, 133, 134, 7, 0, 0, 0, 134, 135,
|
||||
7, 1, 0, 0, 135, 136, 7, 2, 0, 0, 136, 137, 7, 3, 0, 0, 137, 30, 1, 0,
|
||||
0, 0, 138, 139, 7, 4, 0, 0, 139, 140, 7, 5, 0, 0, 140, 142, 7, 6, 0, 0,
|
||||
141, 143, 7, 7, 0, 0, 142, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144,
|
||||
142, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 147,
|
||||
7, 1, 0, 0, 147, 148, 7, 0, 0, 0, 148, 149, 7, 1, 0, 0, 149, 150, 7, 2,
|
||||
0, 0, 150, 151, 7, 3, 0, 0, 151, 32, 1, 0, 0, 0, 152, 153, 7, 8, 0, 0,
|
||||
153, 154, 7, 3, 0, 0, 154, 155, 7, 6, 0, 0, 155, 156, 7, 9, 0, 0, 156,
|
||||
157, 7, 3, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 4, 0, 0, 159, 34, 1,
|
||||
0, 0, 0, 160, 161, 7, 3, 0, 0, 161, 162, 7, 10, 0, 0, 162, 163, 7, 1, 0,
|
||||
0, 163, 164, 7, 11, 0, 0, 164, 166, 7, 6, 0, 0, 165, 167, 7, 11, 0, 0,
|
||||
166, 165, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 36, 1, 0, 0, 0, 168, 169,
|
||||
7, 12, 0, 0, 169, 170, 7, 3, 0, 0, 170, 171, 7, 13, 0, 0, 171, 172, 7,
|
||||
3, 0, 0, 172, 173, 7, 10, 0, 0, 173, 174, 7, 14, 0, 0, 174, 38, 1, 0, 0,
|
||||
0, 175, 176, 7, 15, 0, 0, 176, 177, 7, 5, 0, 0, 177, 178, 7, 4, 0, 0, 178,
|
||||
179, 7, 6, 0, 0, 179, 180, 7, 16, 0, 0, 180, 181, 7, 1, 0, 0, 181, 183,
|
||||
7, 4, 0, 0, 182, 184, 7, 11, 0, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0,
|
||||
0, 0, 184, 40, 1, 0, 0, 0, 185, 186, 7, 1, 0, 0, 186, 187, 7, 4, 0, 0,
|
||||
187, 42, 1, 0, 0, 0, 188, 189, 7, 4, 0, 0, 189, 190, 7, 5, 0, 0, 190, 191,
|
||||
7, 6, 0, 0, 191, 44, 1, 0, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 4,
|
||||
0, 0, 194, 195, 7, 17, 0, 0, 195, 46, 1, 0, 0, 0, 196, 197, 7, 5, 0, 0,
|
||||
197, 198, 7, 12, 0, 0, 198, 48, 1, 0, 0, 0, 199, 200, 7, 18, 0, 0, 200,
|
||||
201, 7, 16, 0, 0, 201, 202, 7, 11, 0, 0, 202, 50, 1, 0, 0, 0, 203, 204,
|
||||
7, 18, 0, 0, 204, 205, 7, 16, 0, 0, 205, 206, 7, 11, 0, 0, 206, 207, 7,
|
||||
16, 0, 0, 207, 208, 7, 4, 0, 0, 208, 209, 7, 19, 0, 0, 209, 52, 1, 0, 0,
|
||||
0, 210, 211, 7, 18, 0, 0, 211, 212, 7, 16, 0, 0, 212, 213, 7, 11, 0, 0,
|
||||
213, 214, 7, 16, 0, 0, 214, 215, 7, 0, 0, 0, 215, 216, 7, 0, 0, 0, 216,
|
||||
54, 1, 0, 0, 0, 217, 218, 7, 6, 0, 0, 218, 219, 7, 12, 0, 0, 219, 220,
|
||||
7, 20, 0, 0, 220, 227, 7, 3, 0, 0, 221, 222, 7, 21, 0, 0, 222, 223, 7,
|
||||
16, 0, 0, 223, 224, 7, 0, 0, 0, 224, 225, 7, 11, 0, 0, 225, 227, 7, 3,
|
||||
0, 0, 226, 217, 1, 0, 0, 0, 226, 221, 1, 0, 0, 0, 227, 56, 1, 0, 0, 0,
|
||||
228, 229, 5, 36, 0, 0, 229, 233, 7, 22, 0, 0, 230, 232, 7, 23, 0, 0, 231,
|
||||
230, 1, 0, 0, 0, 232, 235, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233, 234,
|
||||
1, 0, 0, 0, 234, 58, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, 236, 237, 5, 123,
|
||||
0, 0, 237, 238, 5, 123, 0, 0, 238, 242, 1, 0, 0, 0, 239, 241, 7, 7, 0,
|
||||
0, 240, 239, 1, 0, 0, 0, 241, 244, 1, 0, 0, 0, 242, 240, 1, 0, 0, 0, 242,
|
||||
243, 1, 0, 0, 0, 243, 246, 1, 0, 0, 0, 244, 242, 1, 0, 0, 0, 245, 247,
|
||||
5, 46, 0, 0, 246, 245, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 1, 0,
|
||||
0, 0, 248, 252, 7, 22, 0, 0, 249, 251, 7, 23, 0, 0, 250, 249, 1, 0, 0,
|
||||
0, 251, 254, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, 253,
|
||||
258, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 257, 7, 7, 0, 0, 256, 255,
|
||||
1, 0, 0, 0, 257, 260, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 258, 259, 1, 0,
|
||||
0, 0, 259, 261, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 261, 262, 5, 125, 0,
|
||||
0, 262, 263, 5, 125, 0, 0, 263, 60, 1, 0, 0, 0, 264, 265, 5, 91, 0, 0,
|
||||
265, 266, 5, 91, 0, 0, 266, 270, 1, 0, 0, 0, 267, 269, 7, 7, 0, 0, 268,
|
||||
267, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271,
|
||||
1, 0, 0, 0, 271, 274, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 46,
|
||||
0, 0, 274, 273, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0,
|
||||
276, 280, 7, 22, 0, 0, 277, 279, 7, 23, 0, 0, 278, 277, 1, 0, 0, 0, 279,
|
||||
282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 286,
|
||||
1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 285, 7, 7, 0, 0, 284, 283, 1, 0,
|
||||
0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0,
|
||||
287, 289, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 93, 0, 0, 290,
|
||||
291, 5, 93, 0, 0, 291, 62, 1, 0, 0, 0, 292, 293, 7, 24, 0, 0, 293, 64,
|
||||
1, 0, 0, 0, 294, 296, 3, 63, 31, 0, 295, 294, 1, 0, 0, 0, 295, 296, 1,
|
||||
0, 0, 0, 296, 298, 1, 0, 0, 0, 297, 299, 3, 79, 39, 0, 298, 297, 1, 0,
|
||||
0, 0, 299, 300, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0,
|
||||
301, 309, 1, 0, 0, 0, 302, 306, 5, 46, 0, 0, 303, 305, 3, 79, 39, 0, 304,
|
||||
303, 1, 0, 0, 0, 305, 308, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 306, 307,
|
||||
1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 309, 302, 1, 0,
|
||||
0, 0, 309, 310, 1, 0, 0, 0, 310, 320, 1, 0, 0, 0, 311, 313, 7, 3, 0, 0,
|
||||
312, 314, 3, 63, 31, 0, 313, 312, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314,
|
||||
316, 1, 0, 0, 0, 315, 317, 3, 79, 39, 0, 316, 315, 1, 0, 0, 0, 317, 318,
|
||||
1, 0, 0, 0, 318, 316, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 321, 1, 0,
|
||||
0, 0, 320, 311, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 343, 1, 0, 0, 0,
|
||||
322, 324, 3, 63, 31, 0, 323, 322, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324,
|
||||
325, 1, 0, 0, 0, 325, 327, 5, 46, 0, 0, 326, 328, 3, 79, 39, 0, 327, 326,
|
||||
1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 329, 330, 1, 0,
|
||||
0, 0, 330, 340, 1, 0, 0, 0, 331, 333, 7, 3, 0, 0, 332, 334, 3, 63, 31,
|
||||
0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 336, 1, 0, 0, 0, 335,
|
||||
337, 3, 79, 39, 0, 336, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 336,
|
||||
1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 341, 1, 0, 0, 0, 340, 331, 1, 0,
|
||||
0, 0, 340, 341, 1, 0, 0, 0, 341, 343, 1, 0, 0, 0, 342, 295, 1, 0, 0, 0,
|
||||
342, 323, 1, 0, 0, 0, 343, 66, 1, 0, 0, 0, 344, 350, 5, 34, 0, 0, 345,
|
||||
349, 8, 25, 0, 0, 346, 347, 5, 92, 0, 0, 347, 349, 9, 0, 0, 0, 348, 345,
|
||||
1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0,
|
||||
0, 0, 350, 351, 1, 0, 0, 0, 351, 353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0,
|
||||
353, 365, 5, 34, 0, 0, 354, 360, 5, 39, 0, 0, 355, 359, 8, 26, 0, 0, 356,
|
||||
357, 5, 92, 0, 0, 357, 359, 9, 0, 0, 0, 358, 355, 1, 0, 0, 0, 358, 356,
|
||||
1, 0, 0, 0, 359, 362, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 360, 361, 1, 0,
|
||||
0, 0, 361, 363, 1, 0, 0, 0, 362, 360, 1, 0, 0, 0, 363, 365, 5, 39, 0, 0,
|
||||
364, 344, 1, 0, 0, 0, 364, 354, 1, 0, 0, 0, 365, 68, 1, 0, 0, 0, 366, 370,
|
||||
7, 27, 0, 0, 367, 369, 7, 28, 0, 0, 368, 367, 1, 0, 0, 0, 369, 372, 1,
|
||||
0, 0, 0, 370, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 70, 1, 0, 0,
|
||||
0, 372, 370, 1, 0, 0, 0, 373, 374, 5, 91, 0, 0, 374, 375, 5, 93, 0, 0,
|
||||
375, 72, 1, 0, 0, 0, 376, 377, 5, 91, 0, 0, 377, 378, 5, 42, 0, 0, 378,
|
||||
379, 5, 93, 0, 0, 379, 74, 1, 0, 0, 0, 380, 387, 3, 69, 34, 0, 381, 382,
|
||||
5, 46, 0, 0, 382, 386, 3, 69, 34, 0, 383, 386, 3, 71, 35, 0, 384, 386,
|
||||
3, 73, 36, 0, 385, 381, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 384, 1,
|
||||
0, 0, 0, 386, 389, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0,
|
||||
0, 388, 76, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 392, 7, 29, 0, 0, 391,
|
||||
390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 393, 394,
|
||||
1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 396, 6, 38, 0, 0, 396, 78, 1, 0,
|
||||
0, 0, 397, 398, 7, 30, 0, 0, 398, 80, 1, 0, 0, 0, 399, 401, 8, 31, 0, 0,
|
||||
400, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402,
|
||||
403, 1, 0, 0, 0, 403, 82, 1, 0, 0, 0, 39, 0, 96, 125, 144, 166, 183, 226,
|
||||
233, 242, 246, 252, 258, 270, 274, 280, 286, 295, 300, 306, 309, 313, 318,
|
||||
320, 323, 329, 333, 338, 340, 342, 348, 350, 358, 360, 364, 370, 385, 387,
|
||||
393, 402, 1, 6, 0, 0,
|
||||
}
|
||||
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
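(The two lines above are the standard ANTLR Go bootstrap: the serialized ATN blob is decoded at initialization into the automaton that drives the generated lexer, so regenerating the grammar only swaps the data table, not this code.)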
@@ -290,9 +325,12 @@ const (
	FilterQueryLexerHASANY      = 26
	FilterQueryLexerHASALL      = 27
	FilterQueryLexerBOOL        = 28
-	FilterQueryLexerNUMBER      = 29
-	FilterQueryLexerQUOTED_TEXT = 30
-	FilterQueryLexerKEY         = 31
-	FilterQueryLexerWS          = 32
-	FilterQueryLexerFREETEXT    = 33
+	FilterQueryLexerDOLLAR_VAR  = 29
+	FilterQueryLexerCURLY_VAR   = 30
+	FilterQueryLexerSQUARE_VAR  = 31
+	FilterQueryLexerNUMBER      = 32
+	FilterQueryLexerQUOTED_TEXT = 33
+	FilterQueryLexerKEY         = 34
+	FilterQueryLexerWS          = 35
+	FilterQueryLexerFREETEXT    = 36
)

@@ -56,6 +56,9 @@ type FilterQueryListener interface {
	// EnterValue is called when entering the value production.
	EnterValue(c *ValueContext)

+	// EnterVariable is called when entering the variable production.
+	EnterVariable(c *VariableContext)
+
	// EnterKey is called when entering the key production.
	EnterKey(c *KeyContext)

@@ -107,6 +110,9 @@ type FilterQueryListener interface {
	// ExitValue is called when exiting the value production.
	ExitValue(c *ValueContext)

+	// ExitVariable is called when exiting the variable production.
+	ExitVariable(c *VariableContext)
+
	// ExitKey is called when exiting the key production.
	ExitKey(c *KeyContext)
}

File diff suppressed because it is too large
@@ -56,6 +56,9 @@ type FilterQueryVisitor interface {
	// Visit a parse tree produced by FilterQueryParser#value.
	VisitValue(ctx *ValueContext) interface{}

+	// Visit a parse tree produced by FilterQueryParser#variable.
+	VisitVariable(ctx *VariableContext) interface{}
+
	// Visit a parse tree produced by FilterQueryParser#key.
	VisitKey(ctx *KeyContext) interface{}
}

@@ -18,6 +18,7 @@ type builderQuery[T any] struct {
	telemetryStore telemetrystore.TelemetryStore
	stmtBuilder    qbtypes.StatementBuilder[T]
	spec           qbtypes.QueryBuilderQuery[T]
+	variables      map[string]qbtypes.VariableItem

	fromMS uint64
	toMS   uint64
@@ -32,11 +33,13 @@ func newBuilderQuery[T any](
	spec qbtypes.QueryBuilderQuery[T],
	tr qbtypes.TimeRange,
	kind qbtypes.RequestType,
+	variables map[string]qbtypes.VariableItem,
) *builderQuery[T] {
	return &builderQuery[T]{
		telemetryStore: telemetryStore,
		stmtBuilder:    stmtBuilder,
		spec:           spec,
+		variables:      variables,
		fromMS:         tr.From,
		toMS:           tr.To,
		kind:           kind,
@@ -174,7 +177,7 @@ func (q *builderQuery[T]) Execute(ctx context.Context) (*qbtypes.Result, error)
		return q.executeWindowList(ctx)
	}

-	stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec)
+	stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec, q.variables)
	if err != nil {
		return nil, err
	}
@@ -278,7 +281,7 @@ func (q *builderQuery[T]) executeWindowList(ctx context.Context) (*qbtypes.Resul
	q.spec.Offset = 0
	q.spec.Limit = need

-	stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec)
+	stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec, q.variables)
	if err != nil {
		return nil, err
	}

@@ -3,8 +3,11 @@ package querier
import (
	"context"
	"fmt"
+	"slices"
	"sort"
+	"strings"

+	"github.com/SigNoz/govaluate"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -106,12 +109,15 @@ func postProcessBuilderQuery[T any](
	q *querier,
	result *qbtypes.Result,
	query qbtypes.QueryBuilderQuery[T],
-	_ *qbtypes.QueryRangeRequest,
+	req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {

	// Apply functions
	if len(query.Functions) > 0 {
-		result = q.applyFunctions(result, query.Functions)
+		// For builder queries, use the query's own step
+		step := query.StepInterval.Duration.Milliseconds()
+		functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
+		result = q.applyFunctions(result, functions)
	}

	return result
@@ -130,7 +136,10 @@ func postProcessMetricQuery(
	}

	if len(query.Functions) > 0 {
-		result = q.applyFunctions(result, query.Functions)
+		// For metric queries, use the query's own step
+		step := query.StepInterval.Duration.Milliseconds()
+		functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
+		result = q.applyFunctions(result, functions)
	}

	// Apply reduce to for scalar request type
@@ -222,6 +231,11 @@ func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes
			if result != nil {
				results[name] = result
			}
+		} else if req.RequestType == qbtypes.RequestTypeScalar {
+			result := q.processScalarFormula(ctx, results, formula, req)
+			if result != nil {
+				results[name] = result
+			}
		}
	}

@@ -233,7 +247,7 @@ func (q *querier) processTimeSeriesFormula(
	ctx context.Context,
	results map[string]*qbtypes.Result,
	formula qbtypes.QueryBuilderFormula,
-	_ *qbtypes.QueryRangeRequest,
+	req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {
	// Prepare time series data for formula evaluation
	timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)
@@ -278,12 +292,218 @@ func (q *querier) processTimeSeriesFormula(
	}

	if len(formula.Functions) > 0 {
-		result = q.applyFunctions(result, formula.Functions)
+		// For formulas, calculate GCD of steps from queries in the expression
+		step := q.calculateFormulaStep(formula.Expression, req)
+		functions := q.prepareFillZeroArgsWithStep(formula.Functions, req, step)
+		result = q.applyFunctions(result, functions)
	}

	return result
}

+// processScalarFormula handles formula evaluation for scalar data
+//
+// NOTE: This implementation has a known limitation with formulas that reference
+// specific aggregations by index (e.g., "A.0", "A.1") or multiple aggregations
+// from the same query (e.g., "A.0 * 2 + A.1"). The FormulaEvaluator's series
+// matching logic doesn't work correctly when converting scalar data to time series
+// format for these cases.
+//
+// Currently supported:
+// - Formulas between different queries: "A / B", "A * 2 + B"
+// - Simple references: "A" (defaults to first aggregation)
+//
+// Not supported:
+// - Indexed aggregation references: "A.0", "A.1"
+// - Multiple aggregations from same query: "A.0 + A.1"
+//
+// To properly support this, we would need to either:
+// 1. Fix the FormulaEvaluator's series lookup logic for scalar-converted data
+// 2. Implement a dedicated scalar formula evaluator
+func (q *querier) processScalarFormula(
+	ctx context.Context,
+	results map[string]*qbtypes.Result,
+	formula qbtypes.QueryBuilderFormula,
+	req *qbtypes.QueryRangeRequest,
+) *qbtypes.Result {
+	// Convert scalar data to time series format with zero timestamp
+	timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)
+
+	for queryName, result := range results {
+		if scalarData, ok := result.Value.(*qbtypes.ScalarData); ok {
+			// Convert scalar to time series
+			tsData := &qbtypes.TimeSeriesData{
+				QueryName:    scalarData.QueryName,
+				Aggregations: make([]*qbtypes.AggregationBucket, 0),
+			}
+
+			// Find aggregation columns
+			aggColumns := make(map[int]int) // aggregation index -> column index
+			for colIdx, col := range scalarData.Columns {
+				if col.Type == qbtypes.ColumnTypeAggregation {
+					aggColumns[int(col.AggregationIndex)] = colIdx
+				}
+			}
+
+			// Group rows by their label sets
+			type labeledRowData struct {
+				labels []*qbtypes.Label
+				values map[int]float64 // aggregation index -> value
+			}
+
+			// First pass: group all rows by their label combination
+			rowsByLabels := make(map[string]*labeledRowData)
+			for _, row := range scalarData.Data {
+				// Build labels from group columns
+				labels := make([]*qbtypes.Label, 0)
+				for i, col := range scalarData.Columns {
+					if col.Type == qbtypes.ColumnTypeGroup && i < len(row) {
+						labels = append(labels, &qbtypes.Label{
+							Key:   col.TelemetryFieldKey,
+							Value: row[i],
+						})
+					}
+				}
+
+				labelKey := qbtypes.GetUniqueSeriesKey(labels)
+
+				// Get or create row data
+				rowData, exists := rowsByLabels[labelKey]
+				if !exists {
+					rowData = &labeledRowData{
+						labels: labels,
+						values: make(map[int]float64),
+					}
+					rowsByLabels[labelKey] = rowData
+				}
+
+				// Store all aggregation values from this row
+				for aggIdx, colIdx := range aggColumns {
+					if colIdx < len(row) {
+						if val, ok := toFloat64(row[colIdx]); ok {
+							rowData.values[aggIdx] = val
+						}
+					}
+				}
+			}
+
+			// Get sorted label keys for consistent ordering
+			labelKeys := make([]string, 0, len(rowsByLabels))
+			for key := range rowsByLabels {
+				labelKeys = append(labelKeys, key)
+			}
+			slices.Sort(labelKeys)
+
+			// Create aggregation buckets
+			aggIndices := make([]int, 0, len(aggColumns))
+			for aggIdx := range aggColumns {
+				aggIndices = append(aggIndices, aggIdx)
+			}
+			slices.Sort(aggIndices)
+
+			// For each aggregation, create a bucket with series in consistent order
+			for _, aggIdx := range aggIndices {
+				colIdx := aggColumns[aggIdx]
+
+				bucket := &qbtypes.AggregationBucket{
+					Index:  aggIdx,
+					Alias:  scalarData.Columns[colIdx].Name,
+					Meta:   scalarData.Columns[colIdx].Meta,
+					Series: make([]*qbtypes.TimeSeries, 0),
+				}
+
+				// Create series in the same order (by label key)
+				for _, labelKey := range labelKeys {
+					rowData := rowsByLabels[labelKey]
+
+					// Only create series if we have a value for this aggregation
+					if val, exists := rowData.values[aggIdx]; exists {
+						series := &qbtypes.TimeSeries{
+							Labels: rowData.labels,
+							Values: []*qbtypes.TimeSeriesValue{{
+								Timestamp: 0,
+								Value:     val,
+							}},
+						}
+						bucket.Series = append(bucket.Series, series)
+					}
+				}
+
+				tsData.Aggregations = append(tsData.Aggregations, bucket)
+			}
+
+			timeSeriesData[queryName] = tsData
+		}
+	}
+
+	// Create formula evaluator
+	canDefaultZero := make(map[string]bool)
+	evaluator, err := qbtypes.NewFormulaEvaluator(formula.Expression, canDefaultZero)
+	if err != nil {
+		q.logger.ErrorContext(ctx, "failed to create formula evaluator", "error", err, "formula", formula.Name)
+		return nil
+	}
+
+	// Evaluate the formula
+	formulaSeries, err := evaluator.EvaluateFormula(timeSeriesData)
+	if err != nil {
+		q.logger.ErrorContext(ctx, "failed to evaluate formula", "error", err, "formula", formula.Name)
+		return nil
+	}
+
+	// Convert back to scalar format
+	scalarResult := &qbtypes.ScalarData{
+		QueryName: formula.Name,
+		Columns:   make([]*qbtypes.ColumnDescriptor, 0),
+		Data:      make([][]any, 0),
+	}
+
+	// Build columns from first series
+	if len(formulaSeries) > 0 && len(formulaSeries[0].Labels) > 0 {
+		// Add group columns
+		for _, label := range formulaSeries[0].Labels {
+			scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
+				TelemetryFieldKey: label.Key,
+				QueryName:         formula.Name,
+				Type:              qbtypes.ColumnTypeGroup,
+			})
+		}
+	}

+	// Add result column
+	scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
+		TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "__result"},
+		QueryName:         formula.Name,
+		AggregationIndex:  0,
+		Type:              qbtypes.ColumnTypeAggregation,
+	})
+
+	// Build rows
+	for _, series := range formulaSeries {
+		row := make([]any, len(scalarResult.Columns))
+
+		// Add group values
+		for i, label := range series.Labels {
+			if i < len(row)-1 {
+				row[i] = label.Value
+			}
+		}
+
+		// Add aggregation value (from single value at timestamp 0)
+		if len(series.Values) > 0 {
+			row[len(row)-1] = series.Values[0].Value
+		} else {
+			row[len(row)-1] = "n/a"
+		}
+
+		scalarResult.Data = append(scalarResult.Data, row)
+	}
+
+	return &qbtypes.Result{
+		Value: scalarResult,
+	}
+}

// filterDisabledQueries removes results for disabled queries
func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]*qbtypes.Result {
	filtered := make(map[string]*qbtypes.Result)
@@ -650,3 +870,98 @@ func toFloat64(v any) (float64, bool) {
	}
	return 0, false
}
+
+// gcd calculates the greatest common divisor
+func gcd(a, b int64) int64 {
+	if b == 0 {
+		return a
+	}
+	return gcd(b, a%b)
+}
+
+// prepareFillZeroArgsWithStep prepares fillZero function arguments with a specific step
+func (q *querier) prepareFillZeroArgsWithStep(functions []qbtypes.Function, req *qbtypes.QueryRangeRequest, step int64) []qbtypes.Function {
+	// Check if we need to modify any functions
+	needsCopy := false
+	for _, fn := range functions {
+		if fn.Name == qbtypes.FunctionNameFillZero && len(fn.Args) == 0 {
+			needsCopy = true
+			break
+		}
+	}
+
+	// If no fillZero functions need arguments, return original slice
+	if !needsCopy {
+		return functions
+	}
+
+	// Only copy if we need to modify
+	updatedFunctions := make([]qbtypes.Function, len(functions))
+	copy(updatedFunctions, functions)
+
+	// Process each function
+	for i, fn := range updatedFunctions {
+		if fn.Name == qbtypes.FunctionNameFillZero && len(fn.Args) == 0 {
+			// Set the arguments: start, end, step
+			fn.Args = []qbtypes.FunctionArg{
+				{Value: float64(req.Start)},
+				{Value: float64(req.End)},
+				{Value: float64(step)},
+			}
+			updatedFunctions[i] = fn
+		}
+	}
+
+	return updatedFunctions
+}
+
+// calculateFormulaStep calculates the GCD of steps from queries referenced in the formula
+func (q *querier) calculateFormulaStep(expression string, req *qbtypes.QueryRangeRequest) int64 {
+	// Use govaluate to parse the expression and extract variables
+	// This is the same library used by FormulaEvaluator
+	parsedExpr, err := govaluate.NewEvaluableExpression(expression)
+	if err != nil {
+		// If we can't parse the expression, use default
+		return 60000
+	}
+
+	// Get the variables from the parsed expression
+	variables := parsedExpr.Vars()
+
+	// Extract base query names (e.g., "A" from "A.0" or "A.my_alias")
+	queryNames := make(map[string]bool)
+	for _, variable := range variables {
+		// Split by "." to get the base query name
+		parts := strings.Split(variable, ".")
+		if len(parts) > 0 {
+			queryNames[parts[0]] = true
+		}
+	}
+
+	var steps []int64
+
+	// Collect steps only from queries referenced in the formula
+	for _, query := range req.CompositeQuery.Queries {
+		info := getqueryInfo(query.Spec)
+		// Check if this query is referenced in the formula
+		if !info.Disabled && queryNames[info.Name] && info.Step.Duration > 0 {
+			stepMs := info.Step.Duration.Milliseconds()
+			if stepMs > 0 {
+				steps = append(steps, stepMs)
+			}
+		}
+	}
+
+	// If no steps found, use a default (60 seconds)
+	if len(steps) == 0 {
+		return 60000
+	}
+
+	// Calculate GCD of all steps
+	result := steps[0]
+	for i := 1; i < len(steps); i++ {
+		result = gcd(result, steps[i])
+	}
+
+	return result
+}
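
Two quick sketches of the helpers added in this hunk, mirroring the tests in the next file (the literal values are illustrative). First, prepareFillZeroArgsWithStep injects the request window and the resolved step into an argument-less fillZero:

	q := &querier{}
	req := &qbtypes.QueryRangeRequest{Start: 1000000, End: 2000000}
	fns := []qbtypes.Function{{Name: qbtypes.FunctionNameFillZero}}
	fns = q.prepareFillZeroArgsWithStep(fns, req, 30000)
	// fns[0].Args now holds 1000000, 2000000 and 30000 as float64 values.

Second, calculateFormulaStep reduces the steps of the queries referenced in the expression by GCD: for "(A + B) / C" with steps of 60 s, 120 s and 180 s it computes gcd(60000, gcd(120000, 180000)) = 60000, so the formula is filled at a 60 s step.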

pkg/querier/postprocess_fillzero_test.go (new file, 230 lines)
@@ -0,0 +1,230 @@
package querier

import (
	"testing"
	"time"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func TestPrepareFillZeroArgsWithStep(t *testing.T) {
	q := &querier{}

	req := &qbtypes.QueryRangeRequest{
		Start: 1000000,
		End:   2000000,
	}

	tests := []struct {
		name      string
		functions []qbtypes.Function
		step      int64
		checkArgs bool
	}{
		{
			name: "fillZero without args",
			functions: []qbtypes.Function{
				{
					Name: qbtypes.FunctionNameFillZero,
					Args: []qbtypes.FunctionArg{},
				},
			},
			step:      30000, // 30 seconds
			checkArgs: true,
		},
		{
			name: "fillZero with existing args",
			functions: []qbtypes.Function{
				{
					Name: qbtypes.FunctionNameFillZero,
					Args: []qbtypes.FunctionArg{
						{Value: 500000.0},
						{Value: 1500000.0},
						{Value: 15000.0},
					},
				},
			},
			step:      60000,
			checkArgs: false, // Should not modify existing args
		},
		{
			name: "other function should not be modified",
			functions: []qbtypes.Function{
				{
					Name: qbtypes.FunctionNameAbsolute,
					Args: []qbtypes.FunctionArg{},
				},
			},
			step:      60000,
			checkArgs: false,
		},
		{
			name: "no copy when fillZero already has args",
			functions: []qbtypes.Function{
				{
					Name: qbtypes.FunctionNameFillZero,
					Args: []qbtypes.FunctionArg{
						{Value: 1000.0},
						{Value: 2000.0},
						{Value: 500.0},
					},
				},
				{
					Name: qbtypes.FunctionNameAbsolute,
				},
			},
			step:      60000,
			checkArgs: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := q.prepareFillZeroArgsWithStep(tt.functions, req, tt.step)

			if len(result) != len(tt.functions) {
				t.Fatalf("Expected %d functions, got %d", len(tt.functions), len(result))
			}

			// Check if no copy was made when not needed
			if tt.name == "no copy when fillZero already has args" || tt.name == "other function should not be modified" {
				// Verify that the result is the same slice (no copy)
				if &result[0] != &tt.functions[0] {
					t.Errorf("Expected no copy, but a copy was made")
				}
			}

			for _, fn := range result {
				if fn.Name == qbtypes.FunctionNameFillZero && tt.checkArgs {
					if len(fn.Args) != 3 {
						t.Errorf("Expected 3 args for fillZero, got %d", len(fn.Args))
					}
					// Check start
					if start, ok := fn.Args[0].Value.(float64); !ok || start != float64(req.Start) {
						t.Errorf("Expected start %f, got %v", float64(req.Start), fn.Args[0].Value)
					}
					// Check end
					if end, ok := fn.Args[1].Value.(float64); !ok || end != float64(req.End) {
						t.Errorf("Expected end %f, got %v", float64(req.End), fn.Args[1].Value)
					}
					// Check step
					if step, ok := fn.Args[2].Value.(float64); !ok || step != float64(tt.step) {
						t.Errorf("Expected step %f, got %v", float64(tt.step), fn.Args[2].Value)
					}
				}
			}
		})
	}
}

func TestCalculateFormulaStep(t *testing.T) {
	tests := []struct {
		name       string
		expression string
		req        *qbtypes.QueryRangeRequest
		expected   int64
	}{
		{
			name:       "single query reference",
			expression: "A * 2",
			req: &qbtypes.QueryRangeRequest{
				CompositeQuery: qbtypes.CompositeQuery{
					Queries: []qbtypes.QueryEnvelope{
						{
							Type: qbtypes.QueryTypeBuilder,
							Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
								Name:         "A",
								StepInterval: qbtypes.Step{Duration: 60 * time.Second},
							},
						},
						{
							Type: qbtypes.QueryTypeBuilder,
							Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
								Name:         "B",
								StepInterval: qbtypes.Step{Duration: 120 * time.Second},
							},
						},
					},
				},
			},
			expected: 60000, // Only A is referenced
		},
		{
			name:       "multiple query references",
			expression: "A + B",
			req: &qbtypes.QueryRangeRequest{
				CompositeQuery: qbtypes.CompositeQuery{
					Queries: []qbtypes.QueryEnvelope{
						{
							Type: qbtypes.QueryTypeBuilder,
							Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
								Name:         "A",
								StepInterval: qbtypes.Step{Duration: 30 * time.Second},
							},
						},
						{
							Type: qbtypes.QueryTypeBuilder,
							Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
								Name:         "B",
								StepInterval: qbtypes.Step{Duration: 60 * time.Second},
							},
						},
					},
				},
			},
			expected: 30000, // GCD of 30s and 60s
		},
		{
			name:       "complex expression",
			expression: "(A + B) / C",
			req: &qbtypes.QueryRangeRequest{
				CompositeQuery: qbtypes.CompositeQuery{
					Queries: []qbtypes.QueryEnvelope{
						{
							Type: qbtypes.QueryTypeBuilder,
							Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
								Name:         "A",
								StepInterval: qbtypes.Step{Duration: 60 * time.Second},
							},
						},
						{
							Type: qbtypes.QueryTypeBuilder,
							Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
								Name:         "B",
								StepInterval: qbtypes.Step{Duration: 120 * time.Second},
							},
						},
						{
							Type: qbtypes.QueryTypeBuilder,
							Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
								Name:         "C",
								StepInterval: qbtypes.Step{Duration: 180 * time.Second},
							},
						},
					},
				},
			},
			expected: 60000, // GCD of 60s, 120s, and 180s
		},
		{
			name:       "no query references",
			expression: "100",
			req: &qbtypes.QueryRangeRequest{
				CompositeQuery: qbtypes.CompositeQuery{
					Queries: []qbtypes.QueryEnvelope{},
				},
			},
			expected: 60000, // Default
		},
	}

	q := &querier{}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := q.calculateFormulaStep(tt.expression, tt.req)
			if result != tt.expected {
				t.Errorf("Expected step %d, got %d", tt.expected, result)
			}
		})
	}
}
@@ -7,10 +7,12 @@ import (
	"slices"
	"strconv"
	"sync"
+	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/prometheus"
+	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -107,7 +109,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype

	// First pass: collect all metric names that need temporality
	metricNames := make([]string, 0)
-	for _, query := range req.CompositeQuery.Queries {
+	for idx, query := range req.CompositeQuery.Queries {
		if query.Type == qbtypes.QueryTypeBuilder {
			if spec, ok := query.Spec.(qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]); ok {
				for _, agg := range spec.Aggregations {
@@ -116,6 +118,44 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
				}
			}
		}
+		switch spec := query.Spec.(type) {
+		case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
+			if spec.StepInterval.Seconds() == 0 {
+				spec.StepInterval = qbtypes.Step{
+					Duration: time.Second * time.Duration(querybuilder.RecommendedStepInterval(req.Start, req.End)),
+				}
+			}
+			if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
+				spec.StepInterval = qbtypes.Step{
+					Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
+				}
+			}
+			req.CompositeQuery.Queries[idx].Spec = spec
+		case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
+			if spec.StepInterval.Seconds() == 0 {
+				spec.StepInterval = qbtypes.Step{
+					Duration: time.Second * time.Duration(querybuilder.RecommendedStepInterval(req.Start, req.End)),
+				}
+			}
+			if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
+				spec.StepInterval = qbtypes.Step{
+					Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
+				}
+			}
+			req.CompositeQuery.Queries[idx].Spec = spec
+		case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
+			if spec.StepInterval.Seconds() == 0 {
+				spec.StepInterval = qbtypes.Step{
+					Duration: time.Second * time.Duration(querybuilder.RecommendedStepIntervalForMetric(req.Start, req.End)),
+				}
+			}
+			if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)) {
+				spec.StepInterval = qbtypes.Step{
+					Duration: time.Second * time.Duration(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)),
+				}
+			}
+			req.CompositeQuery.Queries[idx].Spec = spec
+		}
	}
}

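All three cases in the switch above apply the same clamp per signal type. A compacted sketch of the pattern (the helper extraction is ours, and it assumes the querybuilder functions return an integer number of seconds, as their use above suggests):

	// Sketch only: normalize a step against recommended/minimum bounds,
	// as done inline for trace, log and metric specs in the switch above.
	normalize := func(step qbtypes.Step, recommended, minimum int64) qbtypes.Step {
		if step.Seconds() == 0 {
			step = qbtypes.Step{Duration: time.Second * time.Duration(recommended)}
		}
		if step.Seconds() < float64(minimum) {
			step = qbtypes.Step{Duration: time.Second * time.Duration(minimum)}
		}
		return step
	}
	spec.StepInterval = normalize(spec.StepInterval,
		querybuilder.RecommendedStepInterval(req.Start, req.End),
		querybuilder.MinAllowedStepInterval(req.Start, req.End))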
@@ -157,13 +197,13 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
		case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
			spec.ShiftBy = extractShiftFromBuilderQuery(spec)
			timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
-			bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType)
+			bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
			queries[spec.Name] = bq
			steps[spec.Name] = spec.StepInterval
		case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
			spec.ShiftBy = extractShiftFromBuilderQuery(spec)
			timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
-			bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType)
+			bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
			queries[spec.Name] = bq
			steps[spec.Name] = spec.StepInterval
		case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
@@ -176,7 +216,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
			}
			spec.ShiftBy = extractShiftFromBuilderQuery(spec)
			timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
-			bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType)
+			bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
			queries[spec.Name] = bq
			steps[spec.Name] = spec.StepInterval
		default:
@@ -361,15 +401,15 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
	case *builderQuery[qbtypes.TraceAggregation]:
		qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
		adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
-		return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
+		return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
	case *builderQuery[qbtypes.LogAggregation]:
		qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
		adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
-		return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
+		return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
	case *builderQuery[qbtypes.MetricAggregation]:
		qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
		adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
-		return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
+		return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
	default:
		return nil
	}

@@ -79,6 +79,7 @@ func newProvider(
		traceConditionBuilder,
		resourceFilterStmtBuilder,
		traceAggExprRewriter,
+		telemetryStore,
	)

	// Create log statement builder

@@ -1584,7 +1584,6 @@ func (aH *APIHandler) registerEvent(w http.ResponseWriter, r *http.Request) {
	switch request.EventType {
	case model.TrackEvent:
-		telemetry.GetInstance().SendEvent(request.EventName, request.Attributes, claims.Email, request.RateLimited, true)
		aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, request.EventName, request.Attributes)
	case model.GroupEvent:
		telemetry.GetInstance().SendGroupEvent(request.Attributes, claims.Email)
	case model.IdentifyEvent:
@@ -2024,7 +2023,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
		return
	}

-	user, errv2 := aH.Signoz.Modules.User.Register(r.Context(), &req)
+	_, errv2 := aH.Signoz.Modules.User.Register(r.Context(), &req)
	if errv2 != nil {
		render.Error(w, errv2)
		return
@@ -2034,7 +2033,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
	// from here onwards, we expect admin (owner) to invite other users.
	aH.SetupCompleted = true

-	aH.Respond(w, user)
+	aH.Respond(w, nil)
}

func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {

@@ -65,19 +65,11 @@ func Parse(filters *v3.FilterSet) (string, error) {
		case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
			// accustom log filters like `body.log.message EXISTS` into EXPR language
			// where User is attempting to check for keys present in JSON log body
-			if strings.HasPrefix(v.Key.Key, "body.") {
-				filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
-			} else if typ := getTypeName(v.Key.Type); typ != "" {
-				filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
+			key, found := strings.CutPrefix(v.Key.Key, "body.")
+			if found {
+				filter = fmt.Sprintf("%s %s %s", exprFormattedValue(key), logOperatorsToExpr[v.Operator], "fromJSON(body)")
			} else {
-				// if type of key is not available; is considered as TOP LEVEL key in OTEL Log Data model hence
-				// switch Exist and Not Exists operators with NOT EQUAL and EQUAL respectively
-				operator := v3.FilterOperatorNotEqual
-				if v.Operator == v3.FilterOperatorNotExists {
-					operator = v3.FilterOperatorEqual
-				}
-
-				filter = fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])
+				filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))
			}
		default:
			filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))

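The rewrite above leans on strings.CutPrefix (added in Go 1.20), which returns the trimmed string together with a found flag and so collapses the previous HasPrefix/TrimPrefix pair into one call:

	key, found := strings.CutPrefix("body.log.message", "body.")
	// key == "log.message", found == true; for keys without the "body."
	// prefix, found is false and Parse falls through to the getTypeName branch.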
@@ -141,34 +141,6 @@ var testCases = []struct {
		}},
		Expr: `"key" not in attributes`,
	},
-	{
-		Name: "trace_id not exists",
-		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
-			{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
-		}},
-		Expr: `trace_id == nil`,
-	},
-	{
-		Name: "trace_id exists",
-		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
-			{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
-		}},
-		Expr: `trace_id != nil`,
-	},
-	{
-		Name: "span_id not exists",
-		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
-			{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
-		}},
-		Expr: `span_id == nil`,
-	},
-	{
-		Name: "span_id exists",
-		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
-			{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
-		}},
-		Expr: `span_id != nil`,
-	},
	{
		Name: "Multi filter",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{

pkg/querybuilder/having_rewriter.go (new file, 191 lines)
@@ -0,0 +1,191 @@
package querybuilder

import (
	"fmt"
	"regexp"
	"strings"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// HavingExpressionRewriter rewrites having expressions to use the correct SQL column names
type HavingExpressionRewriter struct {
	// Map of user-friendly names to SQL column names
	columnMap map[string]string
}

// NewHavingExpressionRewriter creates a new having expression rewriter
func NewHavingExpressionRewriter() *HavingExpressionRewriter {
	return &HavingExpressionRewriter{
		columnMap: make(map[string]string),
	}
}

// RewriteForTraces rewrites having expression for trace queries
func (r *HavingExpressionRewriter) RewriteForTraces(expression string, aggregations []qbtypes.TraceAggregation) string {
	r.buildTraceColumnMap(aggregations)
	return r.rewriteExpression(expression)
}

// RewriteForLogs rewrites having expression for log queries
func (r *HavingExpressionRewriter) RewriteForLogs(expression string, aggregations []qbtypes.LogAggregation) string {
	r.buildLogColumnMap(aggregations)
	return r.rewriteExpression(expression)
}

// RewriteForMetrics rewrites having expression for metric queries
func (r *HavingExpressionRewriter) RewriteForMetrics(expression string, aggregations []qbtypes.MetricAggregation) string {
	r.buildMetricColumnMap(aggregations)
	return r.rewriteExpression(expression)
}

// buildTraceColumnMap builds the column mapping for trace aggregations
func (r *HavingExpressionRewriter) buildTraceColumnMap(aggregations []qbtypes.TraceAggregation) {
	r.columnMap = make(map[string]string)

	for idx, agg := range aggregations {
		sqlColumn := fmt.Sprintf("__result_%d", idx)

		// Map alias if present
		if agg.Alias != "" {
			r.columnMap[agg.Alias] = sqlColumn
		}

		// Map expression
		r.columnMap[agg.Expression] = sqlColumn

		// Map __result{number} format
		r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn

		// For single aggregation, also map __result
		if len(aggregations) == 1 {
			r.columnMap["__result"] = sqlColumn
		}
	}
}

// buildLogColumnMap builds the column mapping for log aggregations
func (r *HavingExpressionRewriter) buildLogColumnMap(aggregations []qbtypes.LogAggregation) {
	r.columnMap = make(map[string]string)

	for idx, agg := range aggregations {
		sqlColumn := fmt.Sprintf("__result_%d", idx)

		// Map alias if present
		if agg.Alias != "" {
			r.columnMap[agg.Alias] = sqlColumn
		}

		// Map expression
		r.columnMap[agg.Expression] = sqlColumn

		// Map __result{number} format
		r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn

		// For single aggregation, also map __result
		if len(aggregations) == 1 {
			r.columnMap["__result"] = sqlColumn
		}
	}
}

// buildMetricColumnMap builds the column mapping for metric aggregations
func (r *HavingExpressionRewriter) buildMetricColumnMap(aggregations []qbtypes.MetricAggregation) {
	r.columnMap = make(map[string]string)

	// For metrics, we typically have a single aggregation that results in "value" column
	// But we still need to handle the mapping for consistency

	for idx, agg := range aggregations {
		// For metrics, the column is usually "value" in the final select
		sqlColumn := "value"

		// Map different metric formats
		metricName := agg.MetricName

		// Don't map the plain metric name - it's ambiguous
		// r.columnMap[metricName] = sqlColumn

		// Map with space aggregation
		if agg.SpaceAggregation.StringValue() != "" {
			r.columnMap[fmt.Sprintf("%s(%s)", agg.SpaceAggregation.StringValue(), metricName)] = sqlColumn
		}

		// Map with time aggregation
		if agg.TimeAggregation.StringValue() != "" {
			r.columnMap[fmt.Sprintf("%s(%s)", agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
		}

		// Map with both aggregations
		if agg.TimeAggregation.StringValue() != "" && agg.SpaceAggregation.StringValue() != "" {
			r.columnMap[fmt.Sprintf("%s(%s(%s))", agg.SpaceAggregation.StringValue(), agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
		}

		// If no aggregations specified, map the plain metric name
		if agg.TimeAggregation.StringValue() == "" && agg.SpaceAggregation.StringValue() == "" {
			r.columnMap[metricName] = sqlColumn
		}

		// Map __result format
		r.columnMap["__result"] = sqlColumn
		r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
	}
}

// rewriteExpression rewrites the having expression using the column map
func (r *HavingExpressionRewriter) rewriteExpression(expression string) string {
	// First, handle quoted strings to avoid replacing within them
	quotedStrings := make(map[string]string)
	quotePattern := regexp.MustCompile(`'[^']*'|"[^"]*"`)
	quotedIdx := 0

	expression = quotePattern.ReplaceAllStringFunc(expression, func(match string) string {
		placeholder := fmt.Sprintf("__QUOTED_%d__", quotedIdx)
		quotedStrings[placeholder] = match
		quotedIdx++
		return placeholder
	})

	// Sort column mappings by length (descending) to handle longer names first
	// This prevents partial replacements (e.g., "count" being replaced in "count_distinct")
	type mapping struct {
		from string
		to   string
	}

	mappings := make([]mapping, 0, len(r.columnMap))
	for from, to := range r.columnMap {
		mappings = append(mappings, mapping{from: from, to: to})
	}

	// Sort by length descending
	for i := 0; i < len(mappings); i++ {
		for j := i + 1; j < len(mappings); j++ {
			if len(mappings[j].from) > len(mappings[i].from) {
				mappings[i], mappings[j] = mappings[j], mappings[i]
			}
		}
	}

	// Apply replacements
	for _, m := range mappings {
		// For function expressions (containing parentheses), we need special handling
		if strings.Contains(m.from, "(") {
			// Escape special regex characters in the function name
			escapedFrom := regexp.QuoteMeta(m.from)
			pattern := regexp.MustCompile(`\b` + escapedFrom)
			expression = pattern.ReplaceAllString(expression, m.to)
		} else {
			// Use word boundaries to ensure we're replacing complete identifiers
			pattern := regexp.MustCompile(`\b` + regexp.QuoteMeta(m.from) + `\b`)
			expression = pattern.ReplaceAllString(expression, m.to)
		}
	}

	// Restore quoted strings
	for placeholder, original := range quotedStrings {
		expression = strings.Replace(expression, placeholder, original, 1)
	}

	return expression
}
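
A minimal usage sketch for the rewriter (the alias is illustrative): with a single aliased trace aggregation, both the alias and the raw expression map to the same positional SQL column:

	rw := NewHavingExpressionRewriter()
	aggs := []qbtypes.TraceAggregation{{Expression: "count()", Alias: "total"}}
	having := rw.RewriteForTraces("total > 100 AND count() < 1000", aggs)
	// having == "__result_0 > 100 AND __result_0 < 1000"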

pkg/querybuilder/having_rewriter_test.go (new file, 281 lines)
@@ -0,0 +1,281 @@
package querybuilder

import (
	"testing"

	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func TestHavingExpressionRewriter_RewriteForTraces(t *testing.T) {
	tests := []struct {
		name         string
		expression   string
		aggregations []qbtypes.TraceAggregation
		expected     string
	}{
		{
			name:       "single aggregation with __result",
			expression: "__result > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: ""},
			},
			expected: "__result_0 > 100",
		},
		{
			name:       "single aggregation with alias",
			expression: "total_count > 100 AND total_count < 1000",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: "total_count"},
			},
			expected: "__result_0 > 100 AND __result_0 < 1000",
		},
		{
			name:       "multiple aggregations with aliases",
			expression: "error_count > 10 OR success_count > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "countIf(status = 'error')", Alias: "error_count"},
				{Expression: "countIf(status = 'success')", Alias: "success_count"},
			},
			expected: "__result_0 > 10 OR __result_1 > 100",
		},
		{
			name:       "expression reference",
			expression: "count() > 50",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: ""},
			},
			expected: "__result_0 > 50",
		},
		{
			name:       "__result{number} format",
			expression: "__result0 > 10 AND __result1 < 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: ""},
				{Expression: "sum(duration)", Alias: ""},
			},
			expected: "__result_0 > 10 AND __result_1 < 100",
		},
		{
			name:       "complex expression with parentheses",
			expression: "(total > 100 AND errors < 10) OR (total < 50 AND errors = 0)",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: "total"},
				{Expression: "countIf(error = true)", Alias: "errors"},
			},
			expected: "(__result_0 > 100 AND __result_1 < 10) OR (__result_0 < 50 AND __result_1 = 0)",
		},
		{
			name:       "with quoted strings",
			expression: "status = 'active' AND count > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "status", Alias: "status"},
				{Expression: "count()", Alias: "count"},
			},
			expected: "__result_0 = 'active' AND __result_1 > 100",
		},
		{
			name:       "avoid partial replacements",
			expression: "count_distinct > 10 AND count > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count_distinct(user_id)", Alias: "count_distinct"},
				{Expression: "count()", Alias: "count"},
			},
			expected: "__result_0 > 10 AND __result_1 > 100",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rewriter := NewHavingExpressionRewriter()
			result := rewriter.RewriteForTraces(tt.expression, tt.aggregations)
			if result != tt.expected {
				t.Errorf("Expected: %s, Got: %s", tt.expected, result)
			}
		})
	}
}

func TestHavingExpressionRewriter_RewriteForLogs(t *testing.T) {
	tests := []struct {
		name         string
		expression   string
		aggregations []qbtypes.LogAggregation
		expected     string
	}{
		{
			name:       "single aggregation with __result",
			expression: "__result > 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
			},
			expected: "__result_0 > 1000",
		},
		{
			name:       "multiple aggregations with aliases and expressions",
			expression: "total_logs > 1000 AND avg(size) < 1024",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: "total_logs"},
				{Expression: "avg(size)", Alias: ""},
			},
			expected: "__result_0 > 1000 AND __result_1 < 1024",
		},
		{
			name:       "complex boolean expression",
			expression: "(error_logs > 100 AND error_logs < 1000) OR warning_logs > 5000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "countIf(level = 'error')", Alias: "error_logs"},
				{Expression: "countIf(level = 'warning')", Alias: "warning_logs"},
			},
			expected: "(__result_0 > 100 AND __result_0 < 1000) OR __result_1 > 5000",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rewriter := NewHavingExpressionRewriter()
			result := rewriter.RewriteForLogs(tt.expression, tt.aggregations)
			if result != tt.expected {
				t.Errorf("Expected: %s, Got: %s", tt.expected, result)
			}
		})
	}
}

func TestHavingExpressionRewriter_RewriteForMetrics(t *testing.T) {
	tests := []struct {
		name         string
		expression   string
		aggregations []qbtypes.MetricAggregation
		expected     string
	}{
		{
			name:       "metric with space aggregation",
			expression: "avg(cpu_usage) > 80",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:       "cpu_usage",
					SpaceAggregation: metrictypes.SpaceAggregationAvg,
				},
			},
			expected: "value > 80",
		},
		{
			name:       "metric with time aggregation",
			expression: "rate(requests) > 1000",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:      "requests",
					TimeAggregation: metrictypes.TimeAggregationRate,
				},
			},
			expected: "value > 1000",
		},
		{
			name:       "metric with both aggregations",
			expression: "sum(rate(requests)) > 5000",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:       "requests",
					TimeAggregation:  metrictypes.TimeAggregationRate,
					SpaceAggregation: metrictypes.SpaceAggregationSum,
				},
			},
			expected: "value > 5000",
		},
		{
			name:       "metric with __result",
			expression: "__result < 100",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:       "memory_usage",
					SpaceAggregation: metrictypes.SpaceAggregationMax,
				},
			},
			expected: "value < 100",
		},
|
||||
{
|
||||
name: "metric name without aggregation",
|
||||
expression: "temperature > 30",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "temperature",
|
||||
},
|
||||
},
|
||||
expected: "value > 30",
|
||||
},
|
||||
{
|
||||
name: "complex expression with parentheses",
|
||||
expression: "(avg(cpu_usage) > 80 AND avg(cpu_usage) < 95) OR __result > 99",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
SpaceAggregation: metrictypes.SpaceAggregationAvg,
|
||||
},
|
||||
},
|
||||
expected: "(value > 80 AND value < 95) OR value > 99",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
rewriter := NewHavingExpressionRewriter()
|
||||
result := rewriter.RewriteForMetrics(tt.expression, tt.aggregations)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected: %s, Got: %s", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestHavingExpressionRewriter_EdgeCases(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
aggregations []qbtypes.TraceAggregation
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "empty expression",
|
||||
expression: "",
|
||||
aggregations: []qbtypes.TraceAggregation{},
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "no matching columns",
|
||||
expression: "unknown_column > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()", Alias: "total"},
|
||||
},
|
||||
expected: "unknown_column > 100",
|
||||
},
|
||||
{
|
||||
name: "expression within quoted string",
|
||||
expression: "status = 'count() > 100' AND total > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "status", Alias: "status"},
|
||||
{Expression: "count()", Alias: "total"},
|
||||
},
|
||||
expected: "__result_0 = 'count() > 100' AND __result_1 > 100",
|
||||
},
|
||||
{
|
||||
name: "double quotes",
|
||||
expression: `name = "test" AND count > 10`,
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "name", Alias: "name"},
|
||||
{Expression: "count()", Alias: "count"},
|
||||
},
|
||||
expected: `__result_0 = "test" AND __result_1 > 10`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
rewriter := NewHavingExpressionRewriter()
|
||||
result := rewriter.RewriteForTraces(tt.expression, tt.aggregations)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected: %s, Got: %s", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
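Taken together, these tests pin down the rewriter's contract: aliases, raw aggregation expressions, bare `__result`, and the `__result{N}` shorthand all normalize to positional `__result_<index>` columns (or to `value` for metrics), while quoted strings are left untouched. A minimal usage sketch, based only on the API the tests above exercise (the expression and aggregations are illustrative):

	// Sketch: rewriting a user-facing HAVING expression into positional columns.
	rewriter := querybuilder.NewHavingExpressionRewriter()
	aggregations := []qbtypes.LogAggregation{
		{Expression: "count()", Alias: "total_logs"},
		{Expression: "avg(size)", Alias: ""},
	}
	// "total_logs > 1000 AND avg(size) < 1024" -> "__result_0 > 1000 AND __result_1 < 1024"
	having := rewriter.RewriteForLogs("total_logs > 1000 AND avg(size) < 1024", aggregations)
	_ = having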
@@ -95,6 +95,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
	end uint64,
	requestType qbtypes.RequestType,
	query qbtypes.QueryBuilderQuery[T],
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
	config, exists := signalConfigs[b.signal]
	if !exists {
@@ -111,7 +112,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
		return nil, err
	}

	if err := b.addConditions(ctx, q, start, end, query, keys); err != nil {
	if err := b.addConditions(ctx, q, start, end, query, keys, variables); err != nil {
		return nil, err
	}

@@ -129,6 +130,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
	start, end uint64,
	query qbtypes.QueryBuilderQuery[T],
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) error {
	// Add filter condition if present
	if query.Filter != nil && query.Filter.Expression != "" {
@@ -139,6 +141,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
			ConditionBuilder:   b.conditionBuilder,
			FieldKeys:          keys,
			SkipFullTextFilter: true,
			Variables:          variables,
		})

		if err != nil {
@@ -1,10 +1,16 @@
package querybuilder

import "math"
import (
	"math"
)

const (
	NsToSeconds      = 1000000000
	BucketAdjustment = 1800 // 30 minutes

	RecommendedNumberOfPoints = 300
	MaxAllowedNumberofPoints  = 1500
	MaxAllowedSeries          = 3000
)

// ToNanoSecs takes epoch and returns it in ns
@@ -21,3 +27,61 @@ func ToNanoSecs(epoch uint64) uint64 {
	}
	return temp * uint64(math.Pow(10, float64(19-count)))
}

func RecommendedStepInterval(start, end uint64) uint64 {
	start = ToNanoSecs(start)
	end = ToNanoSecs(end)

	step := (end - start) / RecommendedNumberOfPoints / 1e9

	if step < 5 {
		return 5
	}

	// return the nearest lower multiple of 5
	return step - step%5
}

func MinAllowedStepInterval(start, end uint64) uint64 {
	start = ToNanoSecs(start)
	end = ToNanoSecs(end)

	step := (end - start) / MaxAllowedNumberofPoints / 1e9

	if step < 5 {
		return 5
	}

	// return the nearest lower multiple of 5
	return step - step%5
}

func RecommendedStepIntervalForMetric(start, end uint64) uint64 {
	start = ToNanoSecs(start)
	end = ToNanoSecs(end)

	step := (end - start) / RecommendedNumberOfPoints / 1e9

	// TODO: make this make use of the reporting frequency and remove the hard coded step
	if step < 60 {
		return 60
	}

	// return the nearest lower multiple of 60
	return step - step%60
}

func MinAllowedStepIntervalForMetric(start, end uint64) uint64 {
	start = ToNanoSecs(start)
	end = ToNanoSecs(end)

	step := (end - start) / RecommendedNumberOfPoints / 1e9

	// TODO: make this make use of the reporting frequency and remove the hard coded step
	if step < 60 {
		return 60
	}

	// return the nearest lower multiple of 60
	return step - step%60
}
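To make the rounding concrete: for a one-hour window, RecommendedStepInterval yields 3600 s / 300 points = 12 s, which is then floored to the nearest lower multiple of 5, giving 10 s; any window short enough to produce a step under 5 s is clamped to 5 s. A small sketch under those assumptions (the epoch values are hypothetical, in milliseconds; ToNanoSecs normalizes them to nanoseconds first):

	// Worked example for the step helpers above.
	start := uint64(1747947419000) // ms epoch; ToNanoSecs scales it to ns
	end := start + 3600*1000       // one hour later
	step := querybuilder.RecommendedStepInterval(start, end)
	// (3600 / 300) = 12 -> floored to a multiple of 5 -> step == 10 (seconds)
	_ = step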
81 pkg/querybuilder/variable_integration_test.go Normal file
@@ -0,0 +1,81 @@
package querybuilder

import (
	"context"
	"testing"

	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/assert"
)

func TestWhereClauseVisitorVariableResolution(t *testing.T) {
	// Test that the visitor properly resolves variables in VisitValue

	variables := map[string]qbtypes.VariableItem{
		"service": {
			Type:  qbtypes.QueryVariableType,
			Value: "payment-service",
		},
		"status": {
			Type:  qbtypes.CustomVariableType,
			Value: []string{"200", "201"},
		},
		"all": {
			Type:  qbtypes.DynamicVariableType,
			Value: "__all__",
		},
	}

	t.Run("visitor resolves variable in value", func(t *testing.T) {
		sb := sqlbuilder.NewSelectBuilder()
		opts := FilterExprVisitorOpts{
			FieldMapper:      &simpleFieldMapper{},
			ConditionBuilder: &simpleConditionBuilder{},
			FieldKeys:        make(map[string][]*telemetrytypes.TelemetryFieldKey),
			Builder:          sb,
			Variables:        variables,
		}

		visitor := newFilterExpressionVisitor(opts)
		assert.NotNil(t, visitor.variableResolver)

		// Test that the variable resolver is properly initialized
		assert.NotNil(t, visitor.variableResolver)

		// Test variable resolution
		value, skipFilter, err := visitor.variableResolver.ResolveVariable("service")
		assert.NoError(t, err)
		assert.Equal(t, "payment-service", value)
		assert.False(t, skipFilter)

		// Test __all__ variable
		value, skipFilter, err = visitor.variableResolver.ResolveVariable("all")
		assert.NoError(t, err)
		assert.True(t, skipFilter)
	})
}

// Simple mock implementations for testing

type simpleFieldMapper struct{}

func (m *simpleFieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
	return key.Name, nil
}

func (m *simpleFieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
	return nil, nil
}

func (m *simpleFieldMapper) ColumnExpressionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error) {
	return key.Name, nil
}

type simpleConditionBuilder struct{}

func (m *simpleConditionBuilder) ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	return sb.Equal(key.Name, value), nil
}
145 pkg/querybuilder/variable_resolver.go Normal file
@@ -0,0 +1,145 @@
package querybuilder

import (
	"fmt"
	"regexp"
	"strings"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// VariableResolver handles variable substitution in query expressions
type VariableResolver struct {
	variables map[string]qbtypes.VariableItem
}

// NewVariableResolver creates a new VariableResolver
func NewVariableResolver(variables map[string]qbtypes.VariableItem) *VariableResolver {
	return &VariableResolver{
		variables: variables,
	}
}

// Variable patterns:
// {{.var}} or {{var}}
// [[.var]] or [[var]]
// $var
var variablePatterns = []*regexp.Regexp{
	regexp.MustCompile(`\{\{\.?(\w+)\}\}`), // {{.var}} or {{var}}
	regexp.MustCompile(`\[\[\.?(\w+)\]\]`), // [[.var]] or [[var]]
	regexp.MustCompile(`\$(\w+)`),          // $var
}

// IsVariableReference checks if a value is a variable reference
func (r *VariableResolver) IsVariableReference(value string) (bool, string) {
	// Check for exact match only (not partial)
	for _, pattern := range variablePatterns {
		matches := pattern.FindStringSubmatch(value)
		if len(matches) > 1 && matches[0] == value {
			return true, matches[1]
		}
	}
	return false, ""
}

// ResolveVariable resolves a variable reference to its actual value
func (r *VariableResolver) ResolveVariable(varName string) (any, bool, error) {
	item, exists := r.variables[varName]
	if !exists {
		return nil, false, fmt.Errorf("variable '%s' not found", varName)
	}

	// Check if this is a dynamic variable with special __all__ value
	if item.Type == qbtypes.DynamicVariableType {
		// Check for __all__ values which mean "skip filter"
		switch v := item.Value.(type) {
		case string:
			if v == "__all__" {
				return nil, true, nil // skip filter
			}
		case []any:
			if len(v) == 1 {
				if str, ok := v[0].(string); ok && str == "__all__" {
					return nil, true, nil // skip filter
				}
			}
		case []string:
			if len(v) == 1 && v[0] == "__all__" {
				return nil, true, nil // skip filter
			}
		}
	}

	return item.Value, false, nil
}

// ResolveFilterExpression resolves variables in a filter expression
// Returns the resolved expression and whether any filters should be skipped
func (r *VariableResolver) ResolveFilterExpression(expression string) (string, bool, error) {
	if expression == "" {
		return expression, false, nil
	}

	// Check if the entire expression is a variable
	if isVar, varName := r.IsVariableReference(strings.TrimSpace(expression)); isVar {
		value, skipFilter, err := r.ResolveVariable(varName)
		if err != nil {
			return "", false, err
		}
		if skipFilter {
			return "", true, nil
		}
		// Convert value to string representation
		return formatValue(value), false, nil
	}

	// For complex expressions, we need to find and replace variable references
	// We'll iterate through all variables and check if they appear in the expression
	resolvedExpr := expression
	for _, pattern := range variablePatterns {
		matches := pattern.FindAllStringSubmatch(expression, -1)
		for _, match := range matches {
			if len(match) > 1 {
				varName := match[1]
				value, skipFilter, err := r.ResolveVariable(varName)
				if err != nil {
					// Skip this variable if not found
					continue
				}
				if skipFilter {
					// If any variable indicates skip filter, skip the entire filter
					return "", true, nil
				}
				// Replace the variable reference with its value
				resolvedExpr = strings.ReplaceAll(resolvedExpr, match[0], formatValue(value))
			}
		}
	}

	return resolvedExpr, false, nil
}

// formatValue formats a value for use in a filter expression
func formatValue(value any) string {
	switch v := value.(type) {
	case string:
		// Quote strings
		return fmt.Sprintf("'%s'", strings.ReplaceAll(v, "'", "''"))
	case []string:
		// Format as array
		parts := make([]string, len(v))
		for i, s := range v {
			parts[i] = fmt.Sprintf("'%s'", strings.ReplaceAll(s, "'", "''"))
		}
		return fmt.Sprintf("[%s]", strings.Join(parts, ", "))
	case []any:
		// Format as array
		parts := make([]string, len(v))
		for i, item := range v {
			parts[i] = formatValue(item)
		}
		return fmt.Sprintf("[%s]", strings.Join(parts, ", "))
	default:
		return fmt.Sprintf("%v", v)
	}
}
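A hedged usage sketch for the resolver, mirroring the expectations in the tests that follow (a hypothetical standalone main, using only the exported API above):

	package main

	import (
		"fmt"

		"github.com/SigNoz/signoz/pkg/querybuilder"
		qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	)

	func main() {
		vars := map[string]qbtypes.VariableItem{
			"service": {Type: qbtypes.QueryVariableType, Value: "payment-service"},
			"all":     {Type: qbtypes.DynamicVariableType, Value: "__all__"},
		}
		r := querybuilder.NewVariableResolver(vars)

		expr, skip, _ := r.ResolveFilterExpression(`service.name = "{{service}}"`)
		fmt.Println(expr, skip) // service.name = "'payment-service'" false

		expr, skip, _ = r.ResolveFilterExpression("service.name = {{all}}")
		fmt.Println(expr, skip) // "" true: the __all__ value drops the filter entirely
	}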
317 pkg/querybuilder/variable_resolver_test.go Normal file
@@ -0,0 +1,317 @@
package querybuilder

import (
	"testing"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/stretchr/testify/assert"
)

func TestVariableResolver_IsVariableReference(t *testing.T) {
	r := NewVariableResolver(nil)

	tests := []struct {
		name    string
		value   string
		isVar   bool
		varName string
	}{
		{
			name:    "double curly with dot",
			value:   "{{.myVar}}",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "double curly without dot",
			value:   "{{myVar}}",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "double square with dot",
			value:   "[[.myVar]]",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "double square without dot",
			value:   "[[myVar]]",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "dollar sign",
			value:   "$myVar",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "not a variable",
			value:   "myVar",
			isVar:   false,
			varName: "",
		},
		{
			name:    "partial match",
			value:   "prefix{{myVar}}suffix",
			isVar:   false,
			varName: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			isVar, varName := r.IsVariableReference(tt.value)
			assert.Equal(t, tt.isVar, isVar)
			assert.Equal(t, tt.varName, varName)
		})
	}
}

func TestVariableResolver_ResolveVariable(t *testing.T) {
	variables := map[string]qbtypes.VariableItem{
		"service": {
			Type:  qbtypes.QueryVariableType,
			Value: "payment-service",
		},
		"status": {
			Type:  qbtypes.CustomVariableType,
			Value: []string{"200", "201"},
		},
		"env": {
			Type:  qbtypes.TextBoxVariableType,
			Value: "production",
		},
		"all_services": {
			Type:  qbtypes.DynamicVariableType,
			Value: "__all__",
		},
		"all_array": {
			Type:  qbtypes.DynamicVariableType,
			Value: []string{"__all__"},
		},
	}

	r := NewVariableResolver(variables)

	tests := []struct {
		name      string
		varName   string
		wantValue any
		wantSkip  bool
		wantErr   bool
	}{
		{
			name:      "query variable",
			varName:   "service",
			wantValue: "payment-service",
			wantSkip:  false,
			wantErr:   false,
		},
		{
			name:      "custom variable array",
			varName:   "status",
			wantValue: []string{"200", "201"},
			wantSkip:  false,
			wantErr:   false,
		},
		{
			name:      "textbox variable",
			varName:   "env",
			wantValue: "production",
			wantSkip:  false,
			wantErr:   false,
		},
		{
			name:      "dynamic variable with __all__",
			varName:   "all_services",
			wantValue: nil,
			wantSkip:  true,
			wantErr:   false,
		},
		{
			name:      "dynamic variable with __all__ in array",
			varName:   "all_array",
			wantValue: nil,
			wantSkip:  true,
			wantErr:   false,
		},
		{
			name:      "non-existent variable",
			varName:   "unknown",
			wantValue: nil,
			wantSkip:  false,
			wantErr:   true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			value, skipFilter, err := r.ResolveVariable(tt.varName)

			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.wantValue, value)
				assert.Equal(t, tt.wantSkip, skipFilter)
			}
		})
	}
}

func TestVariableResolver_ResolveFilterExpression(t *testing.T) {
	variables := map[string]qbtypes.VariableItem{
		"service": {
			Type:  qbtypes.QueryVariableType,
			Value: "payment-service",
		},
		"status": {
			Type:  qbtypes.CustomVariableType,
			Value: []string{"200", "201"},
		},
		"env": {
			Type:  qbtypes.TextBoxVariableType,
			Value: "production",
		},
		"all": {
			Type:  qbtypes.DynamicVariableType,
			Value: "__all__",
		},
	}

	r := NewVariableResolver(variables)

	tests := []struct {
		name           string
		expression     string
		wantExpression string
		wantSkip       bool
		wantErr        bool
	}{
		{
			name:           "simple variable reference",
			expression:     "{{service}}",
			wantExpression: "'payment-service'",
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "expression with variable",
			expression:     `service.name = "{{service}}"`,
			wantExpression: `service.name = "'payment-service'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "expression with array variable",
			expression:     "status_code IN {{status}}",
			wantExpression: `status_code IN ['200', '201']`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "multiple variables",
			expression:     `service.name = "{{service}}" AND environment = "{{env}}"`,
			wantExpression: `service.name = "'payment-service'" AND environment = "'production'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "dollar variable syntax",
			expression:     `service.name = "$service"`,
			wantExpression: `service.name = "'payment-service'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "double square brackets",
			expression:     `service.name = "[[service]]"`,
			wantExpression: `service.name = "'payment-service'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "__all__ variable should skip filter",
			expression:     "service.name = {{all}}",
			wantExpression: "",
			wantSkip:       true,
			wantErr:        false,
		},
		{
			name:           "expression with unknown variable",
			expression:     "service.name = {{unknown}}",
			wantExpression: "service.name = {{unknown}}", // unchanged
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "no variables",
			expression:     "service.name = 'static-value'",
			wantExpression: "service.name = 'static-value'",
			wantSkip:       false,
			wantErr:        false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			resolved, skipFilter, err := r.ResolveFilterExpression(tt.expression)

			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.wantExpression, resolved)
				assert.Equal(t, tt.wantSkip, skipFilter)
			}
		})
	}
}

func TestFormatValue(t *testing.T) {
	tests := []struct {
		name  string
		value any
		want  string
	}{
		{
			name:  "string value",
			value: "test",
			want:  "'test'",
		},
		{
			name:  "string with quotes",
			value: "test's value",
			want:  "'test''s value'",
		},
		{
			name:  "string array",
			value: []string{"a", "b", "c"},
			want:  "['a', 'b', 'c']",
		},
		{
			name:  "interface array",
			value: []any{"a", 123, "c"},
			want:  "['a', 123, 'c']",
		},
		{
			name:  "number",
			value: 123,
			want:  "123",
		},
		{
			name:  "boolean",
			value: true,
			want:  "true",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := formatValue(tt.value)
			assert.Equal(t, tt.want, got)
		})
	}
}
@@ -29,6 +29,7 @@ type filterExpressionVisitor struct {
	jsonKeyToKey       qbtypes.JsonKeyToFieldFunc
	skipResourceFilter bool
	skipFullTextFilter bool
	variableResolver   *VariableResolver
}

type FilterExprVisitorOpts struct {
@@ -41,10 +42,16 @@ type FilterExprVisitorOpts struct {
	JsonKeyToKey       qbtypes.JsonKeyToFieldFunc
	SkipResourceFilter bool
	SkipFullTextFilter bool
	Variables          map[string]qbtypes.VariableItem
}

// newFilterExpressionVisitor creates a new filterExpressionVisitor
func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor {
	var variableResolver *VariableResolver
	if opts.Variables != nil && len(opts.Variables) > 0 {
		variableResolver = NewVariableResolver(opts.Variables)
	}

	return &filterExpressionVisitor{
		fieldMapper:      opts.FieldMapper,
		conditionBuilder: opts.ConditionBuilder,
@@ -55,6 +62,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
		jsonKeyToKey:       opts.JsonKeyToKey,
		skipResourceFilter: opts.SkipResourceFilter,
		skipFullTextFilter: opts.SkipFullTextFilter,
		variableResolver:   variableResolver,
	}
}

@@ -161,6 +169,8 @@ func (v *filterExpressionVisitor) Visit(tree antlr.ParseTree) any {
		return v.VisitValue(t)
	case *grammar.KeyContext:
		return v.VisitKey(t)
	case *grammar.VariableContext:
		return v.VisitVariable(t)
	default:
		return ""
	}
@@ -378,6 +388,11 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
	if len(values) > 0 {
		value := v.Visit(values[0])

		// Check if we should skip this filter due to __all__ variable
		if strVal, ok := value.(string); ok && strVal == "__SKIP_FILTER__" {
			return "true" // Return always true condition to skip filter
		}

		var op qbtypes.FilterOperator

		// Handle each type of comparison
@@ -433,12 +448,58 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext

// VisitInClause handles IN expressions
func (v *filterExpressionVisitor) VisitInClause(ctx *grammar.InClauseContext) any {
	return v.Visit(ctx.ValueList())
	// Check if it's a variable
	if ctx.Variable() != nil {
		value := v.Visit(ctx.Variable())

		// If the variable resolved to "__SKIP_FILTER__", return empty array
		if skipVal, ok := value.(string); ok && skipVal == "__SKIP_FILTER__" {
			return []any{}
		}

		// If it's already an array, return it
		if arr, ok := value.([]any); ok {
			return arr
		}

		// Otherwise, wrap single value in array
		return []any{value}
	}

	// Handle regular value list
	if ctx.ValueList() != nil {
		return v.Visit(ctx.ValueList())
	}

	return []any{}
}

// VisitNotInClause handles NOT IN expressions
func (v *filterExpressionVisitor) VisitNotInClause(ctx *grammar.NotInClauseContext) any {
	return v.Visit(ctx.ValueList())
	// Check if it's a variable
	if ctx.Variable() != nil {
		value := v.Visit(ctx.Variable())

		// If the variable resolved to "__SKIP_FILTER__", return empty array
		if skipVal, ok := value.(string); ok && skipVal == "__SKIP_FILTER__" {
			return []any{}
		}

		// If it's already an array, return it
		if arr, ok := value.([]any); ok {
			return arr
		}

		// Otherwise, wrap single value in array
		return []any{value}
	}

	// Handle regular value list
	if ctx.ValueList() != nil {
		return v.Visit(ctx.ValueList())
	}

	return []any{}
}

// VisitValueList handles comma-separated value lists
@@ -568,12 +629,79 @@ func (v *filterExpressionVisitor) VisitArray(ctx *grammar.ArrayContext) any {
	return v.Visit(ctx.ValueList())
}

// VisitValue handles literal values: strings, numbers, booleans
// VisitVariable handles variable resolution
func (v *filterExpressionVisitor) VisitVariable(ctx *grammar.VariableContext) any {
	var varName string
	var varText string

	// Extract variable name based on syntax
	if ctx.DOLLAR_VAR() != nil {
		varText = ctx.DOLLAR_VAR().GetText()
		varName = varText[1:] // Remove $
	} else if ctx.CURLY_VAR() != nil {
		varText = ctx.CURLY_VAR().GetText()
		// Remove {{ }} and optional whitespace/dots
		varName = strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(varText, "{{"), "}}"))
		varName = strings.TrimPrefix(varName, ".")
	} else if ctx.SQUARE_VAR() != nil {
		varText = ctx.SQUARE_VAR().GetText()
		// Remove [[ ]] and optional whitespace/dots
		varName = strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(varText, "[["), "]]"))
		varName = strings.TrimPrefix(varName, ".")
	} else {
		v.errors = append(v.errors, "unknown variable type")
		return nil
	}

	// If no variable resolver is provided, return the variable text
	if v.variableResolver == nil {
		v.errors = append(v.errors, fmt.Sprintf("variable %s used but no variable resolver provided", varText))
		return varText
	}

	// Resolve the variable
	resolvedValue, skipFilter, err := v.variableResolver.ResolveVariable(varName)
	if err != nil {
		v.errors = append(v.errors, fmt.Sprintf("failed to resolve variable %s: %v", varText, err))
		return nil
	}

	if skipFilter {
		return "__SKIP_FILTER__"
	}

	return resolvedValue
}

// VisitValue handles literal values: strings, numbers, booleans, variables
func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
	// Check if this is a variable first
	if ctx.Variable() != nil {
		return v.Visit(ctx.Variable())
	}

	if ctx.QUOTED_TEXT() != nil {
		txt := ctx.QUOTED_TEXT().GetText()
		// trim quotes and return the value
		return trimQuotes(txt)
		// trim quotes and check for variable
		value := trimQuotes(txt)

		// Check if this is a variable reference
		if v.variableResolver != nil {
			if isVar, varName := v.variableResolver.IsVariableReference(value); isVar {
				resolvedValue, skipFilter, err := v.variableResolver.ResolveVariable(varName)
				if err != nil {
					v.errors = append(v.errors, fmt.Sprintf("failed to resolve variable: %s", err.Error()))
					return value
				}
				if skipFilter {
					// Return a special marker to indicate filter should be skipped
					return "__SKIP_FILTER__"
				}
				return resolvedValue
			}
		}

		return value
	} else if ctx.NUMBER() != nil {
		number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
		if err != nil {
@@ -590,7 +718,25 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
	// When the user writes an expression like `service.name=redis`
	// The `redis` part is a VALUE context but parsed as a KEY token
	// so we return the text as is
	return ctx.KEY().GetText()
	keyText := ctx.KEY().GetText()

	// Check if this is a variable reference
	if v.variableResolver != nil {
		if isVar, varName := v.variableResolver.IsVariableReference(keyText); isVar {
			resolvedValue, skipFilter, err := v.variableResolver.ResolveVariable(varName)
			if err != nil {
				v.errors = append(v.errors, fmt.Sprintf("failed to resolve variable: %s", err.Error()))
				return keyText
			}
			if skipFilter {
				// Return a special marker to indicate filter should be skipped
				return "__SKIP_FILTER__"
			}
			return resolvedValue
		}
	}

	return keyText
}

	return "" // Should not happen with valid input
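As a sanity check on the exact-match rule the visitor relies on for all three variable spellings, here is a small hedged sketch (a hypothetical main, using only the exported resolver API shown earlier; a nil variable map is enough to exercise the pattern matching):

	package main

	import (
		"fmt"

		"github.com/SigNoz/signoz/pkg/querybuilder"
	)

	func main() {
		r := querybuilder.NewVariableResolver(nil)
		for _, s := range []string{"{{.service}}", "[[service]]", "$service", "prefix{{service}}suffix"} {
			isVar, name := r.IsVariableReference(s)
			fmt.Println(s, isVar, name) // the last one prints false: partial matches are rejected
		}
	}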
@@ -15,7 +15,6 @@ import (
	"github.com/SigNoz/signoz/pkg/emailing/smtpemailing"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/prometheus/clickhouseprometheus"
	"github.com/SigNoz/signoz/pkg/querier"
@@ -154,9 +153,9 @@ func NewSharderProviderFactories() factory.NamedMap[factory.ProviderFactory[shar
	)
}

func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] {
func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] {
	return factory.MustNewNamedMap(
		analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, userGetter, build, analyticsConfig),
		analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, build, analyticsConfig),
		noopstatsreporter.NewFactory(),
	)
}
@@ -5,9 +5,7 @@ import (
	"github.com/DATA-DOG/go-sqlmock"
	"github.com/SigNoz/signoz/pkg/analytics"
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
	"github.com/SigNoz/signoz/pkg/modules/user/impluser"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest"
	"github.com/SigNoz/signoz/pkg/statsreporter"
@@ -63,9 +61,8 @@ func TestNewProviderFactories(t *testing.T) {
	})

	assert.NotPanics(t, func() {
		userGetter := impluser.NewGetter(impluser.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), instrumentationtest.New().ToProviderSettings()))
		orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil)
		telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual)
		NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, version.Build{}, analytics.Config{Enabled: true})
		NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, version.Build{}, analytics.Config{Enabled: true})
	})
}
@@ -12,7 +12,6 @@ import (
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
	"github.com/SigNoz/signoz/pkg/modules/user/impluser"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/ruler"
@@ -32,7 +31,6 @@ import (
type SigNoz struct {
	*factory.Registry
	Instrumentation instrumentation.Instrumentation
	Analytics       analytics.Analytics
	Cache           cache.Cache
	Web             web.Web
	SQLStore        sqlstore.SQLStore
@@ -214,9 +212,6 @@ func New(
	// Initialize organization getter
	orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstore), sharder)

	// Initialize user getter
	userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))

	// Initialize alertmanager from the available alertmanager provider factories
	alertmanager, err := factory.NewProviderFromNamedMap(
		ctx,
@@ -272,7 +267,7 @@ func New(
		ctx,
		providerSettings,
		config.StatsReporter,
		NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, userGetter, version.Info, config.Analytics),
		NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, version.Info, config.Analytics),
		config.StatsReporter.Provider(),
	)
	if err != nil {
@@ -293,7 +288,6 @@ func New(

	return &SigNoz{
		Registry:        registry,
		Analytics:       analytics,
		Instrumentation: instrumentation,
		Cache:           cache,
		Web:             web,
@@ -5,6 +5,7 @@ import (
	"database/sql"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types"
@@ -130,17 +131,15 @@ func (migration *updateAgents) Up(ctx context.Context, db *bun.DB) error {
		_ = tx.Rollback()
	}()

	var orgID string
	err = tx.
		NewSelect().
		ColumnExpr("id").
		Table("organizations").
		Limit(1).
		Scan(ctx, &orgID)
	if err != nil {
		if err != sql.ErrNoRows {
			return err
		}
	// get all org ids
	var orgIDs []string
	if err := tx.NewSelect().Model(new(types.Organization)).Column("id").Scan(ctx, &orgIDs); err != nil {
		return err
	}

	// there are multiple orgs, so we don't need to update the agents table
	if len(orgIDs) > 1 {
		return errors.Newf(errors.TypeInternal, errors.CodeInternal, "multiple orgs found: %v", orgIDs)
	}

	err = migration.
@@ -158,7 +157,7 @@ func (migration *updateAgents) Up(ctx context.Context, db *bun.DB) error {

	if err == nil && len(existingAgents) > 0 {
		newAgents, err := migration.
			CopyOldAgentToNewAgent(ctx, tx, existingAgents, orgID)
			CopyOldAgentToNewAgent(ctx, tx, existingAgents, orgIDs[0])
		if err != nil {
			return err
		}
@@ -191,7 +190,7 @@ func (migration *updateAgents) Up(ctx context.Context, db *bun.DB) error {

	if err == nil && len(existingAgentConfigVersions) > 0 {
		newAgentConfigVersions, err := migration.
			CopyOldAgentConfigVersionToNewAgentConfigVersion(ctx, tx, existingAgentConfigVersions, orgID)
			CopyOldAgentConfigVersionToNewAgentConfigVersion(ctx, tx, existingAgentConfigVersions, orgIDs[0])
		if err != nil {
			return err
		}
@@ -224,7 +223,7 @@ func (migration *updateAgents) Up(ctx context.Context, db *bun.DB) error {

	if err == nil && len(existingAgentConfigElements) > 0 {
		newAgentConfigElements, err := migration.
			CopyOldAgentConfigElementToNewAgentConfigElement(ctx, tx, existingAgentConfigElements, orgID)
			CopyOldAgentConfigElementToNewAgentConfigElement(ctx, tx, existingAgentConfigElements, orgIDs[0])
		if err != nil {
			return err
		}
@@ -9,10 +9,9 @@ import (
	"github.com/SigNoz/signoz/pkg/analytics/segmentanalytics"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/statsreporter"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/analyticstypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/SigNoz/signoz/pkg/version"
)
@@ -33,9 +32,6 @@ type provider struct {
	// used to get organizations
	orgGetter organization.Getter

	// used to get users
	userGetter user.Getter

	// used to send stats to an analytics backend
	analytics analytics.Analytics

@@ -49,9 +45,9 @@ type provider struct {
	stopC chan struct{}
}

func NewFactory(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, build version.Build, analyticsConfig analytics.Config) factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config] {
func NewFactory(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, build version.Build, analyticsConfig analytics.Config) factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config] {
	return factory.NewProviderFactory(factory.MustNewName("analytics"), func(ctx context.Context, settings factory.ProviderSettings, config statsreporter.Config) (statsreporter.StatsReporter, error) {
		return New(ctx, settings, config, telemetryStore, collectors, orgGetter, userGetter, build, analyticsConfig)
		return New(ctx, settings, config, telemetryStore, collectors, orgGetter, build, analyticsConfig)
	})
}

@@ -62,7 +58,6 @@ func New(
	telemetryStore telemetrystore.TelemetryStore,
	collectors []statsreporter.StatsCollector,
	orgGetter organization.Getter,
	userGetter user.Getter,
	build version.Build,
	analyticsConfig analytics.Config,
) (statsreporter.StatsReporter, error) {
@@ -79,7 +74,6 @@ func New(
		telemetryStore: telemetryStore,
		collectors:     collectors,
		orgGetter:      orgGetter,
		userGetter:     userGetter,
		analytics:      analytics,
		build:          build,
		deployment:     deployment,
@@ -122,7 +116,6 @@ func (provider *provider) Report(ctx context.Context) error {
			continue
		}

		// Add build and deployment stats
		stats["build.version"] = provider.build.Version()
		stats["build.branch"] = provider.build.Branch()
		stats["build.hash"] = provider.build.Hash()
@@ -132,30 +125,37 @@ func (provider *provider) Report(ctx context.Context) error {
		stats["deployment.os"] = provider.deployment.OS()
		stats["deployment.arch"] = provider.deployment.Arch()

		// Add org stats
		stats["display_name"] = org.DisplayName
		stats["name"] = org.Name
		stats["created_at"] = org.CreatedAt
		stats["alias"] = org.Alias

		provider.settings.Logger().DebugContext(ctx, "reporting stats", "stats", stats)

		provider.analytics.IdentifyGroup(ctx, org.ID.String(), stats)
		provider.analytics.TrackGroup(ctx, org.ID.String(), "Stats Reported", stats)

		if !provider.config.Collect.Identities {
			continue
		}

		users, err := provider.userGetter.ListByOrgID(ctx, org.ID)
		if err != nil {
			provider.settings.Logger().WarnContext(ctx, "failed to list users", "error", err, "org_id", org.ID)
			continue
		}

		for _, user := range users {
			provider.analytics.IdentifyUser(ctx, org.ID.String(), user.ID.String(), types.NewTraitsFromUser(user))
		}
		provider.analytics.Send(
			ctx,
			analyticstypes.Track{
				UserId:     "stats_" + org.ID.String(),
				Event:      "Stats Reported",
				Properties: analyticstypes.NewPropertiesFromMap(stats),
				Context: &analyticstypes.Context{
					Extra: map[string]interface{}{
						analyticstypes.KeyGroupID: org.ID.String(),
					},
				},
			},
			analyticstypes.Group{
				UserId:  "stats_" + org.ID.String(),
				GroupId: org.ID.String(),
				Traits: analyticstypes.
					NewTraitsFromMap(stats).
					SetName(org.DisplayName).
					SetUsername(org.Name).
					SetCreatedAt(org.CreatedAt),
			},
			analyticstypes.Identify{
				UserId: "stats_" + org.ID.String(),
				Traits: analyticstypes.
					NewTraits().
					SetName(org.DisplayName).
					SetUsername(org.Name).
					SetCreatedAt(org.CreatedAt),
			},
		)
	}

	return nil
@@ -12,13 +12,6 @@ type Config struct {

	// Interval is the interval at which the stats are collected.
	Interval time.Duration `mapstructure:"interval"`

	// Collect is the collection configuration.
	Collect Collect `mapstructure:"collect"`
}

type Collect struct {
	Identities bool `mapstructure:"identities"`
}

func NewConfigFactory() factory.ConfigFactory {
@@ -29,9 +22,6 @@ func newConfig() factory.Config {
	return Config{
		Enabled:  true,
		Interval: 6 * time.Hour,
		Collect: Collect{
			Identities: true,
		},
	}
}
@@ -61,6 +61,7 @@ func (b *logQueryStatementBuilder) Build(
	end uint64,
	requestType qbtypes.RequestType,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

	start = querybuilder.ToNanoSecs(start)
@@ -77,11 +78,11 @@ func (b *logQueryStatementBuilder) Build(
	switch requestType {
	case qbtypes.RequestTypeRaw:
		return b.buildListQuery(ctx, q, query, start, end, keys)
		return b.buildListQuery(ctx, q, query, start, end, keys, variables)
	case qbtypes.RequestTypeTimeSeries:
		return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys)
		return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys, variables)
	case qbtypes.RequestTypeScalar:
		return b.buildScalarQuery(ctx, q, query, start, end, keys, false)
		return b.buildScalarQuery(ctx, q, query, start, end, keys, variables, false)
	}

	return nil, fmt.Errorf("unsupported request type: %s", requestType)
@@ -130,6 +131,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

	var (
@@ -137,7 +139,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
		cteArgs      [][]any
	)

	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil {
	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
		return nil, err
	} else if frag != "" {
		cteFragments = append(cteFragments, frag)
@@ -153,7 +155,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
	sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))

	// Add filter conditions
	warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
	warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
	if err != nil {
		return nil, err
	}
@@ -196,6 +198,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

	var (
@@ -203,7 +206,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
		cteArgs      [][]any
	)

	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil {
	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
		return nil, err
	} else if frag != "" {
		cteFragments = append(cteFragments, frag)
@@ -246,7 +249,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	}

	sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
	warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
	warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
	if err != nil {
		return nil, err
	}
@@ -254,10 +257,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	var finalSQL string
	var finalArgs []any

	if query.Limit > 0 {
	if query.Limit > 0 && len(query.GroupBy) > 0 {
		// build the scalar “top/bottom-N” query in its own builder.
		cteSB := sqlbuilder.NewSelectBuilder()
		cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true)
		cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, variables, true)
		if err != nil {
			return nil, err
		}
@@ -272,7 +275,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
		// Group by all dimensions
		sb.GroupBy("ALL")
		if query.Having != nil && query.Having.Expression != "" {
			sb.Having(query.Having.Expression)
			// Rewrite having expression to use SQL column names
			rewriter := querybuilder.NewHavingExpressionRewriter()
			rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
			sb.Having(rewrittenExpr)
		}

		combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -286,7 +292,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	} else {
		sb.GroupBy("ALL")
		if query.Having != nil && query.Having.Expression != "" {
			sb.Having(query.Having.Expression)
			// Rewrite having expression to use SQL column names
			rewriter := querybuilder.NewHavingExpressionRewriter()
			rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
			sb.Having(rewrittenExpr)
		}

		combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -312,6 +321,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
	skipResourceCTE bool,
) (*qbtypes.Statement, error) {

@@ -320,7 +330,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
		cteArgs      [][]any
	)

	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil {
	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
		return nil, err
	} else if frag != "" && !skipResourceCTE {
		cteFragments = append(cteFragments, frag)
@@ -365,7 +375,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
	sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))

	// Add filter conditions
	warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
	warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
	if err != nil {
		return nil, err
	}
@@ -375,7 +385,10 @@ func (b *logQueryStatementBuilder) buildScalarQuery(

	// Add having clause if needed
	if query.Having != nil && query.Having.Expression != "" {
		sb.Having(query.Having.Expression)
		// Rewrite having expression to use SQL column names
		rewriter := querybuilder.NewHavingExpressionRewriter()
		rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
		sb.Having(rewrittenExpr)
	}

	// Add order by
@@ -414,11 +427,12 @@ func (b *logQueryStatementBuilder) buildScalarQuery(

// buildFilterCondition builds SQL condition from filter expression
func (b *logQueryStatementBuilder) addFilterCondition(
	_ context.Context,
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	start, end uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) ([]string, error) {

	var filterWhereClause *sqlbuilder.WhereClause
@@ -435,6 +449,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
		FullTextColumn: b.fullTextColumn,
		JsonBodyPrefix: b.jsonBodyPrefix,
		JsonKeyToKey:   b.jsonKeyToKey,
		Variables:      variables,
	})

	if err != nil {
@@ -471,9 +486,10 @@ func (b *logQueryStatementBuilder) maybeAttachResourceFilter(
	sb *sqlbuilder.SelectBuilder,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	variables map[string]qbtypes.VariableItem,
) (cteSQL string, cteArgs []any, err error) {

	stmt, err := b.buildResourceFilterCTE(ctx, query, start, end)
	stmt, err := b.buildResourceFilterCTE(ctx, query, start, end, variables)
	if err != nil {
		return "", nil, err
	}
@@ -487,6 +503,7 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
	ctx context.Context,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

	return b.resourceFilterStmtBuilder.Build(
@@ -495,5 +512,6 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
		end,
		qbtypes.RequestTypeRaw,
		query,
		variables,
	)
}
78 pkg/telemetrylogs/stmt_builder_having_test.go Normal file
@@ -0,0 +1,78 @@
package telemetrylogs

import (
    "testing"

    "github.com/SigNoz/signoz/pkg/querybuilder"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/stretchr/testify/assert"
)

func TestHavingExpressionRewriter_LogQueries(t *testing.T) {
    tests := []struct {
        name               string
        havingExpression   string
        aggregations       []qbtypes.LogAggregation
        expectedExpression string
    }{
        {
            name:             "single aggregation with alias",
            havingExpression: "total_logs > 1000",
            aggregations: []qbtypes.LogAggregation{
                {Expression: "count()", Alias: "total_logs"},
            },
            expectedExpression: "__result_0 > 1000",
        },
        {
            name:             "multiple aggregations with complex expression",
            havingExpression: "(total > 100 AND avg_duration < 500) OR total > 10000",
            aggregations: []qbtypes.LogAggregation{
                {Expression: "count()", Alias: "total"},
                {Expression: "avg(duration)", Alias: "avg_duration"},
            },
            expectedExpression: "(__result_0 > 100 AND __result_1 < 500) OR __result_0 > 10000",
        },
        {
            name:             "__result reference for single aggregation",
            havingExpression: "__result > 500",
            aggregations: []qbtypes.LogAggregation{
                {Expression: "count()", Alias: ""},
            },
            expectedExpression: "__result_0 > 500",
        },
        {
            name:             "expression reference",
            havingExpression: "sum(bytes) > 1024000",
            aggregations: []qbtypes.LogAggregation{
                {Expression: "sum(bytes)", Alias: ""},
            },
            expectedExpression: "__result_0 > 1024000",
        },
        {
            name:             "__result{number} format",
            havingExpression: "__result0 > 100 AND __result1 < 1000",
            aggregations: []qbtypes.LogAggregation{
                {Expression: "count()", Alias: ""},
                {Expression: "sum(bytes)", Alias: ""},
            },
            expectedExpression: "__result_0 > 100 AND __result_1 < 1000",
        },
        {
            name:             "mixed aliases and expressions",
            havingExpression: "error_count > 10 AND count() < 1000",
            aggregations: []qbtypes.LogAggregation{
                {Expression: "count()", Alias: ""},
                {Expression: "countIf(level='error')", Alias: "error_count"},
            },
            expectedExpression: "__result_1 > 10 AND __result_0 < 1000",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            rewriter := querybuilder.NewHavingExpressionRewriter()
            result := rewriter.RewriteForLogs(tt.havingExpression, tt.aggregations)
            assert.Equal(t, tt.expectedExpression, result)
        })
    }
}
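
The rewriter resolves three reference styles to positional result columns: an aggregation's alias, the bare `__result`/`__result{N}` placeholders, and the verbatim aggregation expression all map to `__result_{index}` in the generated SQL. A minimal usage sketch with the same API the test above exercises:

    // Sketch: rewrite a user-facing having expression into SQL column names.
    rewriter := querybuilder.NewHavingExpressionRewriter()
    aggs := []qbtypes.LogAggregation{
        {Expression: "count()", Alias: "total_logs"},
    }
    expr := rewriter.RewriteForLogs("total_logs > 1000", aggs)
    // expr is now "__result_0 > 1000", valid against the generated SELECT.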
@@ -136,7 +136,7 @@ func TestStatementBuilder(t *testing.T) {
    for _, c := range cases {
        t.Run(c.name, func(t *testing.T) {

            q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query)
            q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, make(map[string]qbtypes.VariableItem))

            if c.expectedErr != nil {
                require.Error(t, err)

@@ -76,6 +76,7 @@ func (b *metricQueryStatementBuilder) Build(
    end uint64,
    _ qbtypes.RequestType,
    query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
    variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
    keySelectors := getKeySelectors(query)
    keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
@@ -83,7 +84,7 @@ func (b *metricQueryStatementBuilder) Build(
        return nil, err
    }

    return b.buildPipelineStatement(ctx, start, end, query, keys)
    return b.buildPipelineStatement(ctx, start, end, query, keys, variables)
}

// Fast-path (no fingerprint grouping)
@@ -139,6 +140,7 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement(
    start, end uint64,
    query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
    keys map[string][]*telemetrytypes.TelemetryFieldKey,
    variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
    var (
        cteFragments []string
@@ -178,7 +180,7 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement(

    // time_series_cte
    // this is applicable for all the queries
    if timeSeriesCTE, timeSeriesCTEArgs, err = b.buildTimeSeriesCTE(ctx, start, end, query, keys); err != nil {
    if timeSeriesCTE, timeSeriesCTEArgs, err = b.buildTimeSeriesCTE(ctx, start, end, query, keys, variables); err != nil {
        return nil, err
    }

@@ -269,6 +271,7 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
    start, end uint64,
    query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
    keys map[string][]*telemetrytypes.TelemetryFieldKey,
    variables map[string]qbtypes.VariableItem,
) (string, []any, error) {
    sb := sqlbuilder.NewSelectBuilder()

@@ -281,6 +284,7 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
        ConditionBuilder: b.cb,
        FieldKeys:        keys,
        FullTextColumn:   &telemetrytypes.TelemetryFieldKey{Name: "labels"},
        Variables:        variables,
    })
    if err != nil {
        return "", nil, err
@@ -502,9 +506,25 @@ func (b *metricQueryStatementBuilder) buildFinalSelect(
        sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
    }
    sb.GroupBy("ts")

    // Add having clause if needed
    if query.Having != nil && query.Having.Expression != "" {
        // Rewrite having expression to use SQL column names
        rewriter := querybuilder.NewHavingExpressionRewriter()
        rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
        sb.Having(rewrittenExpr)
    }
} else {
    sb.Select("*")
    sb.From("__spatial_aggregation_cte")

    // For non-percentile queries, we need to filter in WHERE clause since we're selecting from CTE
    if query.Having != nil && query.Having.Expression != "" {
        // Rewrite having expression to use SQL column names
        rewriter := querybuilder.NewHavingExpressionRewriter()
        rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
        sb.Where(rewrittenExpr)
    }
}

q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
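
Note the asymmetry above: while the builder is still grouping, the rewritten predicate lands in HAVING, but in the else branch the final SELECT reads already-aggregated rows from `__spatial_aggregation_cte`, so the same predicate has to become a plain WHERE filter. A small go-sqlbuilder illustration of that second path (names as in the hunk above):

    // Sketch: filtering pre-aggregated CTE rows uses WHERE, not HAVING,
    // because there is no GROUP BY in the outer query.
    sb := sqlbuilder.NewSelectBuilder()
    sb.Select("*").From("__spatial_aggregation_cte")
    sb.Where("__result_0 > 100") // __result_0 is already materialized in the CTE
    sql, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    // sql: SELECT * FROM __spatial_aggregation_cte WHERE __result_0 > 100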

@@ -4,6 +4,7 @@ import (
    "context"
    "fmt"
    "slices"
    "strings"

    schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -188,6 +189,11 @@ func (c *conditionBuilder) ConditionFor(
    value any,
    sb *sqlbuilder.SelectBuilder,
) (string, error) {
    // Check if this is a span search scope field
    if key.FieldContext == telemetrytypes.FieldContextSpan && c.isSpanScopeField(key.Name) {
        return c.buildSpanScopeCondition(key, operator, value)
    }

    condition, err := c.conditionFor(ctx, key, operator, value, sb)
    if err != nil {
        return "", err
@@ -208,3 +214,41 @@ func (c *conditionBuilder) ConditionFor(
    }
    return condition, nil
}

func (c *conditionBuilder) isSpanScopeField(name string) bool {
    keyName := strings.ToLower(name)
    return keyName == SpanSearchScopeRoot || keyName == SpanSearchScopeEntryPoint
}

func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, error) {
    // Span scope fields only support = true operator
    if operator != qbtypes.FilterOperatorEqual {
        return "", fmt.Errorf("span scope field %s only supports '=' operator", key.Name)
    }

    // Check if value is true (can be bool true or string "true")
    isTrue := false
    switch v := value.(type) {
    case bool:
        isTrue = v
    case string:
        isTrue = strings.ToLower(v) == "true"
    default:
        return "", fmt.Errorf("span scope field %s expects boolean value, got %T", key.Name, value)
    }

    if !isTrue {
        return "", fmt.Errorf("span scope field %s can only be filtered with value 'true'", key.Name)
    }

    keyName := strings.ToLower(key.Name)
    switch keyName {
    case SpanSearchScopeRoot:
        return "parent_span_id = ''", nil
    case SpanSearchScopeEntryPoint:
        return fmt.Sprintf("((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''",
            DBName, TopLevelOperationsTableName), nil
    default:
        return "", fmt.Errorf("invalid span search scope: %s", key.Name)
    }
}
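
Both scope fields compile to fixed predicates rather than column comparisons: a root span is one with an empty `parent_span_id`, and an entry-point span is a non-root span whose `(name, service)` pair appears in the top-level operations table. A sketch of the resulting mapping, mirroring the tests added below (`cb`, `ctx`, and `sb` set up as in those tests):

    // Sketch: span scope fields translate to fixed SQL predicates.
    cond, err := cb.ConditionFor(ctx, &telemetrytypes.TelemetryFieldKey{
        Name:         "isroot",
        FieldContext: telemetrytypes.FieldContextSpan,
    }, qbtypes.FilterOperatorEqual, true, sb)
    // err == nil, cond == "parent_span_id = ''"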

6  pkg/telemetrytraces/constants.go  Normal file
@@ -0,0 +1,6 @@
package telemetrytraces

const (
    SpanSearchScopeRoot       = "isroot"
    SpanSearchScopeEntryPoint = "isentrypoint"
)

@@ -150,6 +150,12 @@ func (m *defaultFieldMapper) getColumn(
            return indexV3Columns["attributes_bool"], nil
        }
    case telemetrytypes.FieldContextSpan, telemetrytypes.FieldContextUnspecified:
        // Check if this is a span scope field
        if strings.ToLower(key.Name) == SpanSearchScopeRoot || strings.ToLower(key.Name) == SpanSearchScopeEntryPoint {
            // Return a dummy column for span scope fields
            // The actual SQL will be generated in the condition builder
            return &schema.Column{Name: key.Name, Type: schema.ColumnTypeBool}, nil
        }
        if col, ok := indexV3Columns[key.Name]; ok {
            return col, nil
        }
@@ -171,6 +177,13 @@ func (m *defaultFieldMapper) FieldFor(
    ctx context.Context,
    key *telemetrytypes.TelemetryFieldKey,
) (string, error) {
    // Special handling for span scope fields
    if key.FieldContext == telemetrytypes.FieldContextSpan &&
        (strings.ToLower(key.Name) == SpanSearchScopeRoot || strings.ToLower(key.Name) == SpanSearchScopeEntryPoint) {
        // Return the field name as-is, the condition builder will handle the SQL generation
        return key.Name, nil
    }

    column, err := m.getColumn(ctx, key)
    if err != nil {
        return "", err

142  pkg/telemetrytraces/span_scope_simple_test.go  Normal file
@@ -0,0 +1,142 @@
package telemetrytraces

import (
    "testing"

    "github.com/SigNoz/signoz/pkg/querybuilder"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/huandu/go-sqlbuilder"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestSpanScopeFilterExpression(t *testing.T) {
    // Test that span scope fields work in filter expressions
    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)

    tests := []struct {
        name              string
        expression        string
        expectedCondition string
        expectError       bool
    }{
        {
            name:              "simple isroot filter",
            expression:        "isroot = true",
            expectedCondition: "parent_span_id = ''",
        },
        {
            name:              "simple isentrypoint filter",
            expression:        "isentrypoint = true",
            expectedCondition: "((name, resource_string_service$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
        },
        {
            name:              "combined filter with AND",
            expression:        "isroot = true AND has_error = true",
            expectedCondition: "parent_span_id = ''",
        },
        {
            name:              "combined filter with OR",
            expression:        "isentrypoint = true OR has_error = true",
            expectedCondition: "((name, resource_string_service$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            // Parse the expression and build the where clause
            sb := sqlbuilder.NewSelectBuilder()

            // Prepare field keys for span scope fields
            fieldKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
            fieldKeys["isroot"] = []*telemetrytypes.TelemetryFieldKey{{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            }}
            fieldKeys["isentrypoint"] = []*telemetrytypes.TelemetryFieldKey{{
                Name:         "isentrypoint",
                FieldContext: telemetrytypes.FieldContextSpan,
            }}
            fieldKeys["has_error"] = []*telemetrytypes.TelemetryFieldKey{{
                Name:         "has_error",
                FieldContext: telemetrytypes.FieldContextSpan,
            }}

            whereClause, _, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
                FieldMapper:      fm,
                ConditionBuilder: cb,
                FieldKeys:        fieldKeys,
                Builder:          sb,
            })

            if tt.expectError {
                assert.Error(t, err)
            } else {
                require.NoError(t, err)
                require.NotNil(t, whereClause)

                // Apply the where clause to the builder and get the SQL
                sb.AddWhereClause(whereClause)
                whereSQL, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
                t.Logf("Generated SQL: %s", whereSQL)
                assert.Contains(t, whereSQL, tt.expectedCondition)
            }
        })
    }
}

func TestSpanScopeWithResourceFilter(t *testing.T) {
    // Test that span scope fields are marked as SkipResourceFilter
    tests := []struct {
        name       string
        expression string
    }{
        {
            name:       "isroot should skip resource filter",
            expression: "isroot = true",
        },
        {
            name:       "isentrypoint should skip resource filter",
            expression: "isentrypoint = true",
        },
        {
            name:       "combined expression should skip resource filter",
            expression: "isroot = true AND service.name = 'api'",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            // For now, just verify the expression parses correctly
            // In a real implementation, we'd need to check that the resource filter
            // is properly skipped when span scope fields are present
            fm := NewFieldMapper()
            cb := NewConditionBuilder(fm)

            // Prepare field keys for the test
            fieldKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
            fieldKeys["isroot"] = []*telemetrytypes.TelemetryFieldKey{{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            }}
            fieldKeys["isentrypoint"] = []*telemetrytypes.TelemetryFieldKey{{
                Name:         "isentrypoint",
                FieldContext: telemetrytypes.FieldContextSpan,
            }}
            fieldKeys["service.name"] = []*telemetrytypes.TelemetryFieldKey{{
                Name:         "service.name",
                FieldContext: telemetrytypes.FieldContextResource,
            }}

            _, _, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
                FieldMapper:        fm,
                ConditionBuilder:   cb,
                FieldKeys:          fieldKeys,
                SkipResourceFilter: false, // This would be set by the statement builder
            })

            assert.NoError(t, err)
        })
    }
}

181  pkg/telemetrytraces/span_scope_test.go  Normal file
@@ -0,0 +1,181 @@
package telemetrytraces

import (
    "context"
    "testing"

    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/huandu/go-sqlbuilder"
    "github.com/stretchr/testify/assert"
)

func TestSpanScopeConditions(t *testing.T) {
    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)
    sb := sqlbuilder.NewSelectBuilder()
    ctx := context.Background()

    tests := []struct {
        name          string
        key           *telemetrytypes.TelemetryFieldKey
        operator      qbtypes.FilterOperator
        value         any
        expectedSQL   string
        expectedError bool
    }{
        {
            name: "isroot = true",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:    qbtypes.FilterOperatorEqual,
            value:       true,
            expectedSQL: "parent_span_id = ''",
        },
        {
            name: "isroot = 'true' (string)",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:    qbtypes.FilterOperatorEqual,
            value:       "true",
            expectedSQL: "parent_span_id = ''",
        },
        {
            name: "isroot = 'TRUE' (uppercase)",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isRoot",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:    qbtypes.FilterOperatorEqual,
            value:       "TRUE",
            expectedSQL: "parent_span_id = ''",
        },
        {
            name: "isentrypoint = true",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isentrypoint",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:    qbtypes.FilterOperatorEqual,
            value:       true,
            expectedSQL: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
        },
        {
            name: "isEntryPoint = true (mixed case)",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isEntryPoint",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:    qbtypes.FilterOperatorEqual,
            value:       true,
            expectedSQL: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
        },
        {
            name: "isroot with wrong operator",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:      qbtypes.FilterOperatorNotEqual,
            value:         true,
            expectedError: true,
        },
        {
            name: "isroot = false",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:      qbtypes.FilterOperatorEqual,
            value:         false,
            expectedError: true,
        },
        {
            name: "isroot with non-boolean value",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:      qbtypes.FilterOperatorEqual,
            value:         123,
            expectedError: true,
        },
        {
            name: "regular span field",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "name",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            operator:    qbtypes.FilterOperatorEqual,
            value:       "test-span",
            expectedSQL: "$1", // sqlbuilder uses placeholder syntax
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            condition, err := cb.ConditionFor(ctx, tt.key, tt.operator, tt.value, sb)

            if tt.expectedError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
                assert.Equal(t, tt.expectedSQL, condition)
            }
        })
    }
}

func TestSpanScopeFieldMapper(t *testing.T) {
    fm := NewFieldMapper()
    ctx := context.Background()

    tests := []struct {
        name        string
        key         *telemetrytypes.TelemetryFieldKey
        expectField string
        expectError bool
    }{
        {
            name: "isroot field",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isroot",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            expectField: "isroot",
        },
        {
            name: "isentrypoint field",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "isentrypoint",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            expectField: "isentrypoint",
        },
        {
            name: "regular span field",
            key: &telemetrytypes.TelemetryFieldKey{
                Name:         "name",
                FieldContext: telemetrytypes.FieldContextSpan,
            },
            expectField: "name",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            field, err := fm.FieldFor(ctx, tt.key)

            if tt.expectError {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
                assert.Equal(t, tt.expectField, field)
            }
        })
    }
}
@@ -9,6 +9,7 @@ import (
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/querybuilder"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/huandu/go-sqlbuilder"
@@ -25,6 +26,7 @@ type traceQueryStatementBuilder struct {
    cb                        qbtypes.ConditionBuilder
    resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
    aggExprRewriter           qbtypes.AggExprRewriter
    telemetryStore            telemetrystore.TelemetryStore
}

var _ qbtypes.StatementBuilder[qbtypes.TraceAggregation] = (*traceQueryStatementBuilder)(nil)

@@ -36,6 +38,7 @@ func NewTraceQueryStatementBuilder(
    conditionBuilder qbtypes.ConditionBuilder,
    resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
    aggExprRewriter qbtypes.AggExprRewriter,
    telemetryStore telemetrystore.TelemetryStore,
) *traceQueryStatementBuilder {
    tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces")
    return &traceQueryStatementBuilder{
@@ -45,6 +48,7 @@ func NewTraceQueryStatementBuilder(
        cb:                        conditionBuilder,
        resourceFilterStmtBuilder: resourceFilterStmtBuilder,
        aggExprRewriter:           aggExprRewriter,
        telemetryStore:            telemetryStore,
    }
}

@@ -55,6 +59,7 @@ func (b *traceQueryStatementBuilder) Build(
    end uint64,
    requestType qbtypes.RequestType,
    query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
    variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

    start = querybuilder.ToNanoSecs(start)
@@ -67,16 +72,37 @@ func (b *traceQueryStatementBuilder) Build(
        return nil, err
    }

    // Check if filter contains trace_id(s) and optimize time range if needed
    if query.Filter != nil && query.Filter.Expression != "" && b.telemetryStore != nil {
        traceIDs, found := ExtractTraceIDsFromFilter(query.Filter.Expression, keys)
        if found && len(traceIDs) > 0 {
            // Create trace time range finder
            finder := NewTraceTimeRangeFinder(b.telemetryStore)

            // Get the actual time range of the trace(s)
            traceStart, traceEnd, err := finder.GetTraceTimeRangeMulti(ctx, traceIDs)
            if err != nil {
                // Log the error but continue with original time range
                b.logger.Debug("failed to get trace time range", "trace_ids", traceIDs, "error", err)
            } else if traceStart > 0 && traceEnd > 0 {
                // Use the trace's actual time range instead of the request's time range
                start = traceStart
                end = traceEnd
                b.logger.Debug("optimized time range for traces", "trace_ids", traceIDs, "start", start, "end", end)
            }
        }
    }

    // Create SQL builder
    q := sqlbuilder.NewSelectBuilder()

    switch requestType {
    case qbtypes.RequestTypeRaw:
        return b.buildListQuery(ctx, q, query, start, end, keys)
        return b.buildListQuery(ctx, q, query, start, end, keys, variables)
    case qbtypes.RequestTypeTimeSeries:
        return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys)
        return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys, variables)
    case qbtypes.RequestTypeScalar:
        return b.buildScalarQuery(ctx, q, query, start, end, keys, false)
        return b.buildScalarQuery(ctx, q, query, start, end, keys, variables, false, false)
    }

    return nil, fmt.Errorf("unsupported request type: %s", requestType)
@@ -134,6 +160,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
    query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
    start, end uint64,
    keys map[string][]*telemetrytypes.TelemetryFieldKey,
    variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

    var (
@@ -141,7 +168,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
        cteArgs [][]any
    )

    if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil {
    if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
        return nil, err
    } else if frag != "" {
        cteFragments = append(cteFragments, frag)
@@ -172,7 +199,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
    sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))

    // Add filter conditions
    warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
    warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
    if err != nil {
        return nil, err
    }
@@ -215,6 +242,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
    query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
    start, end uint64,
    keys map[string][]*telemetrytypes.TelemetryFieldKey,
    variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

    var (
@@ -222,7 +250,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
        cteArgs [][]any
    )

    if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil {
    if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
        return nil, err
    } else if frag != "" {
        cteFragments = append(cteFragments, frag)
@@ -265,7 +293,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
    }

    sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
    warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
    warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
    if err != nil {
        return nil, err
    }
@@ -273,10 +301,10 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
    var finalSQL string
    var finalArgs []any

    if query.Limit > 0 {
    if query.Limit > 0 && len(query.GroupBy) > 0 {
        // build the scalar “top/bottom-N” query in its own builder.
        cteSB := sqlbuilder.NewSelectBuilder()
        cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true)
        cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, variables, true, true)
        if err != nil {
            return nil, err
        }
@@ -290,8 +318,12 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(

        // Group by all dimensions
        sb.GroupBy("ALL")
        // Add having clause if needed
        if query.Having != nil && query.Having.Expression != "" {
            sb.Having(query.Having.Expression)
            // Rewrite having expression to use SQL column names
            rewriter := querybuilder.NewHavingExpressionRewriter()
            rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
            sb.Having(rewrittenExpr)
        }

        combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -303,8 +335,12 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(

    } else {
        sb.GroupBy("ALL")
        // Add having clause if needed
        if query.Having != nil && query.Having.Expression != "" {
            sb.Having(query.Having.Expression)
            // Rewrite having expression to use SQL column names
            rewriter := querybuilder.NewHavingExpressionRewriter()
            rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
            sb.Having(rewrittenExpr)
        }

        combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -329,7 +365,9 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
    query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
    start, end uint64,
    keys map[string][]*telemetrytypes.TelemetryFieldKey,
    variables map[string]qbtypes.VariableItem,
    skipResourceCTE bool,
    skipHaving bool,
) (*qbtypes.Statement, error) {

    var (
@@ -337,7 +375,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
        cteArgs [][]any
    )

    if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil {
    if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
        return nil, err
    } else if frag != "" && !skipResourceCTE {
        cteFragments = append(cteFragments, frag)
@@ -381,7 +419,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
    sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))

    // Add filter conditions
    warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
    warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
    if err != nil {
        return nil, err
    }
@@ -390,8 +428,11 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
    sb.GroupBy("ALL")

    // Add having clause if needed
    if query.Having != nil && query.Having.Expression != "" {
        sb.Having(query.Having.Expression)
    if query.Having != nil && query.Having.Expression != "" && !skipHaving {
        // Rewrite having expression to use SQL column names
        rewriter := querybuilder.NewHavingExpressionRewriter()
        rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
        sb.Having(rewrittenExpr)
    }

    // Add order by
@@ -430,11 +471,12 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(

// addFilterCondition builds SQL condition from filter expression
func (b *traceQueryStatementBuilder) addFilterCondition(
    _ context.Context,
    ctx context.Context,
    sb *sqlbuilder.SelectBuilder,
    start, end uint64,
    query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
    keys map[string][]*telemetrytypes.TelemetryFieldKey,
    variables map[string]qbtypes.VariableItem,
) ([]string, error) {

    var filterWhereClause *sqlbuilder.WhereClause
@@ -448,6 +490,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
        ConditionBuilder:   b.cb,
        FieldKeys:          keys,
        SkipResourceFilter: true,
        Variables:          variables,
    })

    if err != nil {
@@ -484,9 +527,10 @@ func (b *traceQueryStatementBuilder) maybeAttachResourceFilter(
    sb *sqlbuilder.SelectBuilder,
    query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
    start, end uint64,
    variables map[string]qbtypes.VariableItem,
) (cteSQL string, cteArgs []any, err error) {

    stmt, err := b.buildResourceFilterCTE(ctx, query, start, end)
    stmt, err := b.buildResourceFilterCTE(ctx, query, start, end, variables)
    if err != nil {
        return "", nil, err
    }
@@ -500,6 +544,7 @@ func (b *traceQueryStatementBuilder) buildResourceFilterCTE(
    ctx context.Context,
    query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
    start, end uint64,
    variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

    return b.resourceFilterStmtBuilder.Build(
@@ -508,5 +553,6 @@ func (b *traceQueryStatementBuilder) buildResourceFilterCTE(
        end,
        qbtypes.RequestTypeRaw,
        query,
        variables,
    )
}

@@ -82,12 +82,13 @@ func TestStatementBuilder(t *testing.T) {
        cb,
        resourceFilterStmtBuilder,
        aggExprRewriter,
        nil, // telemetryStore is nil for tests
    )

    for _, c := range cases {
        t.Run(c.name, func(t *testing.T) {

            q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query)
            q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

            if c.expectedErr != nil {
                require.Error(t, err)

221  pkg/telemetrytraces/trace_id_extractor.go  Normal file
@@ -0,0 +1,221 @@
package telemetrytraces

import (
    "strings"

    grammar "github.com/SigNoz/signoz/pkg/parser/grammar"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/antlr4-go/antlr/v4"
)

// traceIDExtractor is a visitor that extracts trace IDs from filter expressions
type traceIDExtractor struct {
    fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
    traceIDs  []string
    found     bool
}

// ExtractTraceIDsFromFilter uses ANTLR parser to extract trace IDs from a filter expression
func ExtractTraceIDsFromFilter(filterExpr string, fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, bool) {
    // Check if we have a trace_id field in the field keys
    var hasTraceIDField bool
    for fieldName, keys := range fieldKeys {
        if strings.ToLower(fieldName) == "trace_id" || strings.ToLower(fieldName) == "traceid" {
            for _, key := range keys {
                if key.FieldContext == telemetrytypes.FieldContextSpan {
                    hasTraceIDField = true
                    break
                }
            }
        }
        if hasTraceIDField {
            break
        }
    }

    if !hasTraceIDField {
        return nil, false
    }

    // Setup the ANTLR parsing pipeline
    input := antlr.NewInputStream(filterExpr)
    lexer := grammar.NewFilterQueryLexer(input)

    // Set up error handling
    lexer.RemoveErrorListeners()

    tokens := antlr.NewCommonTokenStream(lexer, 0)
    parser := grammar.NewFilterQueryParser(tokens)
    parser.RemoveErrorListeners()

    // Parse the query
    tree := parser.Query()

    // Visit the parse tree with our trace ID extractor
    extractor := &traceIDExtractor{
        fieldKeys: fieldKeys,
    }
    extractor.Visit(tree)

    return extractor.traceIDs, extractor.found
}

// Visit dispatches to the specific visit method based on node type
func (e *traceIDExtractor) Visit(tree antlr.ParseTree) any {
    if tree == nil {
        return nil
    }

    switch t := tree.(type) {
    case *grammar.QueryContext:
        return e.VisitQuery(t)
    case *grammar.ExpressionContext:
        return e.VisitExpression(t)
    case *grammar.OrExpressionContext:
        return e.VisitOrExpression(t)
    case *grammar.AndExpressionContext:
        return e.VisitAndExpression(t)
    case *grammar.UnaryExpressionContext:
        return e.VisitUnaryExpression(t)
    case *grammar.PrimaryContext:
        return e.VisitPrimary(t)
    case *grammar.ComparisonContext:
        return e.VisitComparison(t)
    case *grammar.InClauseContext:
        return e.VisitInClause(t)
    default:
        // For other node types, visit children
        for i := 0; i < tree.GetChildCount(); i++ {
            if child := tree.GetChild(i); child != nil {
                if parseTree, ok := child.(antlr.ParseTree); ok {
                    e.Visit(parseTree)
                }
            }
        }
    }
    return nil
}

func (e *traceIDExtractor) VisitQuery(ctx *grammar.QueryContext) any {
    return e.Visit(ctx.Expression())
}

func (e *traceIDExtractor) VisitExpression(ctx *grammar.ExpressionContext) any {
    return e.Visit(ctx.OrExpression())
}

func (e *traceIDExtractor) VisitOrExpression(ctx *grammar.OrExpressionContext) any {
    for _, expr := range ctx.AllAndExpression() {
        e.Visit(expr)
    }
    return nil
}

func (e *traceIDExtractor) VisitAndExpression(ctx *grammar.AndExpressionContext) any {
    for _, expr := range ctx.AllUnaryExpression() {
        e.Visit(expr)
    }
    return nil
}

func (e *traceIDExtractor) VisitUnaryExpression(ctx *grammar.UnaryExpressionContext) any {
    return e.Visit(ctx.Primary())
}

func (e *traceIDExtractor) VisitPrimary(ctx *grammar.PrimaryContext) any {
    if ctx.OrExpression() != nil {
        return e.Visit(ctx.OrExpression())
    } else if ctx.Comparison() != nil {
        return e.Visit(ctx.Comparison())
    }
    return nil
}

func (e *traceIDExtractor) VisitComparison(ctx *grammar.ComparisonContext) any {
    // Get the key
    keyCtx := ctx.Key()
    if keyCtx == nil {
        return nil
    }

    keyText := keyCtx.GetText()

    // Check if this is a trace_id field
    isTraceIDField := false
    for fieldName, keys := range e.fieldKeys {
        if strings.EqualFold(keyText, fieldName) && (strings.ToLower(fieldName) == "trace_id" || strings.ToLower(fieldName) == "traceid") {
            for _, key := range keys {
                if key.FieldContext == telemetrytypes.FieldContextSpan {
                    isTraceIDField = true
                    break
                }
            }
        }
        if isTraceIDField {
            break
        }
    }

    if !isTraceIDField {
        return nil
    }

    // Check the operator
    if ctx.EQUALS() != nil {
        // Handle single value comparison
        values := ctx.AllValue()
        if len(values) > 0 {
            if value := e.extractValue(values[0]); value != "" {
                e.traceIDs = append(e.traceIDs, value)
                e.found = true
            }
        }
    } else if ctx.InClause() != nil {
        // Handle IN clause
        return e.Visit(ctx.InClause())
    }

    return nil
}

func (e *traceIDExtractor) VisitInClause(ctx *grammar.InClauseContext) any {
    valueListCtx := ctx.ValueList()
    if valueListCtx == nil {
        return nil
    }

    // Extract all values from the value list
    for _, valueCtx := range valueListCtx.AllValue() {
        if value := e.extractValue(valueCtx); value != "" {
            e.traceIDs = append(e.traceIDs, value)
            e.found = true
        }
    }

    return nil
}

func (e *traceIDExtractor) extractValue(ctx grammar.IValueContext) string {
    if ctx.QUOTED_TEXT() != nil {
        // Remove quotes
        text := ctx.QUOTED_TEXT().GetText()
        if len(text) >= 2 {
            return text[1 : len(text)-1]
        }
    } else if ctx.KEY() != nil {
        return ctx.KEY().GetText()
    } else if ctx.NUMBER() != nil {
        return ctx.NUMBER().GetText()
    }
    return ""
}

// ExtractTraceIDFromFilter extracts a single trace ID from a filter expression if present
// Deprecated: Use ExtractTraceIDsFromFilter instead
func ExtractTraceIDFromFilter(filterExpr string, fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey) (string, bool) {
    traceIDs, found := ExtractTraceIDsFromFilter(filterExpr, fieldKeys)
    if found && len(traceIDs) > 0 {
        return traceIDs[0], true
    }
    return "", false
}
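
The extractor only reports IDs when the filter key is a span-context `trace_id`/`traceId`, and it walks EQUALS and IN comparisons anywhere in the expression tree while ignoring everything else. A standalone usage sketch (field keys shaped as in the tests below):

    // Sketch: pull trace IDs out of a filter expression before building SQL.
    fieldKeys := map[string][]*telemetrytypes.TelemetryFieldKey{
        "trace_id": {{Name: "trace_id", FieldContext: telemetrytypes.FieldContextSpan}},
    }
    ids, found := ExtractTraceIDsFromFilter("trace_id IN ['abc', 'def']", fieldKeys)
    // found == true, ids == []string{"abc", "def"}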

81  pkg/telemetrytraces/trace_time_range.go  Normal file
@@ -0,0 +1,81 @@
package telemetrytraces

import (
    "context"
    "database/sql"
    "fmt"
    "strings"

    "github.com/SigNoz/signoz/pkg/telemetrystore"
)

const (
    SignozSpansTableName      = "distributed_signoz_spans"
    SignozSpansLocalTableName = "signoz_spans"
)

// TraceTimeRangeFinder finds the time range of a trace given its ID
type TraceTimeRangeFinder struct {
    telemetryStore telemetrystore.TelemetryStore
}

func NewTraceTimeRangeFinder(telemetryStore telemetrystore.TelemetryStore) *TraceTimeRangeFinder {
    return &TraceTimeRangeFinder{
        telemetryStore: telemetryStore,
    }
}

// GetTraceTimeRange queries the signoz_spans table to find the start and end time of a trace
func (f *TraceTimeRangeFinder) GetTraceTimeRange(ctx context.Context, traceID string) (startNano, endNano uint64, err error) {
    traceIDs := []string{traceID}
    return f.GetTraceTimeRangeMulti(ctx, traceIDs)
}

// GetTraceTimeRangeMulti queries the signoz_spans table to find the start and end time across multiple traces
func (f *TraceTimeRangeFinder) GetTraceTimeRangeMulti(ctx context.Context, traceIDs []string) (startNano, endNano uint64, err error) {
    if len(traceIDs) == 0 {
        return 0, 0, fmt.Errorf("no trace IDs provided")
    }

    // Clean the trace IDs - remove any quotes
    cleanedIDs := make([]string, len(traceIDs))
    for i, id := range traceIDs {
        cleanedIDs[i] = strings.Trim(id, "'\"")
    }

    // Build placeholders for the IN clause
    placeholders := make([]string, len(cleanedIDs))
    args := make([]any, len(cleanedIDs))
    for i, id := range cleanedIDs {
        placeholders[i] = "?"
        args[i] = id
    }

    // Query to find min and max timestamp across all traces
    query := fmt.Sprintf(`
        SELECT
            toUnixTimestamp64Nano(min(timestamp)) as start_time,
            toUnixTimestamp64Nano(max(timestamp)) as end_time
        FROM %s.%s
        WHERE traceID IN (%s)
        AND timestamp >= now() - INTERVAL 30 DAY
    `, DBName, SignozSpansTableName, strings.Join(placeholders, ", "))

    row := f.telemetryStore.ClickhouseDB().QueryRow(ctx, query, args...)

    err = row.Scan(&startNano, &endNano)
    if err != nil {
        if err == sql.ErrNoRows {
            return 0, 0, fmt.Errorf("traces not found: %v", cleanedIDs)
        }
        return 0, 0, fmt.Errorf("failed to query trace time range: %w", err)
    }

    // Add some buffer time (1 second before and after)
    if startNano > 1_000_000_000 { // 1 second in nanoseconds
        startNano -= 1_000_000_000
    }
    endNano += 1_000_000_000

    return startNano, endNano, nil
}
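
The finder pads the discovered window by one second on each side so spans at the very edges of the trace are not clipped, and it bounds the scan to the last 30 days of data. A sketch of direct usage, assuming a wired telemetryStore; the trace ID here is made up:

    // Sketch only; telemetryStore comes from the application's wiring.
    finder := NewTraceTimeRangeFinder(telemetryStore)
    startNano, endNano, err := finder.GetTraceTimeRange(ctx, "4bf92f3577b34da6a3ce929d0e0e4736")
    if err == nil {
        // startNano/endNano are unix nanoseconds, already padded by ±1s,
        // and can replace the request's start/end directly.
        _ = endNano - startNano
    }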

188  pkg/telemetrytraces/trace_time_range_integration_test.go  Normal file
@@ -0,0 +1,188 @@
package telemetrytraces

import (
    "context"
    "strings"
    "testing"

    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    "github.com/SigNoz/signoz/pkg/querybuilder"
    "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestTraceTimeRangeOptimization(t *testing.T) {
    // This test verifies that when a trace_id filter is present,
    // the statement builder can optimize the time range
    // (though without a real DB connection, it will use the original time range)

    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()

    // Setup field keys including trace_id
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    // Add trace_id to the keys map
    mockMetadataStore.KeysMap["trace_id"] = []*telemetrytypes.TelemetryFieldKey{{
        Name:          "trace_id",
        FieldContext:  telemetrytypes.FieldContextSpan,
        FieldDataType: telemetrytypes.FieldDataTypeString,
        Signal:        telemetrytypes.SignalTraces,
    }}
    mockMetadataStore.KeysMap["name"] = []*telemetrytypes.TelemetryFieldKey{{
        Name:          "name",
        FieldContext:  telemetrytypes.FieldContextSpan,
        FieldDataType: telemetrytypes.FieldDataTypeString,
        Signal:        telemetrytypes.SignalTraces,
    }}

    resourceFilterFM := resourcefilter.NewFieldMapper()
    resourceFilterCB := resourcefilter.NewConditionBuilder(resourceFilterFM)
    resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
        resourceFilterFM,
        resourceFilterCB,
        mockMetadataStore,
    )

    aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)

    // Create statement builder with nil telemetryStore (no DB connection in unit test)
    statementBuilder := NewTraceQueryStatementBuilder(
        instrumentationtest.New().ToProviderSettings(),
        mockMetadataStore,
        fm,
        cb,
        resourceFilterStmtBuilder,
        aggExprRewriter,
        nil, // telemetryStore is nil - optimization won't happen but code path is tested
    )

    tests := []struct {
        name                   string
        query                  qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]
        expectTimeOptimization bool
    }{
        {
            name: "query with trace_id filter",
            query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                Signal: telemetrytypes.SignalTraces,
                Filter: &qbtypes.Filter{
                    Expression: "trace_id = '12345abc' AND service.name = 'api'",
                },
                SelectFields: []telemetrytypes.TelemetryFieldKey{
                    {Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
                },
            },
            expectTimeOptimization: true, // would optimize if telemetryStore was provided
        },
        {
            name: "query with trace_id IN filter",
            query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                Signal: telemetrytypes.SignalTraces,
                Filter: &qbtypes.Filter{
                    Expression: "trace_id IN ['12345abc', '67890def'] AND service.name = 'api'",
                },
                SelectFields: []telemetrytypes.TelemetryFieldKey{
                    {Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
                },
            },
            expectTimeOptimization: true, // would optimize if telemetryStore was provided
        },
        {
            name: "query without trace_id filter",
            query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                Signal: telemetrytypes.SignalTraces,
                Filter: &qbtypes.Filter{
                    Expression: "service.name = 'api'",
                },
                SelectFields: []telemetrytypes.TelemetryFieldKey{
                    {Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
                },
            },
            expectTimeOptimization: false,
        },
        {
            name: "query with empty filter",
            query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                Signal: telemetrytypes.SignalTraces,
                SelectFields: []telemetrytypes.TelemetryFieldKey{
                    {Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
                },
            },
            expectTimeOptimization: false,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ctx := context.Background()

            // Build the query
            stmt, err := statementBuilder.Build(
                ctx,
                1747947419000, // start time in ms
                1747983448000, // end time in ms
                qbtypes.RequestTypeRaw,
                tt.query,
                nil, // no variables
            )

            require.NoError(t, err)
            require.NotNil(t, stmt)

            // Verify the query was built successfully
            assert.NotEmpty(t, stmt.Query)

            // In a real scenario with telemetryStore, we would verify that
            // the time range was optimized when trace_id is present
            if tt.query.Filter != nil && tt.query.Filter.Expression != "" {
                traceIDs, found := ExtractTraceIDsFromFilter(tt.query.Filter.Expression, mockMetadataStore.KeysMap)
                assert.Equal(t, tt.expectTimeOptimization, found && len(traceIDs) > 0)
            }
        })
    }
}

func TestTraceTimeRangeFinderQuery(t *testing.T) {
    // This test verifies the SQL query generated by GetTraceTimeRange
    expectedQuery := `
        SELECT
            toUnixTimestamp64Nano(min(timestamp)) as start_time,
            toUnixTimestamp64Nano(max(timestamp)) as end_time
        FROM signoz_traces.distributed_signoz_spans
        WHERE traceID = ?
        AND timestamp >= now() - INTERVAL 30 DAY
    `

    // Remove extra whitespace for comparison
    expectedQuery = normalizeQuery(expectedQuery)

    // The actual query from the function
    actualQuery := `
        SELECT
            toUnixTimestamp64Nano(min(timestamp)) as start_time,
            toUnixTimestamp64Nano(max(timestamp)) as end_time
        FROM signoz_traces.distributed_signoz_spans
        WHERE traceID = ?
        AND timestamp >= now() - INTERVAL 30 DAY
    `
    actualQuery = normalizeQuery(actualQuery)

    assert.Equal(t, expectedQuery, actualQuery)
}

func normalizeQuery(query string) string {
    // Simple normalization: remove leading/trailing whitespace and collapse multiple spaces
    lines := []string{}
    for _, line := range strings.Split(strings.TrimSpace(query), "\n") {
        line = strings.TrimSpace(line)
        if line != "" {
            lines = append(lines, line)
        }
    }
    return strings.Join(lines, " ")
}

111  pkg/telemetrytraces/trace_time_range_multi_test.go  Normal file
@@ -0,0 +1,111 @@
package telemetrytraces

import (
    "context"
    "testing"

    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/stretchr/testify/assert"
)

func TestGetTraceTimeRangeMulti(t *testing.T) {
    // Test the SQL query generated for multiple trace IDs
    ctx := context.Background()

    tests := []struct {
        name      string
        traceIDs  []string
        expectErr bool
    }{
        {
            name:      "single trace ID",
            traceIDs:  []string{"trace1"},
            expectErr: false,
        },
        {
            name:      "multiple trace IDs",
            traceIDs:  []string{"trace1", "trace2", "trace3"},
            expectErr: false,
        },
        {
            name:      "empty trace IDs",
            traceIDs:  []string{},
            expectErr: true,
        },
        {
            name:      "trace IDs with quotes",
            traceIDs:  []string{"'trace1'", `"trace2"`, "trace3"},
            expectErr: false,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            // Without a real telemetry store, we can only test the error cases
            finder := &TraceTimeRangeFinder{telemetryStore: nil}

            if tt.expectErr {
                _, _, err := finder.GetTraceTimeRangeMulti(ctx, tt.traceIDs)
                assert.Error(t, err)
            }
            // With a nil telemetry store, non-error cases will panic when trying to query
            // This is expected and we skip those tests
        })
    }
}

func TestTraceIDExtractionWithComplexFilters(t *testing.T) {
    tests := []struct {
        name       string
        filterExpr string
        expectIDs  []string
    }{
        {
            name:       "nested parentheses with trace_id",
            filterExpr: "((trace_id = 'abc') AND (service = 'api'))",
            expectIDs:  []string{"abc"},
        },
        {
            name:       "OR condition with multiple trace_ids",
            filterExpr: "trace_id = 'abc' OR trace_id = 'def'",
            expectIDs:  []string{"abc", "def"},
        },
        {
            name:       "IN clause with OR condition",
            filterExpr: "trace_id IN ['a', 'b'] OR trace_id = 'c'",
            expectIDs:  []string{"a", "b", "c"},
        },
        {
            name:       "complex nested conditions",
            filterExpr: "(service = 'api' AND (trace_id IN ['x', 'y'] OR duration > 100)) AND status = 200",
            expectIDs:  []string{"x", "y"},
        },
    }

    fieldKeys := map[string][]*telemetrytypes.TelemetryFieldKey{
        "trace_id": {{
            Name:         "trace_id",
            FieldContext: telemetrytypes.FieldContextSpan,
        }},
        "service": {{
            Name:         "service",
            FieldContext: telemetrytypes.FieldContextResource,
        }},
        "duration": {{
            Name:         "duration",
            FieldContext: telemetrytypes.FieldContextSpan,
        }},
        "status": {{
            Name:         "status",
            FieldContext: telemetrytypes.FieldContextSpan,
        }},
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ids, found := ExtractTraceIDsFromFilter(tt.filterExpr, fieldKeys)
            assert.True(t, found)
            assert.Equal(t, tt.expectIDs, ids)
        })
    }
}

262  pkg/telemetrytraces/trace_time_range_test.go  Normal file
@@ -0,0 +1,262 @@
package telemetrytraces

import (
	"testing"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/stretchr/testify/assert"
)

func TestExtractTraceIDsFromFilter(t *testing.T) {
	tests := []struct {
		name        string
		filterExpr  string
		fieldKeys   map[string][]*telemetrytypes.TelemetryFieldKey
		expectIDs   []string
		expectFound bool
	}{
		{
			name:       "simple trace_id filter",
			filterExpr: "trace_id = '123abc'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"123abc"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with square brackets",
			filterExpr: "trace_id IN ['123abc', '456def', '789ghi']",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"123abc", "456def", "789ghi"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with parentheses",
			filterExpr: "trace_id IN ('aaa', 'bbb', 'ccc')",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"aaa", "bbb", "ccc"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with double quotes",
			filterExpr: `trace_id IN ["111", "222", "333"]`,
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"111", "222", "333"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with mixed quotes",
			filterExpr: `trace_id IN ['abc', "def", 'ghi']`,
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"abc", "def", "ghi"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with single value",
			filterExpr: "trace_id IN ['single']",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"single"},
			expectFound: true,
		},
		{
			name:       "trace_id IN in complex filter",
			filterExpr: "service.name = 'api' AND trace_id IN ['x1', 'x2'] AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"x1", "x2"},
			expectFound: true,
		},
		{
			name:       "no trace_id in filter",
			filterExpr: "service.name = 'api' AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   nil,
			expectFound: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ids, found := ExtractTraceIDsFromFilter(tt.filterExpr, tt.fieldKeys)
			assert.Equal(t, tt.expectFound, found)
			assert.Equal(t, tt.expectIDs, ids)
		})
	}
}

func TestExtractTraceIDFromFilter(t *testing.T) {
	tests := []struct {
		name        string
		filterExpr  string
		fieldKeys   map[string][]*telemetrytypes.TelemetryFieldKey
		expectID    string
		expectFound bool
	}{
		{
			name:       "simple trace_id filter",
			filterExpr: "trace_id = '123abc'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "123abc",
			expectFound: true,
		},
		{
			name:       "trace_id filter with double quotes",
			filterExpr: `trace_id = "456def"`,
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "456def",
			expectFound: true,
		},
		{
			name:       "traceId alternative name",
			filterExpr: "traceId = '789ghi'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"traceId": {{
					Name:         "traceId",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "789ghi",
			expectFound: true,
		},
		{
			name:       "trace_id in complex filter",
			filterExpr: "service.name = 'api' AND trace_id = 'xyz123' AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "xyz123",
			expectFound: true,
		},
		{
			name:       "no trace_id in filter",
			filterExpr: "service.name = 'api' AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "",
			expectFound: false,
		},
		{
			name:       "trace_id field not in span context",
			filterExpr: "trace_id = '123'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextAttribute,
				}},
			},
			expectID:    "",
			expectFound: false,
		},
		{
			name:       "unquoted trace_id value",
			filterExpr: "trace_id = abc123def",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "abc123def",
			expectFound: true,
		},
		{
			name:       "trace_id with parentheses",
			filterExpr: "(trace_id = '123') AND (service = 'api')",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "123",
			expectFound: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			id, found := ExtractTraceIDFromFilter(tt.filterExpr, tt.fieldKeys)
			assert.Equal(t, tt.expectFound, found)
			assert.Equal(t, tt.expectID, id)
		})
	}
}
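Taken together, the tests pin down the two helper signatures. As a minimal usage sketch, assuming the short-circuiting caller below (our illustration, not code from this diff), a reader could query the named traces directly instead of scanning the whole time range:

package telemetrytraces

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

// exampleShortCircuit is a hypothetical caller: when the filter expression
// already pins down trace IDs, the search can target those traces directly.
func exampleShortCircuit() {
	fieldKeys := map[string][]*telemetrytypes.TelemetryFieldKey{
		"trace_id": {{Name: "trace_id", FieldContext: telemetrytypes.FieldContextSpan}},
	}
	if ids, found := ExtractTraceIDsFromFilter("trace_id IN ['a1', 'b2']", fieldKeys); found {
		fmt.Println(ids) // [a1 b2]
	}
}

Note that per the tests, extraction succeeds only when the key resolves to the span field context; an attribute named trace_id is deliberately not treated as a trace ID.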
@@ -49,15 +49,6 @@ func NewOrganizationKey(orgID valuer.UUID) uint32 {
	return hasher.Sum32()
}

func NewTraitsFromOrganization(org *Organization) map[string]any {
	return map[string]any{
		"display_name": org.DisplayName,
		"name":         org.Name,
		"created_at":   org.CreatedAt,
		"alias":        org.Alias,
	}
}

type TTLSetting struct {
	bun.BaseModel `bun:"table:ttl_setting"`
	Identifiable
@@ -30,6 +30,7 @@ var (
	FunctionNameMedian7   = FunctionName{valuer.NewString("median7")}
	FunctionNameTimeShift = FunctionName{valuer.NewString("timeShift")}
	FunctionNameAnomaly   = FunctionName{valuer.NewString("anomaly")}
	FunctionNameFillZero  = FunctionName{valuer.NewString("fillZero")}
)

// ApplyFunction applies the given function to the result data
@@ -89,6 +90,24 @@ func ApplyFunction(fn Function, result *TimeSeries) *TimeSeries {
		// Placeholder for anomaly detection as a function that can be used
		// in dashboards other than the anomaly alert
		return result
	case FunctionNameFillZero:
		// fillZero expects 3 arguments: start, end, step (all in milliseconds)
		if len(args) < 3 {
			return result
		}
		start, err := parseFloat64Arg(args[0].Value)
		if err != nil {
			return result
		}
		end, err := parseFloat64Arg(args[1].Value)
		if err != nil {
			return result
		}
		step, err := parseFloat64Arg(args[2].Value)
		if err != nil || step <= 0 {
			return result
		}
		return funcFillZero(result, int64(start), int64(end), int64(step))
	}
	return result
}
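For orientation, here is a rough sketch of the argument contract from the caller's side. Everything beyond the Name field is an assumption: the hunk confirms only FunctionNameFillZero and the args[i].Value access, so the Args field and FunctionArg type in the literal below are illustrative, not the repo's actual shape.

// Illustrative only: Args and FunctionArg are assumed; this diff shows
// only that fn carries positional args whose Value is parsed as float64.
fn := Function{
	Name: FunctionNameFillZero,
	Args: []FunctionArg{
		{Value: 1_700_000_000_000}, // start (ms)
		{Value: 1_700_000_600_000}, // end (ms), 10 minutes later
		{Value: 60_000},            // step (ms): one point per minute
	},
}
// With fewer than 3 args, a non-numeric arg, or a non-positive step,
// ApplyFunction returns the series unchanged.
result = ApplyFunction(fn, result)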
@@ -357,3 +376,44 @@ func ApplyFunctions(functions []Function, result *TimeSeries) *TimeSeries {
	}
	return result
}

// funcFillZero fills gaps in a time series with zeros at regular step intervals.
// It takes start, end, and step parameters (all in milliseconds) to ensure consistent filling.
func funcFillZero(result *TimeSeries, start, end, step int64) *TimeSeries {
	if step <= 0 {
		return result
	}

	// Align start and end to step boundaries:
	// round start down to the nearest step boundary
	alignedStart := (start / step) * step
	// round end up to the nearest step boundary
	alignedEnd := ((end + step - 1) / step) * step

	// Create a map of existing values for quick lookup
	existingValues := make(map[int64]*TimeSeriesValue)
	for _, v := range result.Values {
		existingValues[v.Timestamp] = v
	}

	// Create a new slice to hold all values (existing + filled)
	filledValues := make([]*TimeSeriesValue, 0)

	// Generate all timestamps from aligned start to aligned end with the given step
	for ts := alignedStart; ts <= alignedEnd; ts += step {
		if val, exists := existingValues[ts]; exists {
			// Use the existing value
			filledValues = append(filledValues, val)
		} else {
			// Fill with zero
			filledValues = append(filledValues, &TimeSeriesValue{
				Timestamp: ts,
				Value:     0,
				Partial:   false,
			})
		}
	}

	result.Values = filledValues
	return result
}
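A small worked example of the boundary alignment may help. It would have to live in the same package, since funcFillZero is unexported; the TimeSeries and TimeSeriesValue shapes match the hunk above, while the concrete numbers and the function name are ours:

// exampleFillZero is a sketch showing how start and end snap to the step grid.
func exampleFillZero() {
	ts := &TimeSeries{Values: []*TimeSeriesValue{
		{Timestamp: 1_000, Value: 3},
		{Timestamp: 1_300, Value: 5},
	}}
	// start=1_050 rounds down: (1_050/100)*100 = 1_000.
	// end=1_930 rounds up: ((1_930+99)/100)*100 = 2_000.
	filled := funcFillZero(ts, 1_050, 1_930, 100)
	// filled.Values now holds 11 points at 1_000, 1_100, ..., 2_000;
	// 1_000 and 1_300 keep their values, the remaining nine are zero.
	_ = filled
}

One design consequence worth noting: because both boundaries are aligned outward, the filled series can extend slightly beyond the requested [start, end] window, which keeps every emitted timestamp on the same step grid.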
Some files were not shown because too many files have changed in this diff.