Compare commits
19 Commits
testing-qb
...
v0.88.0-cl
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
81fcca3bd3 | ||
|
|
4f7d84aa37 | ||
|
|
8f8dedb8b3 | ||
|
|
3f65229506 | ||
|
|
f006260719 | ||
|
|
3fc8f6c353 | ||
|
|
e02ae9a5c4 | ||
|
|
1989d07e52 | ||
|
|
78194ae955 | ||
|
|
da1b6d1ed0 | ||
|
|
d3c76ae8be | ||
|
|
bed3dbc698 | ||
|
|
66affb0ece | ||
|
|
75f62372ae | ||
|
|
a3ac307b4e | ||
|
|
7672d2f636 | ||
|
|
e3018d9529 | ||
|
|
385ee268e3 | ||
|
|
01036a8a2f |
@@ -224,3 +224,6 @@ statsreporter:
|
||||
enabled: true
|
||||
# The interval at which the stats are collected.
|
||||
interval: 6h
|
||||
collect:
|
||||
# Whether to collect identities and traits (emails).
|
||||
identities: true
|
||||
|
||||
@@ -129,5 +129,6 @@
|
||||
"text_num_points": "data points in each result group",
|
||||
"text_alert_frequency": "Run alert every",
|
||||
"text_for": "minutes",
|
||||
"selected_query_placeholder": "Select query"
|
||||
"selected_query_placeholder": "Select query",
|
||||
"alert_rule_not_found": "Alert Rule not found"
|
||||
}
|
||||
|
||||
@@ -126,7 +126,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
|
||||
|
||||
const isRouteEnabledForWorkspaceBlockedState =
|
||||
isAdmin &&
|
||||
(path === ROUTES.ORG_SETTINGS ||
|
||||
(path === ROUTES.SETTINGS ||
|
||||
path === ROUTES.ORG_SETTINGS ||
|
||||
path === ROUTES.BILLING ||
|
||||
path === ROUTES.MY_SETTINGS);
|
||||
|
||||
|
||||
@@ -131,10 +131,6 @@ export const CreateAlertChannelAlerts = Loadable(
|
||||
() => import(/* webpackChunkName: "Create Channels" */ 'pages/Settings'),
|
||||
);
|
||||
|
||||
export const EditAlertChannelsAlerts = Loadable(
|
||||
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/Settings'),
|
||||
);
|
||||
|
||||
export const AllAlertChannels = Loadable(
|
||||
() => import(/* webpackChunkName: "All Channels" */ 'pages/Settings'),
|
||||
);
|
||||
|
||||
@@ -12,7 +12,6 @@ import {
|
||||
CreateNewAlerts,
|
||||
DashboardPage,
|
||||
DashboardWidget,
|
||||
EditAlertChannelsAlerts,
|
||||
EditRulesPage,
|
||||
ErrorDetails,
|
||||
Home,
|
||||
@@ -253,13 +252,6 @@ const routes: AppRoutes[] = [
|
||||
isPrivate: true,
|
||||
key: 'CHANNELS_NEW',
|
||||
},
|
||||
{
|
||||
path: ROUTES.CHANNELS_EDIT,
|
||||
exact: true,
|
||||
component: EditAlertChannelsAlerts,
|
||||
isPrivate: true,
|
||||
key: 'CHANNELS_EDIT',
|
||||
},
|
||||
{
|
||||
path: ROUTES.ALL_CHANNELS,
|
||||
exact: true,
|
||||
|
||||
@@ -119,6 +119,7 @@ export const updateFunnelSteps = async (
|
||||
export interface ValidateFunnelPayload {
|
||||
start_time: number;
|
||||
end_time: number;
|
||||
steps: FunnelStepData[];
|
||||
}
|
||||
|
||||
export interface ValidateFunnelResponse {
|
||||
@@ -132,12 +133,11 @@ export interface ValidateFunnelResponse {
|
||||
}
|
||||
|
||||
export const validateFunnelSteps = async (
|
||||
funnelId: string,
|
||||
payload: ValidateFunnelPayload,
|
||||
signal?: AbortSignal,
|
||||
): Promise<SuccessResponse<ValidateFunnelResponse> | ErrorResponse> => {
|
||||
const response = await axios.post(
|
||||
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
|
||||
`${FUNNELS_BASE_PATH}/analytics/validate`,
|
||||
payload,
|
||||
{ signal },
|
||||
);
|
||||
@@ -185,6 +185,7 @@ export interface FunnelOverviewPayload {
|
||||
end_time: number;
|
||||
step_start?: number;
|
||||
step_end?: number;
|
||||
steps: FunnelStepData[];
|
||||
}
|
||||
|
||||
export interface FunnelOverviewResponse {
|
||||
@@ -202,12 +203,11 @@ export interface FunnelOverviewResponse {
|
||||
}
|
||||
|
||||
export const getFunnelOverview = async (
|
||||
funnelId: string,
|
||||
payload: FunnelOverviewPayload,
|
||||
signal?: AbortSignal,
|
||||
): Promise<SuccessResponse<FunnelOverviewResponse> | ErrorResponse> => {
|
||||
const response = await axios.post(
|
||||
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
|
||||
`${FUNNELS_BASE_PATH}/analytics/overview`,
|
||||
payload,
|
||||
{
|
||||
signal,
|
||||
@@ -235,12 +235,11 @@ export interface SlowTraceData {
|
||||
}
|
||||
|
||||
export const getFunnelSlowTraces = async (
|
||||
funnelId: string,
|
||||
payload: FunnelOverviewPayload,
|
||||
signal?: AbortSignal,
|
||||
): Promise<SuccessResponse<SlowTraceData> | ErrorResponse> => {
|
||||
const response = await axios.post(
|
||||
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/slow-traces`,
|
||||
`${FUNNELS_BASE_PATH}/analytics/slow-traces`,
|
||||
payload,
|
||||
{
|
||||
signal,
|
||||
@@ -273,7 +272,7 @@ export const getFunnelErrorTraces = async (
|
||||
signal?: AbortSignal,
|
||||
): Promise<SuccessResponse<ErrorTraceData> | ErrorResponse> => {
|
||||
const response: AxiosResponse = await axios.post(
|
||||
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/error-traces`,
|
||||
`${FUNNELS_BASE_PATH}/analytics/error-traces`,
|
||||
payload,
|
||||
{
|
||||
signal,
|
||||
@@ -291,6 +290,7 @@ export const getFunnelErrorTraces = async (
|
||||
export interface FunnelStepsPayload {
|
||||
start_time: number;
|
||||
end_time: number;
|
||||
steps: FunnelStepData[];
|
||||
}
|
||||
|
||||
export interface FunnelStepGraphMetrics {
|
||||
@@ -307,12 +307,11 @@ export interface FunnelStepsResponse {
|
||||
}
|
||||
|
||||
export const getFunnelSteps = async (
|
||||
funnelId: string,
|
||||
payload: FunnelStepsPayload,
|
||||
signal?: AbortSignal,
|
||||
): Promise<SuccessResponse<FunnelStepsResponse> | ErrorResponse> => {
|
||||
const response = await axios.post(
|
||||
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps`,
|
||||
`${FUNNELS_BASE_PATH}/analytics/steps`,
|
||||
payload,
|
||||
{ signal },
|
||||
);
|
||||
@@ -330,6 +329,7 @@ export interface FunnelStepsOverviewPayload {
|
||||
end_time: number;
|
||||
step_start?: number;
|
||||
step_end?: number;
|
||||
steps: FunnelStepData[];
|
||||
}
|
||||
|
||||
export interface FunnelStepsOverviewResponse {
|
||||
@@ -341,12 +341,11 @@ export interface FunnelStepsOverviewResponse {
|
||||
}
|
||||
|
||||
export const getFunnelStepsOverview = async (
|
||||
funnelId: string,
|
||||
payload: FunnelStepsOverviewPayload,
|
||||
signal?: AbortSignal,
|
||||
): Promise<SuccessResponse<FunnelStepsOverviewResponse> | ErrorResponse> => {
|
||||
const response = await axios.post(
|
||||
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps/overview`,
|
||||
`${FUNNELS_BASE_PATH}/analytics/steps/overview`,
|
||||
payload,
|
||||
{ signal },
|
||||
);
|
||||
|
||||
@@ -30,5 +30,5 @@ export enum LOCALSTORAGE {
|
||||
SHOW_EXCEPTIONS_QUICK_FILTERS = 'SHOW_EXCEPTIONS_QUICK_FILTERS',
|
||||
BANNER_DISMISSED = 'BANNER_DISMISSED',
|
||||
QUICK_FILTERS_SETTINGS_ANNOUNCEMENT = 'QUICK_FILTERS_SETTINGS_ANNOUNCEMENT',
|
||||
UNEXECUTED_FUNNELS = 'UNEXECUTED_FUNNELS',
|
||||
FUNNEL_STEPS = 'FUNNEL_STEPS',
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ const ROUTES = {
|
||||
ALERT_OVERVIEW: '/alerts/overview',
|
||||
ALL_CHANNELS: '/settings/channels',
|
||||
CHANNELS_NEW: '/settings/channels/new',
|
||||
CHANNELS_EDIT: '/settings/channels/edit/:id',
|
||||
CHANNELS_EDIT: '/settings/channels/edit/:channelId',
|
||||
ALL_ERROR: '/exceptions',
|
||||
ERROR_DETAIL: '/error-detail',
|
||||
VERSION: '/status',
|
||||
@@ -62,8 +62,10 @@ const ROUTES = {
|
||||
WORKSPACE_SUSPENDED: '/workspace-suspended',
|
||||
SHORTCUTS: '/settings/shortcuts',
|
||||
INTEGRATIONS: '/integrations',
|
||||
MESSAGING_QUEUES_BASE: '/messaging-queues',
|
||||
MESSAGING_QUEUES_KAFKA: '/messaging-queues/kafka',
|
||||
MESSAGING_QUEUES_KAFKA_DETAIL: '/messaging-queues/kafka/detail',
|
||||
INFRASTRUCTURE_MONITORING_BASE: '/infrastructure-monitoring',
|
||||
INFRASTRUCTURE_MONITORING_HOSTS: '/infrastructure-monitoring/hosts',
|
||||
INFRASTRUCTURE_MONITORING_KUBERNETES: '/infrastructure-monitoring/kubernetes',
|
||||
MESSAGING_QUEUES_CELERY_TASK: '/messaging-queues/celery-task',
|
||||
@@ -71,6 +73,7 @@ const ROUTES = {
|
||||
METRICS_EXPLORER: '/metrics-explorer/summary',
|
||||
METRICS_EXPLORER_EXPLORER: '/metrics-explorer/explorer',
|
||||
METRICS_EXPLORER_VIEWS: '/metrics-explorer/views',
|
||||
API_MONITORING_BASE: '/api-monitoring',
|
||||
API_MONITORING: '/api-monitoring/explorer',
|
||||
METRICS_EXPLORER_BASE: '/metrics-explorer',
|
||||
WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
|
||||
|
||||
@@ -23,7 +23,7 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
|
||||
const onClickEditHandler = useCallback((id: string) => {
|
||||
history.push(
|
||||
generatePath(ROUTES.CHANNELS_EDIT, {
|
||||
id,
|
||||
channelId: id,
|
||||
}),
|
||||
);
|
||||
}, []);
|
||||
|
||||
@@ -28,7 +28,6 @@ import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useParams } from 'react-router-dom';
|
||||
import APIError from 'types/api/error';
|
||||
|
||||
function EditAlertChannels({
|
||||
@@ -53,7 +52,11 @@ function EditAlertChannels({
|
||||
const [savingState, setSavingState] = useState<boolean>(false);
|
||||
const [testingState, setTestingState] = useState<boolean>(false);
|
||||
const { notifications } = useNotifications();
|
||||
const { id } = useParams<{ id: string }>();
|
||||
|
||||
// Extract channelId from URL pathname since useParams doesn't work in nested routing
|
||||
const { pathname } = window.location;
|
||||
const channelIdMatch = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
|
||||
const id = channelIdMatch ? channelIdMatch[1] : '';
|
||||
|
||||
const [type, setType] = useState<ChannelType>(
|
||||
initialValue?.type ? (initialValue.type as ChannelType) : ChannelType.Slack,
|
||||
|
||||
@@ -149,7 +149,7 @@ function FormAlertChannels({
|
||||
</Button>
|
||||
<Button
|
||||
onClick={(): void => {
|
||||
history.replace(ROUTES.SETTINGS);
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
}}
|
||||
>
|
||||
{t('button_return')}
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
|
||||
import HostsEmptyOrIncorrectMetrics from '../HostsEmptyOrIncorrectMetrics';
|
||||
|
||||
describe('HostsEmptyOrIncorrectMetrics', () => {
|
||||
it('shows no data message when noData is true', () => {
|
||||
render(<HostsEmptyOrIncorrectMetrics noData incorrectData={false} />);
|
||||
expect(
|
||||
screen.getByText('No host metrics data received yet.'),
|
||||
).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText(/Infrastructure monitoring requires the/),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows incorrect data message when incorrectData is true', () => {
|
||||
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData />);
|
||||
expect(
|
||||
screen.getByText(
|
||||
'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
|
||||
),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not show no data message when noData is false', () => {
|
||||
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
|
||||
expect(
|
||||
screen.queryByText('No host metrics data received yet.'),
|
||||
).not.toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByText(/Infrastructure monitoring requires the/),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not show incorrect data message when incorrectData is false', () => {
|
||||
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
|
||||
expect(
|
||||
screen.queryByText(
|
||||
'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
|
||||
),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,166 @@
|
||||
/* eslint-disable react/button-has-type */
|
||||
import { render } from '@testing-library/react';
|
||||
import ROUTES from 'constants/routes';
|
||||
import * as useGetHostListHooks from 'hooks/infraMonitoring/useGetHostList';
|
||||
import * as appContextHooks from 'providers/App/App';
|
||||
import * as timezoneHooks from 'providers/Timezone';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
import { LicenseEvent } from 'types/api/licensesV3/getActive';
|
||||
|
||||
import HostsList from '../HostsList';
|
||||
|
||||
jest.mock('lib/getMinMax', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(() => ({
|
||||
minTime: 1713734400000,
|
||||
maxTime: 1713738000000,
|
||||
isValidTimeFormat: jest.fn().mockReturnValue(true),
|
||||
})),
|
||||
}));
|
||||
jest.mock('components/CustomTimePicker/CustomTimePicker', () => ({
|
||||
__esModule: true,
|
||||
default: ({ onSelect, selectedTime, selectedValue }: any): JSX.Element => (
|
||||
<div data-testid="custom-time-picker">
|
||||
<button onClick={(): void => onSelect('custom')}>
|
||||
{selectedTime} - {selectedValue}
|
||||
</button>
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
jest.mock('uplot', () => {
|
||||
const paths = {
|
||||
spline: jest.fn(),
|
||||
bars: jest.fn(),
|
||||
};
|
||||
const uplotMock = jest.fn(() => ({
|
||||
paths,
|
||||
}));
|
||||
return {
|
||||
paths,
|
||||
default: uplotMock,
|
||||
};
|
||||
});
|
||||
jest.mock('react-redux', () => ({
|
||||
...jest.requireActual('react-redux'),
|
||||
useSelector: (): any => ({
|
||||
globalTime: {
|
||||
selectedTime: {
|
||||
startTime: 1713734400000,
|
||||
endTime: 1713738000000,
|
||||
},
|
||||
maxTime: 1713738000000,
|
||||
minTime: 1713734400000,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: jest.fn().mockReturnValue({
|
||||
pathname: ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
|
||||
}),
|
||||
}));
|
||||
jest.mock('react-router-dom-v5-compat', () => {
|
||||
const actual = jest.requireActual('react-router-dom-v5-compat');
|
||||
return {
|
||||
...actual,
|
||||
useSearchParams: jest
|
||||
.fn()
|
||||
.mockReturnValue([
|
||||
{ get: jest.fn(), entries: jest.fn().mockReturnValue([]) },
|
||||
jest.fn(),
|
||||
]),
|
||||
useNavigationType: (): any => 'PUSH',
|
||||
};
|
||||
});
|
||||
jest.mock('hooks/useSafeNavigate', () => ({
|
||||
useSafeNavigate: (): any => ({
|
||||
safeNavigate: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
|
||||
timezone: {
|
||||
offset: 0,
|
||||
},
|
||||
browserTimezone: {
|
||||
offset: 0,
|
||||
},
|
||||
} as any);
|
||||
jest.spyOn(useGetHostListHooks, 'useGetHostList').mockReturnValue({
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
records: [
|
||||
{
|
||||
hostName: 'test-host',
|
||||
active: true,
|
||||
cpu: 0.75,
|
||||
memory: 0.65,
|
||||
wait: 0.03,
|
||||
},
|
||||
],
|
||||
isSendingK8SAgentMetrics: false,
|
||||
sentAnyHostMetricsData: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
} as any);
|
||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
user: {
|
||||
role: 'admin',
|
||||
},
|
||||
activeLicenseV3: {
|
||||
event_queue: {
|
||||
created_at: '0',
|
||||
event: LicenseEvent.NO_EVENT,
|
||||
scheduled_at: '0',
|
||||
status: '',
|
||||
updated_at: '0',
|
||||
},
|
||||
license: {
|
||||
license_key: 'test-license-key',
|
||||
license_type: 'trial',
|
||||
org_id: 'test-org-id',
|
||||
plan_id: 'test-plan-id',
|
||||
plan_name: 'test-plan-name',
|
||||
plan_type: 'trial',
|
||||
plan_version: 'test-plan-version',
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
|
||||
describe('HostsList', () => {
|
||||
it('renders hosts list table', () => {
|
||||
const { container } = render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter>
|
||||
<Provider store={store}>
|
||||
<HostsList />
|
||||
</Provider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
expect(container.querySelector('.hosts-list-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders filters', () => {
|
||||
const { container } = render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter>
|
||||
<Provider store={store}>
|
||||
<HostsList />
|
||||
</Provider>
|
||||
</MemoryRouter>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
expect(container.querySelector('.filters')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,37 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
|
||||
import HostsListControls from '../HostsListControls';
|
||||
|
||||
jest.mock('container/QueryBuilder/filters/QueryBuilderSearch', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => (
|
||||
<div data-testid="query-builder-search">Search</div>
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => (
|
||||
<div data-testid="date-time-selection">Date Time</div>
|
||||
),
|
||||
}));
|
||||
|
||||
describe('HostsListControls', () => {
|
||||
const mockHandleFiltersChange = jest.fn();
|
||||
const mockFilters = {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
it('renders search and date time filters', () => {
|
||||
render(
|
||||
<HostsListControls
|
||||
handleFiltersChange={mockHandleFiltersChange}
|
||||
filters={mockFilters}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('query-builder-search')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,139 @@
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
import { render, screen } from '@testing-library/react';
|
||||
|
||||
import HostsListTable from '../HostsListTable';
|
||||
|
||||
jest.mock('uplot', () => {
|
||||
const paths = {
|
||||
spline: jest.fn(),
|
||||
bars: jest.fn(),
|
||||
};
|
||||
const uplotMock = jest.fn(() => ({
|
||||
paths,
|
||||
}));
|
||||
return {
|
||||
paths,
|
||||
default: uplotMock,
|
||||
};
|
||||
});
|
||||
|
||||
const EMPTY_STATE_CONTAINER_CLASS = '.hosts-empty-state-container';
|
||||
|
||||
describe('HostsListTable', () => {
|
||||
const mockHost = {
|
||||
hostName: 'test-host-1',
|
||||
active: true,
|
||||
cpu: 0.75,
|
||||
memory: 0.65,
|
||||
wait: 0.03,
|
||||
load15: 1.5,
|
||||
os: 'linux',
|
||||
};
|
||||
|
||||
const mockTableData = {
|
||||
payload: {
|
||||
data: {
|
||||
hosts: [mockHost],
|
||||
},
|
||||
},
|
||||
};
|
||||
const mockOnHostClick = jest.fn();
|
||||
const mockSetCurrentPage = jest.fn();
|
||||
const mockSetOrderBy = jest.fn();
|
||||
const mockSetPageSize = jest.fn();
|
||||
const mockProps = {
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
tableData: mockTableData,
|
||||
hostMetricsData: [mockHost],
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
onHostClick: mockOnHostClick,
|
||||
currentPage: 1,
|
||||
setCurrentPage: mockSetCurrentPage,
|
||||
pageSize: 10,
|
||||
setOrderBy: mockSetOrderBy,
|
||||
setPageSize: mockSetPageSize,
|
||||
} as any;
|
||||
|
||||
it('renders loading state if isLoading is true', () => {
|
||||
const { container } = render(<HostsListTable {...mockProps} isLoading />);
|
||||
expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('renders loading state if isFetching is true', () => {
|
||||
const { container } = render(<HostsListTable {...mockProps} isFetching />);
|
||||
expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('renders error state if isError is true', () => {
|
||||
render(<HostsListTable {...mockProps} isError />);
|
||||
expect(screen.getByText('Something went wrong')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('renders empty state if no hosts are found', () => {
|
||||
const { container } = render(<HostsListTable {...mockProps} />);
|
||||
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('renders empty state if sentAnyHostMetricsData is false', () => {
|
||||
const { container } = render(
|
||||
<HostsListTable
|
||||
{...mockProps}
|
||||
tableData={{
|
||||
...mockTableData,
|
||||
payload: {
|
||||
...mockTableData.payload,
|
||||
data: {
|
||||
...mockTableData.payload.data,
|
||||
sentAnyHostMetricsData: false,
|
||||
},
|
||||
},
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('renders empty state if isSendingIncorrectK8SAgentMetrics is true', () => {
|
||||
const { container } = render(
|
||||
<HostsListTable
|
||||
{...mockProps}
|
||||
tableData={{
|
||||
...mockTableData,
|
||||
payload: {
|
||||
...mockTableData.payload,
|
||||
data: {
|
||||
...mockTableData.payload.data,
|
||||
isSendingIncorrectK8SAgentMetrics: true,
|
||||
},
|
||||
},
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('renders table data', () => {
|
||||
const { container } = render(
|
||||
<HostsListTable
|
||||
{...mockProps}
|
||||
tableData={{
|
||||
...mockTableData,
|
||||
payload: {
|
||||
...mockTableData.payload,
|
||||
data: {
|
||||
...mockTableData.payload.data,
|
||||
isSendingIncorrectK8SAgentMetrics: false,
|
||||
sentAnyHostMetricsData: true,
|
||||
},
|
||||
},
|
||||
}}
|
||||
/>,
|
||||
);
|
||||
expect(container.querySelector('.hosts-list-table')).toBeTruthy();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,104 @@
|
||||
import { render } from '@testing-library/react';
|
||||
|
||||
import { formatDataForTable, GetHostsQuickFiltersConfig } from '../utils';
|
||||
|
||||
const PROGRESS_BAR_CLASS = '.progress-bar';
|
||||
|
||||
jest.mock('uplot', () => {
|
||||
const paths = {
|
||||
spline: jest.fn(),
|
||||
bars: jest.fn(),
|
||||
};
|
||||
const uplotMock = jest.fn(() => ({
|
||||
paths,
|
||||
}));
|
||||
return {
|
||||
paths,
|
||||
default: uplotMock,
|
||||
};
|
||||
});
|
||||
|
||||
describe('InfraMonitoringHosts utils', () => {
|
||||
describe('formatDataForTable', () => {
|
||||
it('should format host data correctly', () => {
|
||||
const mockData = [
|
||||
{
|
||||
hostName: 'test-host',
|
||||
active: true,
|
||||
cpu: 0.95,
|
||||
memory: 0.85,
|
||||
wait: 0.05,
|
||||
load15: 2.5,
|
||||
os: 'linux',
|
||||
},
|
||||
] as any;
|
||||
|
||||
const result = formatDataForTable(mockData);
|
||||
|
||||
expect(result[0].hostName).toBe('test-host');
|
||||
expect(result[0].wait).toBe('5%');
|
||||
expect(result[0].load15).toBe(2.5);
|
||||
|
||||
// Test active tag rendering
|
||||
const activeTag = render(result[0].active as JSX.Element);
|
||||
expect(activeTag.container.textContent).toBe('ACTIVE');
|
||||
expect(activeTag.container.querySelector('.active')).toBeTruthy();
|
||||
|
||||
// Test CPU progress bar
|
||||
const cpuProgress = render(result[0].cpu as JSX.Element);
|
||||
const cpuProgressBar = cpuProgress.container.querySelector(
|
||||
PROGRESS_BAR_CLASS,
|
||||
);
|
||||
expect(cpuProgressBar).toBeTruthy();
|
||||
|
||||
// Test memory progress bar
|
||||
const memoryProgress = render(result[0].memory as JSX.Element);
|
||||
const memoryProgressBar = memoryProgress.container.querySelector(
|
||||
PROGRESS_BAR_CLASS,
|
||||
);
|
||||
expect(memoryProgressBar).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should handle inactive hosts', () => {
|
||||
const mockData = [
|
||||
{
|
||||
hostName: 'test-host',
|
||||
active: false,
|
||||
cpu: 0.3,
|
||||
memory: 0.4,
|
||||
wait: 0.02,
|
||||
load15: 1.2,
|
||||
os: 'linux',
|
||||
cpuTimeSeries: [],
|
||||
memoryTimeSeries: [],
|
||||
waitTimeSeries: [],
|
||||
load15TimeSeries: [],
|
||||
},
|
||||
] as any;
|
||||
|
||||
const result = formatDataForTable(mockData);
|
||||
|
||||
const inactiveTag = render(result[0].active as JSX.Element);
|
||||
expect(inactiveTag.container.textContent).toBe('INACTIVE');
|
||||
expect(inactiveTag.container.querySelector('.inactive')).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('GetHostsQuickFiltersConfig', () => {
|
||||
it('should return correct config when dotMetricsEnabled is true', () => {
|
||||
const result = GetHostsQuickFiltersConfig(true);
|
||||
|
||||
expect(result[0].attributeKey.key).toBe('host.name');
|
||||
expect(result[1].attributeKey.key).toBe('os.type');
|
||||
expect(result[0].aggregateAttribute).toBe('system.cpu.load_average.15m');
|
||||
});
|
||||
|
||||
it('should return correct config when dotMetricsEnabled is false', () => {
|
||||
const result = GetHostsQuickFiltersConfig(false);
|
||||
|
||||
expect(result[0].attributeKey.key).toBe('host_name');
|
||||
expect(result[1].attributeKey.key).toBe('os_type');
|
||||
expect(result[0].aggregateAttribute).toBe('system_cpu_load_average_15m');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -611,9 +611,7 @@ export const errorPercentage = ({
|
||||
{
|
||||
id: '',
|
||||
key: {
|
||||
key: dotMetricsEnabled
|
||||
? WidgetKeys.Service_name
|
||||
: WidgetKeys.StatusCodeNorm,
|
||||
key: dotMetricsEnabled ? WidgetKeys.StatusCode : WidgetKeys.StatusCodeNorm,
|
||||
dataType: DataTypes.Int64,
|
||||
isColumn: false,
|
||||
type: MetricsType.Tag,
|
||||
|
||||
@@ -240,6 +240,7 @@
|
||||
line-height: 18px; /* 163.636% */
|
||||
letter-spacing: 0.88px;
|
||||
text-transform: uppercase;
|
||||
min-height: 18px;
|
||||
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@@ -889,6 +890,10 @@
|
||||
.ant-dropdown-menu-item-divider {
|
||||
background-color: var(--Slate-500, #161922) !important;
|
||||
}
|
||||
|
||||
.ant-dropdown-menu-item-disabled {
|
||||
opacity: 0.7;
|
||||
}
|
||||
}
|
||||
|
||||
.settings-dropdown,
|
||||
|
||||
@@ -447,6 +447,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
|
||||
{
|
||||
key: 'workspace',
|
||||
label: 'Workspace Settings',
|
||||
disabled: isWorkspaceBlocked,
|
||||
},
|
||||
...(isEnterpriseSelfHostedUser || isCommunityEnterpriseUser
|
||||
? [
|
||||
@@ -464,7 +465,12 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
|
||||
),
|
||||
},
|
||||
].filter(Boolean),
|
||||
[isEnterpriseSelfHostedUser, isCommunityEnterpriseUser, user.email],
|
||||
[
|
||||
isEnterpriseSelfHostedUser,
|
||||
isCommunityEnterpriseUser,
|
||||
user.email,
|
||||
isWorkspaceBlocked,
|
||||
],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
|
||||
@@ -391,7 +391,7 @@ export const helpSupportDropdownMenuItems: SidebarItem[] = [
|
||||
},
|
||||
{
|
||||
key: 'invite-collaborators',
|
||||
label: 'Invite a Collaborator',
|
||||
label: 'Invite a Team Member',
|
||||
icon: <Plus size={14} />,
|
||||
itemKey: 'invite-collaborators',
|
||||
},
|
||||
@@ -403,6 +403,10 @@ export const NEW_ROUTES_MENU_ITEM_KEY_MAP: Record<string, string> = {
|
||||
[ROUTES.TRACE_EXPLORER]: ROUTES.TRACES_EXPLORER,
|
||||
[ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER,
|
||||
[ROUTES.METRICS_EXPLORER_BASE]: ROUTES.METRICS_EXPLORER,
|
||||
[ROUTES.INFRASTRUCTURE_MONITORING_BASE]:
|
||||
ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
|
||||
[ROUTES.API_MONITORING_BASE]: ROUTES.API_MONITORING,
|
||||
[ROUTES.MESSAGING_QUEUES_BASE]: ROUTES.MESSAGING_QUEUES_OVERVIEW,
|
||||
};
|
||||
|
||||
export default menuItems;
|
||||
|
||||
@@ -241,6 +241,15 @@
|
||||
&-title {
|
||||
color: var(--bg-ink-500);
|
||||
}
|
||||
|
||||
&-footer {
|
||||
border-top-color: var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
.add-span-to-funnel-modal__discard-button {
|
||||
background: var(--bg-vanilla-200);
|
||||
color: var(--bg-ink-500);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -72,7 +72,6 @@ function FunnelDetailsView({
|
||||
funnel={funnel}
|
||||
isTraceDetailsPage
|
||||
span={span}
|
||||
disableAutoSave
|
||||
triggerAutoSave={triggerAutoSave}
|
||||
showNotifications={showNotifications}
|
||||
/>
|
||||
@@ -143,13 +142,19 @@ function AddSpanToFunnelModal({
|
||||
const handleSaveFunnel = (): void => {
|
||||
setTriggerSave(true);
|
||||
// Reset trigger after a brief moment to allow the save to be processed
|
||||
setTimeout(() => setTriggerSave(false), 100);
|
||||
setTimeout(() => {
|
||||
setTriggerSave(false);
|
||||
onClose();
|
||||
}, 100);
|
||||
};
|
||||
|
||||
const handleDiscard = (): void => {
|
||||
setTriggerDiscard(true);
|
||||
// Reset trigger after a brief moment
|
||||
setTimeout(() => setTriggerDiscard(false), 100);
|
||||
setTimeout(() => {
|
||||
setTriggerDiscard(false);
|
||||
onClose();
|
||||
}, 100);
|
||||
};
|
||||
|
||||
const renderListView = (): JSX.Element => (
|
||||
@@ -239,9 +244,6 @@ function AddSpanToFunnelModal({
|
||||
footer={
|
||||
activeView === ModalView.DETAILS
|
||||
? [
|
||||
<Button key="close" onClick={onClose}>
|
||||
Close
|
||||
</Button>,
|
||||
<Button
|
||||
type="default"
|
||||
key="discard"
|
||||
|
||||
@@ -149,30 +149,28 @@ function SpanOverview({
|
||||
<Typography.Text className="service-name">
|
||||
{span.serviceName}
|
||||
</Typography.Text>
|
||||
{!!span.serviceName &&
|
||||
!!span.name &&
|
||||
process.env.NODE_ENV === 'development' && (
|
||||
<div className="add-funnel-button">
|
||||
<span className="add-funnel-button__separator">·</span>
|
||||
<Button
|
||||
type="text"
|
||||
size="small"
|
||||
className="add-funnel-button__button"
|
||||
onClick={(e): void => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
handleAddSpanToFunnel(span);
|
||||
}}
|
||||
icon={
|
||||
<img
|
||||
className="add-funnel-button__icon"
|
||||
src="/Icons/funnel-add.svg"
|
||||
alt="funnel-icon"
|
||||
/>
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{!!span.serviceName && !!span.name && (
|
||||
<div className="add-funnel-button">
|
||||
<span className="add-funnel-button__separator">·</span>
|
||||
<Button
|
||||
type="text"
|
||||
size="small"
|
||||
className="add-funnel-button__button"
|
||||
onClick={(e): void => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
handleAddSpanToFunnel(span);
|
||||
}}
|
||||
icon={
|
||||
<img
|
||||
className="add-funnel-button__icon"
|
||||
src="/Icons/funnel-add.svg"
|
||||
alt="funnel-icon"
|
||||
/>
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
@@ -475,7 +473,7 @@ function Success(props: ISuccessProps): JSX.Element {
|
||||
virtualiserRef={virtualizerRef}
|
||||
setColumnWidths={setTraceFlamegraphStatsWidth}
|
||||
/>
|
||||
{selectedSpanToAddToFunnel && process.env.NODE_ENV === 'development' && (
|
||||
{selectedSpanToAddToFunnel && (
|
||||
<AddSpanToFunnelModal
|
||||
span={selectedSpanToAddToFunnel}
|
||||
isOpen={isAddSpanToFunnelModalOpen}
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import useDebounce from 'hooks/useDebounce';
|
||||
import { useLocalStorage } from 'hooks/useLocalStorage';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { useQueryClient } from 'react-query';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { FunnelData, FunnelStepData } from 'types/api/traceFunnels';
|
||||
|
||||
import { useUpdateFunnelSteps } from './useFunnels';
|
||||
@@ -13,22 +16,30 @@ interface UseFunnelConfiguration {
|
||||
isPopoverOpen: boolean;
|
||||
setIsPopoverOpen: (isPopoverOpen: boolean) => void;
|
||||
steps: FunnelStepData[];
|
||||
isSaving: boolean;
|
||||
}
|
||||
|
||||
// Add this helper function
|
||||
const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
|
||||
export const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
|
||||
if (steps.some((step) => !step.filters)) return steps;
|
||||
|
||||
return steps.map((step) => ({
|
||||
...step,
|
||||
filters: {
|
||||
...step.filters,
|
||||
items: step.filters.items.map((item) => ({
|
||||
id: '',
|
||||
key: item.key,
|
||||
value: item.value,
|
||||
op: item.op,
|
||||
})),
|
||||
items: step.filters.items.map((item) => {
|
||||
const {
|
||||
id: unusedId,
|
||||
isIndexed,
|
||||
...keyObj
|
||||
} = item.key as BaseAutocompleteData;
|
||||
return {
|
||||
id: '',
|
||||
key: keyObj,
|
||||
value: item.value,
|
||||
op: item.op,
|
||||
};
|
||||
}),
|
||||
},
|
||||
}));
|
||||
};
|
||||
@@ -36,22 +47,22 @@ const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export default function useFunnelConfiguration({
|
||||
funnel,
|
||||
disableAutoSave = false,
|
||||
triggerAutoSave = false,
|
||||
showNotifications = false,
|
||||
}: {
|
||||
funnel: FunnelData;
|
||||
disableAutoSave?: boolean;
|
||||
triggerAutoSave?: boolean;
|
||||
showNotifications?: boolean;
|
||||
}): UseFunnelConfiguration {
|
||||
const { notifications } = useNotifications();
|
||||
const queryClient = useQueryClient();
|
||||
const {
|
||||
steps,
|
||||
initialSteps,
|
||||
hasIncompleteStepFields,
|
||||
lastUpdatedSteps,
|
||||
setLastUpdatedSteps,
|
||||
handleRestoreSteps,
|
||||
handleRunFunnel,
|
||||
selectedTime,
|
||||
setIsUpdatingFunnel,
|
||||
} = useFunnelContext();
|
||||
|
||||
// State management
|
||||
@@ -59,10 +70,6 @@ export default function useFunnelConfiguration({
|
||||
|
||||
const debouncedSteps = useDebounce(steps, 200);
|
||||
|
||||
const [lastValidatedSteps, setLastValidatedSteps] = useState<FunnelStepData[]>(
|
||||
initialSteps,
|
||||
);
|
||||
|
||||
// Mutation hooks
|
||||
const updateStepsMutation = useUpdateFunnelSteps(
|
||||
funnel.funnel_id,
|
||||
@@ -71,6 +78,15 @@ export default function useFunnelConfiguration({
|
||||
|
||||
// Derived state
|
||||
const lastSavedStepsStateRef = useRef<FunnelStepData[]>(steps);
|
||||
const hasRestoredFromLocalStorage = useRef(false);
|
||||
|
||||
// localStorage hook for funnel steps
|
||||
const localStorageKey = `${LOCALSTORAGE.FUNNEL_STEPS}_${funnel.funnel_id}`;
|
||||
const [
|
||||
localStorageSavedSteps,
|
||||
setLocalStorageSavedSteps,
|
||||
clearLocalStorageSavedSteps,
|
||||
] = useLocalStorage<FunnelStepData[] | null>(localStorageKey, null);
|
||||
|
||||
const hasStepsChanged = useCallback(() => {
|
||||
const normalizedLastSavedSteps = normalizeSteps(
|
||||
@@ -80,6 +96,34 @@ export default function useFunnelConfiguration({
|
||||
return !isEqual(normalizedDebouncedSteps, normalizedLastSavedSteps);
|
||||
}, [debouncedSteps]);
|
||||
|
||||
// Handle localStorage for funnel steps
|
||||
useEffect(() => {
|
||||
// Restore from localStorage on first run if
|
||||
if (!hasRestoredFromLocalStorage.current) {
|
||||
const savedSteps = localStorageSavedSteps;
|
||||
if (savedSteps) {
|
||||
handleRestoreSteps(savedSteps);
|
||||
hasRestoredFromLocalStorage.current = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Save steps to localStorage
|
||||
if (hasStepsChanged()) {
|
||||
setLocalStorageSavedSteps(debouncedSteps);
|
||||
}
|
||||
}, [
|
||||
debouncedSteps,
|
||||
funnel.funnel_id,
|
||||
hasStepsChanged,
|
||||
handleRestoreSteps,
|
||||
localStorageSavedSteps,
|
||||
setLocalStorageSavedSteps,
|
||||
queryClient,
|
||||
selectedTime,
|
||||
lastUpdatedSteps,
|
||||
]);
|
||||
|
||||
const hasFunnelStepDefinitionsChanged = useCallback(
|
||||
(prevSteps: FunnelStepData[], nextSteps: FunnelStepData[]): boolean => {
|
||||
if (prevSteps.length !== nextSteps.length) return true;
|
||||
@@ -97,15 +141,6 @@ export default function useFunnelConfiguration({
|
||||
[],
|
||||
);
|
||||
|
||||
const hasFunnelLatencyTypeChanged = useCallback(
|
||||
(prevSteps: FunnelStepData[], nextSteps: FunnelStepData[]): boolean =>
|
||||
prevSteps.some((step, index) => {
|
||||
const nextStep = nextSteps[index];
|
||||
return step.latency_type !== nextStep.latency_type;
|
||||
}),
|
||||
[],
|
||||
);
|
||||
|
||||
// Mutation payload preparation
|
||||
const getUpdatePayload = useCallback(
|
||||
() => ({
|
||||
@@ -116,33 +151,19 @@ export default function useFunnelConfiguration({
|
||||
[funnel.funnel_id, debouncedSteps],
|
||||
);
|
||||
|
||||
const queryClient = useQueryClient();
|
||||
const { selectedTime } = useFunnelContext();
|
||||
|
||||
const validateStepsQueryKey = useMemo(
|
||||
() => [REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS, funnel.funnel_id, selectedTime],
|
||||
[funnel.funnel_id, selectedTime],
|
||||
);
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
useEffect(() => {
|
||||
// Determine if we should save based on the mode
|
||||
let shouldSave = false;
|
||||
|
||||
if (disableAutoSave) {
|
||||
// Manual save mode: only save when explicitly triggered
|
||||
shouldSave = triggerAutoSave;
|
||||
} else {
|
||||
// Auto-save mode: save when steps have changed and no incomplete fields
|
||||
shouldSave = hasStepsChanged() && !hasIncompleteStepFields;
|
||||
}
|
||||
|
||||
if (shouldSave && !isEqual(debouncedSteps, lastValidatedSteps)) {
|
||||
if (triggerAutoSave && !isEqual(debouncedSteps, lastUpdatedSteps)) {
|
||||
setIsUpdatingFunnel(true);
|
||||
updateStepsMutation.mutate(getUpdatePayload(), {
|
||||
onSuccess: (data) => {
|
||||
const updatedFunnelSteps = data?.payload?.steps;
|
||||
|
||||
if (!updatedFunnelSteps) return;
|
||||
|
||||
// Clear localStorage since steps are saved successfully
|
||||
clearLocalStorageSavedSteps();
|
||||
|
||||
queryClient.setQueryData(
|
||||
[REACT_QUERY_KEY.GET_FUNNEL_DETAILS, funnel.funnel_id],
|
||||
(oldData: any) => {
|
||||
@@ -163,17 +184,9 @@ export default function useFunnelConfiguration({
|
||||
(step) => step.service_name === '' || step.span_name === '',
|
||||
);
|
||||
|
||||
if (hasFunnelLatencyTypeChanged(lastValidatedSteps, debouncedSteps)) {
|
||||
handleRunFunnel();
|
||||
setLastValidatedSteps(debouncedSteps);
|
||||
}
|
||||
// Only validate if funnel steps definitions
|
||||
else if (
|
||||
!hasIncompleteStepFields &&
|
||||
hasFunnelStepDefinitionsChanged(lastValidatedSteps, debouncedSteps)
|
||||
) {
|
||||
queryClient.refetchQueries(validateStepsQueryKey);
|
||||
setLastValidatedSteps(debouncedSteps);
|
||||
if (!hasIncompleteStepFields) {
|
||||
setLastUpdatedSteps(debouncedSteps);
|
||||
}
|
||||
|
||||
// Show success notification only when requested
|
||||
@@ -216,17 +229,18 @@ export default function useFunnelConfiguration({
|
||||
getUpdatePayload,
|
||||
hasFunnelStepDefinitionsChanged,
|
||||
hasStepsChanged,
|
||||
lastValidatedSteps,
|
||||
lastUpdatedSteps,
|
||||
queryClient,
|
||||
validateStepsQueryKey,
|
||||
triggerAutoSave,
|
||||
showNotifications,
|
||||
disableAutoSave,
|
||||
localStorageSavedSteps,
|
||||
clearLocalStorageSavedSteps,
|
||||
]);
|
||||
|
||||
return {
|
||||
isPopoverOpen,
|
||||
setIsPopoverOpen,
|
||||
steps,
|
||||
isSaving: updateStepsMutation.isLoading,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -20,10 +20,11 @@ export function useFunnelMetrics({
|
||||
metricsData: MetricItem[];
|
||||
conversionRate: number;
|
||||
} {
|
||||
const { startTime, endTime } = useFunnelContext();
|
||||
const { startTime, endTime, steps } = useFunnelContext();
|
||||
const payload = {
|
||||
start_time: startTime,
|
||||
end_time: endTime,
|
||||
steps,
|
||||
};
|
||||
|
||||
const {
|
||||
@@ -81,6 +82,7 @@ export function useFunnelStepsMetrics({
|
||||
end_time: endTime,
|
||||
step_start: stepStart,
|
||||
step_end: stepEnd,
|
||||
steps,
|
||||
};
|
||||
|
||||
const {
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
FunnelOverviewResponse,
|
||||
FunnelStepsOverviewPayload,
|
||||
FunnelStepsOverviewResponse,
|
||||
FunnelStepsPayload,
|
||||
FunnelStepsResponse,
|
||||
getFunnelById,
|
||||
getFunnelErrorTraces,
|
||||
@@ -37,6 +38,7 @@ import {
|
||||
CreateFunnelPayload,
|
||||
CreateFunnelResponse,
|
||||
FunnelData,
|
||||
FunnelStepData,
|
||||
} from 'types/api/traceFunnels';
|
||||
|
||||
export const useFunnelsList = (): UseQueryResult<
|
||||
@@ -117,12 +119,14 @@ export const useValidateFunnelSteps = ({
|
||||
startTime,
|
||||
endTime,
|
||||
enabled,
|
||||
steps,
|
||||
}: {
|
||||
funnelId: string;
|
||||
selectedTime: string;
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
enabled: boolean;
|
||||
steps: FunnelStepData[];
|
||||
}): UseQueryResult<
|
||||
SuccessResponse<ValidateFunnelResponse> | ErrorResponse,
|
||||
Error
|
||||
@@ -130,11 +134,19 @@ export const useValidateFunnelSteps = ({
|
||||
useQuery({
|
||||
queryFn: ({ signal }) =>
|
||||
validateFunnelSteps(
|
||||
funnelId,
|
||||
{ start_time: startTime, end_time: endTime },
|
||||
{ start_time: startTime, end_time: endTime, steps },
|
||||
signal,
|
||||
),
|
||||
queryKey: [REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS, funnelId, selectedTime],
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.VALIDATE_FUNNEL_STEPS,
|
||||
funnelId,
|
||||
selectedTime,
|
||||
steps.map((step) => {
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const { latency_type, ...rest } = step;
|
||||
return rest;
|
||||
}),
|
||||
],
|
||||
enabled,
|
||||
staleTime: 0,
|
||||
});
|
||||
@@ -168,18 +180,17 @@ export const useFunnelOverview = (
|
||||
const {
|
||||
selectedTime,
|
||||
validTracesCount,
|
||||
hasFunnelBeenExecuted,
|
||||
isUpdatingFunnel,
|
||||
} = useFunnelContext();
|
||||
return useQuery({
|
||||
queryFn: ({ signal }) => getFunnelOverview(funnelId, payload, signal),
|
||||
queryFn: ({ signal }) => getFunnelOverview(payload, signal),
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW,
|
||||
funnelId,
|
||||
selectedTime,
|
||||
payload.step_start ?? '',
|
||||
payload.step_end ?? '',
|
||||
payload.steps,
|
||||
],
|
||||
enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
|
||||
enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -190,18 +201,19 @@ export const useFunnelSlowTraces = (
|
||||
const {
|
||||
selectedTime,
|
||||
validTracesCount,
|
||||
hasFunnelBeenExecuted,
|
||||
isUpdatingFunnel,
|
||||
} = useFunnelContext();
|
||||
return useQuery<SuccessResponse<SlowTraceData> | ErrorResponse, Error>({
|
||||
queryFn: ({ signal }) => getFunnelSlowTraces(funnelId, payload, signal),
|
||||
queryFn: ({ signal }) => getFunnelSlowTraces(payload, signal),
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_FUNNEL_SLOW_TRACES,
|
||||
funnelId,
|
||||
selectedTime,
|
||||
payload.step_start ?? '',
|
||||
payload.step_end ?? '',
|
||||
payload.steps,
|
||||
],
|
||||
enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
|
||||
enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -212,7 +224,7 @@ export const useFunnelErrorTraces = (
|
||||
const {
|
||||
selectedTime,
|
||||
validTracesCount,
|
||||
hasFunnelBeenExecuted,
|
||||
isUpdatingFunnel,
|
||||
} = useFunnelContext();
|
||||
return useQuery({
|
||||
queryFn: ({ signal }) => getFunnelErrorTraces(funnelId, payload, signal),
|
||||
@@ -222,35 +234,31 @@ export const useFunnelErrorTraces = (
|
||||
selectedTime,
|
||||
payload.step_start ?? '',
|
||||
payload.step_end ?? '',
|
||||
payload.steps,
|
||||
],
|
||||
enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
|
||||
enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
|
||||
});
|
||||
};
|
||||
|
||||
export function useFunnelStepsGraphData(
|
||||
funnelId: string,
|
||||
payload: FunnelStepsPayload,
|
||||
): UseQueryResult<SuccessResponse<FunnelStepsResponse> | ErrorResponse, Error> {
|
||||
const {
|
||||
startTime,
|
||||
endTime,
|
||||
selectedTime,
|
||||
validTracesCount,
|
||||
hasFunnelBeenExecuted,
|
||||
isUpdatingFunnel,
|
||||
} = useFunnelContext();
|
||||
|
||||
return useQuery({
|
||||
queryFn: ({ signal }) =>
|
||||
getFunnelSteps(
|
||||
funnelId,
|
||||
{ start_time: startTime, end_time: endTime },
|
||||
signal,
|
||||
),
|
||||
queryFn: ({ signal }) => getFunnelSteps(payload, signal),
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_FUNNEL_STEPS_GRAPH_DATA,
|
||||
funnelId,
|
||||
selectedTime,
|
||||
payload.steps,
|
||||
],
|
||||
enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
|
||||
enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -264,17 +272,18 @@ export const useFunnelStepsOverview = (
|
||||
const {
|
||||
selectedTime,
|
||||
validTracesCount,
|
||||
hasFunnelBeenExecuted,
|
||||
isUpdatingFunnel,
|
||||
} = useFunnelContext();
|
||||
return useQuery({
|
||||
queryFn: ({ signal }) => getFunnelStepsOverview(funnelId, payload, signal),
|
||||
queryFn: ({ signal }) => getFunnelStepsOverview(payload, signal),
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_FUNNEL_STEPS_OVERVIEW,
|
||||
funnelId,
|
||||
selectedTime,
|
||||
payload.step_start ?? '',
|
||||
payload.step_end ?? '',
|
||||
payload.steps,
|
||||
],
|
||||
enabled: !!funnelId && validTracesCount > 0 && hasFunnelBeenExecuted,
|
||||
enabled: !!funnelId && validTracesCount > 0 && !isUpdatingFunnel,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -38,6 +38,21 @@
|
||||
}
|
||||
}
|
||||
|
||||
.alert-empty-card {
|
||||
margin-top: 50px;
|
||||
.ant-empty-description {
|
||||
color: var(--text-vanilla-400);
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.alert-empty-card {
|
||||
.ant-empty-description {
|
||||
color: var(--text-ink-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.alert-details {
|
||||
margin-top: 10px;
|
||||
.divider {
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import './AlertDetails.styles.scss';
|
||||
|
||||
import { Breadcrumb, Button, Divider } from 'antd';
|
||||
import { Breadcrumb, Button, Divider, Empty } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { Filters } from 'components/AlertDetailsFilters/Filters';
|
||||
import NotFound from 'components/NotFound';
|
||||
import RouteTab from 'components/RouteTab';
|
||||
import Spinner from 'components/Spinner';
|
||||
import ROUTES from 'constants/routes';
|
||||
@@ -70,6 +69,7 @@ BreadCrumbItem.defaultProps = {
|
||||
function AlertDetails(): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
const { routes } = useRouteTabUtils();
|
||||
const { t } = useTranslation(['alerts']);
|
||||
|
||||
const {
|
||||
isLoading,
|
||||
@@ -90,7 +90,11 @@ function AlertDetails(): JSX.Element {
|
||||
!isValidRuleId ||
|
||||
(alertDetailsResponse && alertDetailsResponse.statusCode !== 200)
|
||||
) {
|
||||
return <NotFound />;
|
||||
return (
|
||||
<div className="alert-empty-card">
|
||||
<Empty description={t('alert_rule_not_found')} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const handleTabChange = (route: string): void => {
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
}
|
||||
|
||||
.ant-tabs-content-holder {
|
||||
padding-left: 16px;
|
||||
padding-right: 16px;
|
||||
padding: 8px;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
.edit-alert-channels-container {
|
||||
width: 90%;
|
||||
margin: 12px auto;
|
||||
margin: 12px;
|
||||
|
||||
border: 1px solid var(--Slate-500, #161922);
|
||||
background: var(--Ink-400, #121317);
|
||||
|
||||
@@ -15,23 +15,27 @@ import {
|
||||
import EditAlertChannels from 'container/EditAlertChannels';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useParams } from 'react-router-dom';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import { Channels } from 'types/api/channels/getAll';
|
||||
import APIError from 'types/api/error';
|
||||
|
||||
function ChannelsEdit(): JSX.Element {
|
||||
const { id } = useParams<Params>();
|
||||
const { t } = useTranslation();
|
||||
|
||||
// Extract channelId from URL pathname since useParams doesn't work in nested routing
|
||||
const { pathname } = window.location;
|
||||
const channelIdMatch = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
|
||||
const channelId = channelIdMatch ? channelIdMatch[1] : undefined;
|
||||
|
||||
const { isFetching, isError, data, error } = useQuery<
|
||||
SuccessResponseV2<Channels>,
|
||||
APIError
|
||||
>(['getChannel', id], {
|
||||
>(['getChannel', channelId], {
|
||||
queryFn: () =>
|
||||
get({
|
||||
id,
|
||||
id: channelId || '',
|
||||
}),
|
||||
enabled: !!channelId,
|
||||
});
|
||||
|
||||
if (isError) {
|
||||
@@ -144,8 +148,5 @@ function ChannelsEdit(): JSX.Element {
|
||||
</div>
|
||||
);
|
||||
}
|
||||
interface Params {
|
||||
id: string;
|
||||
}
|
||||
|
||||
export default ChannelsEdit;
|
||||
|
||||
@@ -7,6 +7,7 @@ import NewWidget from 'container/NewWidget';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { generatePath, useLocation, useParams } from 'react-router-dom';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
@@ -52,11 +53,13 @@ function DashboardWidget(): JSX.Element | null {
|
||||
}
|
||||
|
||||
return (
|
||||
<NewWidget
|
||||
yAxisUnit={selectedWidget?.yAxisUnit}
|
||||
selectedGraph={selectedGraph}
|
||||
fillSpans={selectedWidget?.fillSpans}
|
||||
/>
|
||||
<PreferenceContextProvider>
|
||||
<NewWidget
|
||||
yAxisUnit={selectedWidget?.yAxisUnit}
|
||||
selectedGraph={selectedGraph}
|
||||
fillSpans={selectedWidget?.fillSpans}
|
||||
/>
|
||||
</PreferenceContextProvider>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -3,9 +3,14 @@ import ROUTES from 'constants/routes';
|
||||
import InfraMonitoringHosts from 'container/InfraMonitoringHosts';
|
||||
import InfraMonitoringK8s from 'container/InfraMonitoringK8s';
|
||||
import { Inbox } from 'lucide-react';
|
||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
|
||||
export const Hosts: TabRoutes = {
|
||||
Component: InfraMonitoringHosts,
|
||||
Component: (): JSX.Element => (
|
||||
<PreferenceContextProvider>
|
||||
<InfraMonitoringHosts />
|
||||
</PreferenceContextProvider>
|
||||
),
|
||||
name: (
|
||||
<div className="tab-item">
|
||||
<Inbox size={16} /> Hosts
|
||||
@@ -16,7 +21,11 @@ export const Hosts: TabRoutes = {
|
||||
};
|
||||
|
||||
export const Kubernetes: TabRoutes = {
|
||||
Component: InfraMonitoringK8s,
|
||||
Component: (): JSX.Element => (
|
||||
<PreferenceContextProvider>
|
||||
<InfraMonitoringK8s />
|
||||
</PreferenceContextProvider>
|
||||
),
|
||||
name: (
|
||||
<div className="tab-item">
|
||||
<Inbox size={16} /> Kubernetes
|
||||
|
||||
@@ -10,6 +10,7 @@ import LogsFilters from 'container/LogsFilters';
|
||||
import LogsSearchFilter from 'container/LogsSearchFilter';
|
||||
import LogsTable from 'container/LogsTable';
|
||||
import history from 'lib/history';
|
||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
@@ -82,69 +83,71 @@ function OldLogsExplorer(): JSX.Element {
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="old-logs-explorer">
|
||||
<SpaceContainer
|
||||
split={<Divider type="vertical" />}
|
||||
align="center"
|
||||
direction="horizontal"
|
||||
>
|
||||
<LogsSearchFilter />
|
||||
<LogLiveTail />
|
||||
</SpaceContainer>
|
||||
<PreferenceContextProvider>
|
||||
<div className="old-logs-explorer">
|
||||
<SpaceContainer
|
||||
split={<Divider type="vertical" />}
|
||||
align="center"
|
||||
direction="horizontal"
|
||||
>
|
||||
<LogsSearchFilter />
|
||||
<LogLiveTail />
|
||||
</SpaceContainer>
|
||||
|
||||
<LogsAggregate />
|
||||
<LogsAggregate />
|
||||
|
||||
<Row gutter={20} wrap={false}>
|
||||
<LogsFilters />
|
||||
<Col flex={1} className="logs-col-container">
|
||||
<Row>
|
||||
<Col flex={1}>
|
||||
<Space align="baseline" direction="horizontal">
|
||||
<Select
|
||||
getPopupContainer={popupContainer}
|
||||
style={defaultSelectStyle}
|
||||
value={selectedViewModeOption}
|
||||
onChange={onChangeVeiwMode}
|
||||
>
|
||||
{viewModeOptionList.map((option) => (
|
||||
<Select.Option key={option.value}>{option.label}</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
|
||||
{isFormatButtonVisible && (
|
||||
<Popover
|
||||
<Row gutter={20} wrap={false}>
|
||||
<LogsFilters />
|
||||
<Col flex={1} className="logs-col-container">
|
||||
<Row>
|
||||
<Col flex={1}>
|
||||
<Space align="baseline" direction="horizontal">
|
||||
<Select
|
||||
getPopupContainer={popupContainer}
|
||||
placement="right"
|
||||
content={renderPopoverContent}
|
||||
style={defaultSelectStyle}
|
||||
value={selectedViewModeOption}
|
||||
onChange={onChangeVeiwMode}
|
||||
>
|
||||
<Button>Format</Button>
|
||||
</Popover>
|
||||
)}
|
||||
{viewModeOptionList.map((option) => (
|
||||
<Select.Option key={option.value}>{option.label}</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
|
||||
<Select
|
||||
getPopupContainer={popupContainer}
|
||||
style={defaultSelectStyle}
|
||||
defaultValue={order}
|
||||
onChange={handleChangeOrder}
|
||||
>
|
||||
{orderItems.map((item) => (
|
||||
<Select.Option key={item.enum}>{item.name}</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
</Space>
|
||||
</Col>
|
||||
{isFormatButtonVisible && (
|
||||
<Popover
|
||||
getPopupContainer={popupContainer}
|
||||
placement="right"
|
||||
content={renderPopoverContent}
|
||||
>
|
||||
<Button>Format</Button>
|
||||
</Popover>
|
||||
)}
|
||||
|
||||
<Col>
|
||||
<LogControls />
|
||||
</Col>
|
||||
</Row>
|
||||
<Select
|
||||
getPopupContainer={popupContainer}
|
||||
style={defaultSelectStyle}
|
||||
defaultValue={order}
|
||||
onChange={handleChangeOrder}
|
||||
>
|
||||
{orderItems.map((item) => (
|
||||
<Select.Option key={item.enum}>{item.name}</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
</Space>
|
||||
</Col>
|
||||
|
||||
<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
|
||||
</Col>
|
||||
</Row>
|
||||
<Col>
|
||||
<LogControls />
|
||||
</Col>
|
||||
</Row>
|
||||
|
||||
<LogDetailedView />
|
||||
</div>
|
||||
<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
|
||||
</Col>
|
||||
</Row>
|
||||
|
||||
<LogDetailedView />
|
||||
</div>
|
||||
</PreferenceContextProvider>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import NotFound from 'components/NotFound';
|
||||
import Spinner from 'components/Spinner';
|
||||
import NewDashboard from 'container/NewDashboard';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
import { useEffect } from 'react';
|
||||
import { ErrorType } from 'types/common';
|
||||
|
||||
@@ -35,7 +36,11 @@ function DashboardPage(): JSX.Element {
|
||||
return <Spinner tip="Loading.." />;
|
||||
}
|
||||
|
||||
return <NewDashboard />;
|
||||
return (
|
||||
<PreferenceContextProvider>
|
||||
<NewDashboard />
|
||||
</PreferenceContextProvider>
|
||||
);
|
||||
}
|
||||
|
||||
export default DashboardPage;
|
||||
|
||||
@@ -24,7 +24,12 @@ import { getRoutes } from './utils';
|
||||
function SettingsPage(): JSX.Element {
|
||||
const { pathname, search } = useLocation();
|
||||
|
||||
const { user, featureFlags, trialInfo } = useAppContext();
|
||||
const {
|
||||
user,
|
||||
featureFlags,
|
||||
trialInfo,
|
||||
isFetchingActiveLicense,
|
||||
} = useAppContext();
|
||||
const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();
|
||||
|
||||
const [settingsMenuItems, setSettingsMenuItems] = useState<SidebarItem[]>(
|
||||
@@ -51,6 +56,21 @@ function SettingsPage(): JSX.Element {
|
||||
setSettingsMenuItems((prevItems) => {
|
||||
let updatedItems = [...prevItems];
|
||||
|
||||
if (trialInfo?.workSpaceBlock && !isFetchingActiveLicense) {
|
||||
updatedItems = updatedItems.map((item) => ({
|
||||
...item,
|
||||
isEnabled: !!(
|
||||
isAdmin &&
|
||||
(item.key === ROUTES.BILLING ||
|
||||
item.key === ROUTES.ORG_SETTINGS ||
|
||||
item.key === ROUTES.MY_SETTINGS ||
|
||||
item.key === ROUTES.SHORTCUTS)
|
||||
),
|
||||
}));
|
||||
|
||||
return updatedItems;
|
||||
}
|
||||
|
||||
if (isCloudUser) {
|
||||
if (isAdmin) {
|
||||
updatedItems = updatedItems.map((item) => ({
|
||||
@@ -61,7 +81,8 @@ function SettingsPage(): JSX.Element {
|
||||
item.key === ROUTES.CUSTOM_DOMAIN_SETTINGS ||
|
||||
item.key === ROUTES.API_KEYS ||
|
||||
item.key === ROUTES.INGESTION_SETTINGS ||
|
||||
item.key === ROUTES.ORG_SETTINGS
|
||||
item.key === ROUTES.ORG_SETTINGS ||
|
||||
item.key === ROUTES.SHORTCUTS
|
||||
? true
|
||||
: item.isEnabled,
|
||||
}));
|
||||
@@ -72,7 +93,8 @@ function SettingsPage(): JSX.Element {
|
||||
...item,
|
||||
isEnabled:
|
||||
item.key === ROUTES.INGESTION_SETTINGS ||
|
||||
item.key === ROUTES.INTEGRATIONS
|
||||
item.key === ROUTES.INTEGRATIONS ||
|
||||
item.key === ROUTES.SHORTCUTS
|
||||
? true
|
||||
: item.isEnabled,
|
||||
}));
|
||||
@@ -87,7 +109,8 @@ function SettingsPage(): JSX.Element {
|
||||
item.key === ROUTES.BILLING ||
|
||||
item.key === ROUTES.INTEGRATIONS ||
|
||||
item.key === ROUTES.API_KEYS ||
|
||||
item.key === ROUTES.ORG_SETTINGS
|
||||
item.key === ROUTES.ORG_SETTINGS ||
|
||||
item.key === ROUTES.SHORTCUTS
|
||||
? true
|
||||
: item.isEnabled,
|
||||
}));
|
||||
@@ -107,7 +130,9 @@ function SettingsPage(): JSX.Element {
|
||||
updatedItems = updatedItems.map((item) => ({
|
||||
...item,
|
||||
isEnabled:
|
||||
item.key === ROUTES.API_KEYS || item.key === ROUTES.ORG_SETTINGS
|
||||
item.key === ROUTES.API_KEYS ||
|
||||
item.key === ROUTES.ORG_SETTINGS ||
|
||||
item.key === ROUTES.SHORTCUTS
|
||||
? true
|
||||
: item.isEnabled,
|
||||
}));
|
||||
@@ -125,7 +150,15 @@ function SettingsPage(): JSX.Element {
|
||||
|
||||
return updatedItems;
|
||||
});
|
||||
}, [isAdmin, isEditor, isCloudUser, isEnterpriseSelfHostedUser]);
|
||||
}, [
|
||||
isAdmin,
|
||||
isEditor,
|
||||
isCloudUser,
|
||||
isEnterpriseSelfHostedUser,
|
||||
isFetchingActiveLicense,
|
||||
trialInfo?.workSpaceBlock,
|
||||
pathname,
|
||||
]);
|
||||
|
||||
const routes = useMemo(
|
||||
() =>
|
||||
@@ -184,6 +217,13 @@ function SettingsPage(): JSX.Element {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (
|
||||
pathname.startsWith(ROUTES.CHANNELS_EDIT) &&
|
||||
key === ROUTES.ALL_CHANNELS
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return pathname === key;
|
||||
};
|
||||
|
||||
|
||||
@@ -32,7 +32,12 @@ export const getRoutes = (
|
||||
const isEditor = userRole === USER_ROLES.EDITOR;
|
||||
|
||||
if (isWorkspaceBlocked && isAdmin) {
|
||||
settings.push(...organizationSettings(t));
|
||||
settings.push(
|
||||
...organizationSettings(t),
|
||||
...mySettings(t),
|
||||
...billingSettings(t),
|
||||
...keyboardShortcuts(t),
|
||||
);
|
||||
|
||||
return settings;
|
||||
}
|
||||
|
||||
@@ -67,19 +67,15 @@ export default function TraceDetailsPage(): JSX.Element {
|
||||
key: 'trace-details',
|
||||
children: <TraceDetailsV2 />,
|
||||
},
|
||||
...(process.env.NODE_ENV === 'development'
|
||||
? [
|
||||
{
|
||||
label: (
|
||||
<div className="tab-item">
|
||||
<Cone className="funnel-icon" size={16} /> Funnels
|
||||
</div>
|
||||
),
|
||||
key: 'funnels',
|
||||
children: <div />,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{
|
||||
label: (
|
||||
<div className="tab-item">
|
||||
<Cone className="funnel-icon" size={16} /> Funnels
|
||||
</div>
|
||||
),
|
||||
key: 'funnels',
|
||||
children: <div />,
|
||||
},
|
||||
{
|
||||
label: (
|
||||
<div className="tab-item">
|
||||
|
||||
@@ -2,6 +2,7 @@ import './DeleteFunnelStep.styles.scss';
|
||||
|
||||
import SignozModal from 'components/SignozModal/SignozModal';
|
||||
import { Trash2, X } from 'lucide-react';
|
||||
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
|
||||
|
||||
interface DeleteFunnelStepProps {
|
||||
isOpen: boolean;
|
||||
@@ -14,8 +15,10 @@ function DeleteFunnelStep({
|
||||
onClose,
|
||||
onStepRemove,
|
||||
}: DeleteFunnelStepProps): JSX.Element {
|
||||
const { handleRunFunnel } = useFunnelContext();
|
||||
const handleStepRemoval = (): void => {
|
||||
onStepRemove();
|
||||
handleRunFunnel();
|
||||
onClose();
|
||||
};
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import useFunnelConfiguration from 'hooks/TracesFunnels/useFunnelConfiguration';
|
||||
import { PencilLine } from 'lucide-react';
|
||||
import FunnelItemPopover from 'pages/TracesFunnels/components/FunnelsList/FunnelItemPopover';
|
||||
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
|
||||
import CopyToClipboard from 'periscope/components/CopyToClipboard';
|
||||
import { memo, useState } from 'react';
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
@@ -21,7 +22,6 @@ interface FunnelConfigurationProps {
|
||||
funnel: FunnelData;
|
||||
isTraceDetailsPage?: boolean;
|
||||
span?: Span;
|
||||
disableAutoSave?: boolean;
|
||||
triggerAutoSave?: boolean;
|
||||
showNotifications?: boolean;
|
||||
}
|
||||
@@ -30,15 +30,19 @@ function FunnelConfiguration({
|
||||
funnel,
|
||||
isTraceDetailsPage,
|
||||
span,
|
||||
disableAutoSave,
|
||||
triggerAutoSave,
|
||||
showNotifications,
|
||||
}: FunnelConfigurationProps): JSX.Element {
|
||||
const { isPopoverOpen, setIsPopoverOpen, steps } = useFunnelConfiguration({
|
||||
const { triggerSave } = useFunnelContext();
|
||||
const {
|
||||
isPopoverOpen,
|
||||
setIsPopoverOpen,
|
||||
steps,
|
||||
isSaving,
|
||||
} = useFunnelConfiguration({
|
||||
funnel,
|
||||
disableAutoSave,
|
||||
triggerAutoSave,
|
||||
showNotifications,
|
||||
triggerAutoSave: triggerAutoSave || triggerSave,
|
||||
showNotifications: showNotifications || triggerSave,
|
||||
});
|
||||
const [isDescriptionModalOpen, setIsDescriptionModalOpen] = useState<boolean>(
|
||||
false,
|
||||
@@ -106,7 +110,7 @@ function FunnelConfiguration({
|
||||
|
||||
{!isTraceDetailsPage && (
|
||||
<>
|
||||
<StepsFooter stepsCount={steps.length} />
|
||||
<StepsFooter stepsCount={steps.length} isSaving={isSaving || false} />
|
||||
<AddFunnelDescriptionModal
|
||||
isOpen={isDescriptionModalOpen}
|
||||
onClose={handleDescriptionModalClose}
|
||||
@@ -122,7 +126,6 @@ function FunnelConfiguration({
|
||||
FunnelConfiguration.defaultProps = {
|
||||
isTraceDetailsPage: false,
|
||||
span: undefined,
|
||||
disableAutoSave: false,
|
||||
triggerAutoSave: false,
|
||||
showNotifications: false,
|
||||
};
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
color: var(--bg-vanilla-400);
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
border-radius: 6px;
|
||||
width: 100%;
|
||||
.step-popover {
|
||||
opacity: 0;
|
||||
width: 22px;
|
||||
|
||||
@@ -40,11 +40,6 @@
|
||||
letter-spacing: 0.12px;
|
||||
border-radius: 2px;
|
||||
|
||||
&--sync {
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-ink-300);
|
||||
color: var(--bg-vanilla-400);
|
||||
}
|
||||
&--run {
|
||||
background-color: var(--bg-robin-500);
|
||||
}
|
||||
|
||||
@@ -1,53 +1,14 @@
|
||||
import './StepsFooter.styles.scss';
|
||||
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Button, Skeleton, Spin } from 'antd';
|
||||
import { Button, Skeleton } from 'antd';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { Cone, Play, RefreshCcw } from 'lucide-react';
|
||||
import { Check, Cone } from 'lucide-react';
|
||||
import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
|
||||
import { useMemo } from 'react';
|
||||
import { useIsFetching, useIsMutating } from 'react-query';
|
||||
|
||||
const useFunnelResultsLoading = (): boolean => {
|
||||
const { funnelId } = useFunnelContext();
|
||||
|
||||
const isFetchingFunnelOverview = useIsFetching({
|
||||
queryKey: [REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW, funnelId],
|
||||
});
|
||||
|
||||
const isFetchingStepsGraphData = useIsFetching({
|
||||
queryKey: [REACT_QUERY_KEY.GET_FUNNEL_STEPS_GRAPH_DATA, funnelId],
|
||||
});
|
||||
|
||||
const isFetchingErrorTraces = useIsFetching({
|
||||
queryKey: [REACT_QUERY_KEY.GET_FUNNEL_ERROR_TRACES, funnelId],
|
||||
});
|
||||
|
||||
const isFetchingSlowTraces = useIsFetching({
|
||||
queryKey: [REACT_QUERY_KEY.GET_FUNNEL_SLOW_TRACES, funnelId],
|
||||
});
|
||||
|
||||
return useMemo(() => {
|
||||
if (!funnelId) {
|
||||
return false;
|
||||
}
|
||||
return (
|
||||
!!isFetchingFunnelOverview ||
|
||||
!!isFetchingStepsGraphData ||
|
||||
!!isFetchingErrorTraces ||
|
||||
!!isFetchingSlowTraces
|
||||
);
|
||||
}, [
|
||||
funnelId,
|
||||
isFetchingFunnelOverview,
|
||||
isFetchingStepsGraphData,
|
||||
isFetchingErrorTraces,
|
||||
isFetchingSlowTraces,
|
||||
]);
|
||||
};
|
||||
import { useIsMutating } from 'react-query';
|
||||
|
||||
interface StepsFooterProps {
|
||||
stepsCount: number;
|
||||
isSaving: boolean;
|
||||
}
|
||||
|
||||
function ValidTracesCount(): JSX.Element {
|
||||
@@ -93,21 +54,13 @@ function ValidTracesCount(): JSX.Element {
|
||||
return <span className="steps-footer__valid-traces">Valid traces found</span>;
|
||||
}
|
||||
|
||||
function StepsFooter({ stepsCount }: StepsFooterProps): JSX.Element {
|
||||
function StepsFooter({ stepsCount, isSaving }: StepsFooterProps): JSX.Element {
|
||||
const {
|
||||
validTracesCount,
|
||||
handleRunFunnel,
|
||||
hasFunnelBeenExecuted,
|
||||
funnelId,
|
||||
hasIncompleteStepFields,
|
||||
handleSaveFunnel,
|
||||
hasUnsavedChanges,
|
||||
} = useFunnelContext();
|
||||
|
||||
const isFunnelResultsLoading = useFunnelResultsLoading();
|
||||
|
||||
const isFunnelUpdateMutating = useIsMutating([
|
||||
REACT_QUERY_KEY.UPDATE_FUNNEL_STEPS,
|
||||
funnelId,
|
||||
]);
|
||||
|
||||
return (
|
||||
<div className="steps-footer">
|
||||
<div className="steps-footer__left">
|
||||
@@ -117,38 +70,16 @@ function StepsFooter({ stepsCount }: StepsFooterProps): JSX.Element {
|
||||
<ValidTracesCount />
|
||||
</div>
|
||||
<div className="steps-footer__right">
|
||||
{!!isFunnelUpdateMutating && (
|
||||
<div className="steps-footer__button steps-footer__button--updating">
|
||||
<Spin
|
||||
indicator={<LoadingOutlined style={{ color: 'grey' }} />}
|
||||
size="small"
|
||||
/>
|
||||
Updating
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!hasFunnelBeenExecuted ? (
|
||||
<Button
|
||||
disabled={validTracesCount === 0}
|
||||
onClick={handleRunFunnel}
|
||||
type="primary"
|
||||
className="steps-footer__button steps-footer__button--run"
|
||||
icon={<Play size={16} />}
|
||||
>
|
||||
Run funnel
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
type="text"
|
||||
className="steps-footer__button steps-footer__button--sync"
|
||||
icon={<RefreshCcw size={16} />}
|
||||
onClick={handleRunFunnel}
|
||||
loading={isFunnelResultsLoading}
|
||||
disabled={validTracesCount === 0}
|
||||
>
|
||||
Refresh
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
disabled={hasIncompleteStepFields || !hasUnsavedChanges}
|
||||
onClick={handleSaveFunnel}
|
||||
type="primary"
|
||||
className="steps-footer__button steps-footer__button--run"
|
||||
icon={<Check size={14} />}
|
||||
loading={isSaving}
|
||||
>
|
||||
Save funnel
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -29,13 +29,20 @@ Chart.register(
|
||||
);
|
||||
|
||||
function FunnelGraph(): JSX.Element {
|
||||
const { funnelId } = useFunnelContext();
|
||||
const { funnelId, startTime, endTime, steps } = useFunnelContext();
|
||||
|
||||
const payload = {
|
||||
start_time: startTime,
|
||||
end_time: endTime,
|
||||
steps,
|
||||
};
|
||||
|
||||
const {
|
||||
data: stepsData,
|
||||
isLoading,
|
||||
isFetching,
|
||||
isError,
|
||||
} = useFunnelStepsGraphData(funnelId);
|
||||
} = useFunnelStepsGraphData(funnelId, payload);
|
||||
|
||||
const data = useMemo(() => stepsData?.payload?.data?.[0]?.data, [
|
||||
stepsData?.payload?.data,
|
||||
|
||||
@@ -16,7 +16,6 @@ function FunnelResults(): JSX.Element {
|
||||
isValidateStepsLoading,
|
||||
hasIncompleteStepFields,
|
||||
hasAllEmptyStepFields,
|
||||
hasFunnelBeenExecuted,
|
||||
funnelId,
|
||||
} = useFunnelContext();
|
||||
|
||||
@@ -47,14 +46,6 @@ function FunnelResults(): JSX.Element {
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (!hasFunnelBeenExecuted) {
|
||||
return (
|
||||
<EmptyFunnelResults
|
||||
title="Funnel has not been run yet."
|
||||
description="Run the funnel to see the results"
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="funnel-results">
|
||||
|
||||
@@ -7,6 +7,7 @@ import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
|
||||
import { useMemo } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { FunnelStepData } from 'types/api/traceFunnels';
|
||||
|
||||
import FunnelTable from './FunnelTable';
|
||||
import { topTracesTableColumns } from './utils';
|
||||
@@ -24,6 +25,7 @@ interface FunnelTopTracesTableProps {
|
||||
SuccessResponse<SlowTraceData | ErrorTraceData> | ErrorResponse,
|
||||
Error
|
||||
>;
|
||||
steps: FunnelStepData[];
|
||||
}
|
||||
|
||||
function FunnelTopTracesTable({
|
||||
@@ -32,6 +34,7 @@ function FunnelTopTracesTable({
|
||||
stepBOrder,
|
||||
title,
|
||||
tooltip,
|
||||
steps,
|
||||
useQueryHook,
|
||||
}: FunnelTopTracesTableProps): JSX.Element {
|
||||
const { startTime, endTime } = useFunnelContext();
|
||||
@@ -41,8 +44,9 @@ function FunnelTopTracesTable({
|
||||
end_time: endTime,
|
||||
step_start: stepAOrder,
|
||||
step_end: stepBOrder,
|
||||
steps,
|
||||
}),
|
||||
[startTime, endTime, stepAOrder, stepBOrder],
|
||||
[startTime, endTime, stepAOrder, stepBOrder, steps],
|
||||
);
|
||||
|
||||
const { data: response, isLoading, isFetching } = useQueryHook(
|
||||
|
||||
@@ -6,7 +6,7 @@ import FunnelMetricsTable from './FunnelMetricsTable';
|
||||
function OverallMetrics(): JSX.Element {
|
||||
const { funnelId } = useParams<{ funnelId: string }>();
|
||||
const { isLoading, metricsData, conversionRate, isError } = useFunnelMetrics({
|
||||
funnelId: funnelId || '',
|
||||
funnelId,
|
||||
});
|
||||
|
||||
return (
|
||||
|
||||
@@ -52,11 +52,13 @@ function StepsTransitionResults(): JSX.Element {
|
||||
funnelId={funnelId}
|
||||
stepAOrder={stepAOrder}
|
||||
stepBOrder={stepBOrder}
|
||||
steps={steps}
|
||||
/>
|
||||
<TopTracesWithErrors
|
||||
funnelId={funnelId}
|
||||
stepAOrder={stepAOrder}
|
||||
stepBOrder={stepBOrder}
|
||||
steps={steps}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { useFunnelSlowTraces } from 'hooks/TracesFunnels/useFunnels';
|
||||
import { FunnelStepData } from 'types/api/traceFunnels';
|
||||
|
||||
import FunnelTopTracesTable from './FunnelTopTracesTable';
|
||||
|
||||
@@ -6,6 +7,7 @@ interface TopSlowestTracesProps {
|
||||
funnelId: string;
|
||||
stepAOrder: number;
|
||||
stepBOrder: number;
|
||||
steps: FunnelStepData[];
|
||||
}
|
||||
|
||||
function TopSlowestTraces(props: TopSlowestTracesProps): JSX.Element {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { useFunnelErrorTraces } from 'hooks/TracesFunnels/useFunnels';
|
||||
import { FunnelStepData } from 'types/api/traceFunnels';
|
||||
|
||||
import FunnelTopTracesTable from './FunnelTopTracesTable';
|
||||
|
||||
@@ -6,6 +7,7 @@ interface TopTracesWithErrorsProps {
|
||||
funnelId: string;
|
||||
stepAOrder: number;
|
||||
stepBOrder: number;
|
||||
steps: FunnelStepData[];
|
||||
}
|
||||
|
||||
function TopTracesWithErrors(props: TopTracesWithErrorsProps): JSX.Element {
|
||||
|
||||
@@ -18,10 +18,4 @@ export const topTracesTableColumns = [
|
||||
key: 'duration_ms',
|
||||
render: (value: string): string => getYAxisFormattedValue(value, 'ms'),
|
||||
},
|
||||
{
|
||||
title: 'SPAN COUNT',
|
||||
dataIndex: 'span_count',
|
||||
key: 'span_count',
|
||||
render: (value: number): string => value.toString(),
|
||||
},
|
||||
];
|
||||
|
||||
@@ -14,8 +14,6 @@ export const initialStepsData: FunnelStepData[] = [
|
||||
latency_pointer: 'start',
|
||||
latency_type: undefined,
|
||||
has_errors: false,
|
||||
name: '',
|
||||
description: '',
|
||||
},
|
||||
{
|
||||
id: v4(),
|
||||
@@ -29,8 +27,6 @@ export const initialStepsData: FunnelStepData[] = [
|
||||
latency_pointer: 'start',
|
||||
latency_type: LatencyOptions.P95,
|
||||
has_errors: false,
|
||||
name: '',
|
||||
description: '',
|
||||
},
|
||||
];
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ValidateFunnelResponse } from 'api/traceFunnels';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { Time } from 'container/TopNav/DateTimeSelection/config';
|
||||
import {
|
||||
CustomTimeType,
|
||||
Time as TimeV2,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { normalizeSteps } from 'hooks/TracesFunnels/useFunnelConfiguration';
|
||||
import { useValidateFunnelSteps } from 'hooks/TracesFunnels/useFunnels';
|
||||
import { useLocalStorage } from 'hooks/useLocalStorage';
|
||||
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import { initialStepsData } from 'pages/TracesFunnelDetails/constants';
|
||||
import {
|
||||
createContext,
|
||||
@@ -41,6 +41,9 @@ interface FunnelContextType {
|
||||
handleStepChange: (index: number, newStep: Partial<FunnelStepData>) => void;
|
||||
handleStepRemoval: (index: number) => void;
|
||||
handleRunFunnel: () => void;
|
||||
handleSaveFunnel: () => void;
|
||||
triggerSave: boolean;
|
||||
hasUnsavedChanges: boolean;
|
||||
validationResponse:
|
||||
| SuccessResponse<ValidateFunnelResponse>
|
||||
| ErrorResponse
|
||||
@@ -54,8 +57,10 @@ interface FunnelContextType {
|
||||
spanName: string,
|
||||
) => void;
|
||||
handleRestoreSteps: (oldSteps: FunnelStepData[]) => void;
|
||||
hasFunnelBeenExecuted: boolean;
|
||||
setHasFunnelBeenExecuted: Dispatch<SetStateAction<boolean>>;
|
||||
isUpdatingFunnel: boolean;
|
||||
setIsUpdatingFunnel: Dispatch<SetStateAction<boolean>>;
|
||||
lastUpdatedSteps: FunnelStepData[];
|
||||
setLastUpdatedSteps: Dispatch<SetStateAction<FunnelStepData[]>>;
|
||||
}
|
||||
|
||||
const FunnelContext = createContext<FunnelContextType | undefined>(undefined);
|
||||
@@ -86,6 +91,19 @@ export function FunnelProvider({
|
||||
const funnel = data?.payload;
|
||||
const initialSteps = funnel?.steps?.length ? funnel.steps : initialStepsData;
|
||||
const [steps, setSteps] = useState<FunnelStepData[]>(initialSteps);
|
||||
const [triggerSave, setTriggerSave] = useState<boolean>(false);
|
||||
const [isUpdatingFunnel, setIsUpdatingFunnel] = useState<boolean>(false);
|
||||
const [lastUpdatedSteps, setLastUpdatedSteps] = useState<FunnelStepData[]>(
|
||||
initialSteps,
|
||||
);
|
||||
|
||||
// Check if there are unsaved changes by comparing with initial steps from API
|
||||
const hasUnsavedChanges = useMemo(() => {
|
||||
const normalizedCurrentSteps = normalizeSteps(steps);
|
||||
const normalizedInitialSteps = normalizeSteps(lastUpdatedSteps);
|
||||
return !isEqual(normalizedCurrentSteps, normalizedInitialSteps);
|
||||
}, [steps, lastUpdatedSteps]);
|
||||
|
||||
const { hasIncompleteStepFields, hasAllEmptyStepFields } = useMemo(
|
||||
() => ({
|
||||
hasAllEmptyStepFields: steps.every(
|
||||
@@ -98,15 +116,6 @@ export function FunnelProvider({
|
||||
[steps],
|
||||
);
|
||||
|
||||
const [unexecutedFunnels, setUnexecutedFunnels] = useLocalStorage<string[]>(
|
||||
LOCALSTORAGE.UNEXECUTED_FUNNELS,
|
||||
[],
|
||||
);
|
||||
|
||||
const [hasFunnelBeenExecuted, setHasFunnelBeenExecuted] = useState(
|
||||
!unexecutedFunnels.includes(funnelId),
|
||||
);
|
||||
|
||||
const {
|
||||
data: validationResponse,
|
||||
isLoading: isValidationLoading,
|
||||
@@ -116,7 +125,13 @@ export function FunnelProvider({
|
||||
selectedTime,
|
||||
startTime,
|
||||
endTime,
|
||||
enabled: !!funnelId && !!selectedTime && !!startTime && !!endTime,
|
||||
enabled:
|
||||
!!funnelId &&
|
||||
!!selectedTime &&
|
||||
!!startTime &&
|
||||
!!endTime &&
|
||||
!hasIncompleteStepFields,
|
||||
steps,
|
||||
});
|
||||
|
||||
const validTracesCount = useMemo(
|
||||
@@ -185,11 +200,7 @@ export function FunnelProvider({
|
||||
|
||||
const handleRunFunnel = useCallback(async (): Promise<void> => {
|
||||
if (validTracesCount === 0) return;
|
||||
if (!hasFunnelBeenExecuted) {
|
||||
setUnexecutedFunnels(unexecutedFunnels.filter((id) => id !== funnelId));
|
||||
|
||||
setHasFunnelBeenExecuted(true);
|
||||
}
|
||||
queryClient.refetchQueries([
|
||||
REACT_QUERY_KEY.GET_FUNNEL_OVERVIEW,
|
||||
funnelId,
|
||||
@@ -215,15 +226,13 @@ export function FunnelProvider({
|
||||
funnelId,
|
||||
selectedTime,
|
||||
]);
|
||||
}, [
|
||||
funnelId,
|
||||
hasFunnelBeenExecuted,
|
||||
unexecutedFunnels,
|
||||
queryClient,
|
||||
selectedTime,
|
||||
setUnexecutedFunnels,
|
||||
validTracesCount,
|
||||
]);
|
||||
}, [funnelId, queryClient, selectedTime, validTracesCount]);
|
||||
|
||||
const handleSaveFunnel = useCallback(() => {
|
||||
setTriggerSave(true);
|
||||
// Reset the trigger after a brief moment to allow useFunnelConfiguration to pick it up
|
||||
setTimeout(() => setTriggerSave(false), 100);
|
||||
}, []);
|
||||
|
||||
const value = useMemo<FunnelContextType>(
|
||||
() => ({
|
||||
@@ -239,14 +248,19 @@ export function FunnelProvider({
|
||||
handleAddStep: addNewStep,
|
||||
handleStepRemoval,
|
||||
handleRunFunnel,
|
||||
handleSaveFunnel,
|
||||
triggerSave,
|
||||
validationResponse,
|
||||
isValidateStepsLoading: isValidationLoading || isValidationFetching,
|
||||
hasIncompleteStepFields,
|
||||
hasAllEmptyStepFields,
|
||||
handleReplaceStep,
|
||||
handleRestoreSteps,
|
||||
hasFunnelBeenExecuted,
|
||||
setHasFunnelBeenExecuted,
|
||||
hasUnsavedChanges,
|
||||
setIsUpdatingFunnel,
|
||||
isUpdatingFunnel,
|
||||
lastUpdatedSteps,
|
||||
setLastUpdatedSteps,
|
||||
}),
|
||||
[
|
||||
funnelId,
|
||||
@@ -260,6 +274,8 @@ export function FunnelProvider({
|
||||
addNewStep,
|
||||
handleStepRemoval,
|
||||
handleRunFunnel,
|
||||
handleSaveFunnel,
|
||||
triggerSave,
|
||||
validationResponse,
|
||||
isValidationLoading,
|
||||
isValidationFetching,
|
||||
@@ -267,8 +283,11 @@ export function FunnelProvider({
|
||||
hasAllEmptyStepFields,
|
||||
handleReplaceStep,
|
||||
handleRestoreSteps,
|
||||
hasFunnelBeenExecuted,
|
||||
setHasFunnelBeenExecuted,
|
||||
hasUnsavedChanges,
|
||||
setIsUpdatingFunnel,
|
||||
isUpdatingFunnel,
|
||||
lastUpdatedSteps,
|
||||
setLastUpdatedSteps,
|
||||
],
|
||||
);
|
||||
|
||||
|
||||
@@ -4,11 +4,9 @@ import { Input } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { AxiosError } from 'axios';
|
||||
import SignozModal from 'components/SignozModal/SignozModal';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useCreateFunnel } from 'hooks/TracesFunnels/useFunnels';
|
||||
import { useLocalStorage } from 'hooks/useLocalStorage';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import { Check, X } from 'lucide-react';
|
||||
@@ -34,11 +32,6 @@ function CreateFunnel({
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const { pathname } = useLocation();
|
||||
|
||||
const [unexecutedFunnels, setUnexecutedFunnels] = useLocalStorage<string[]>(
|
||||
LOCALSTORAGE.UNEXECUTED_FUNNELS,
|
||||
[],
|
||||
);
|
||||
|
||||
const handleCreate = (): void => {
|
||||
createFunnelMutation.mutate(
|
||||
{
|
||||
@@ -61,9 +54,6 @@ function CreateFunnel({
|
||||
queryClient.invalidateQueries([REACT_QUERY_KEY.GET_FUNNELS_LIST]);
|
||||
|
||||
const funnelId = data?.payload?.funnel_id;
|
||||
if (funnelId) {
|
||||
setUnexecutedFunnels([...unexecutedFunnels, funnelId]);
|
||||
}
|
||||
|
||||
onClose(funnelId);
|
||||
if (funnelId && redirectToDetails) {
|
||||
|
||||
@@ -2,13 +2,16 @@ import '../RenameFunnel/RenameFunnel.styles.scss';
|
||||
import './DeleteFunnel.styles.scss';
|
||||
|
||||
import SignozModal from 'components/SignozModal/SignozModal';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useDeleteFunnel } from 'hooks/TracesFunnels/useFunnels';
|
||||
import { useLocalStorage } from 'hooks/useLocalStorage';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { Trash2, X } from 'lucide-react';
|
||||
import { useQueryClient } from 'react-query';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { FunnelStepData } from 'types/api/traceFunnels';
|
||||
|
||||
interface DeleteFunnelProps {
|
||||
isOpen: boolean;
|
||||
@@ -29,6 +32,13 @@ function DeleteFunnel({
|
||||
|
||||
const history = useHistory();
|
||||
const { pathname } = history.location;
|
||||
|
||||
// localStorage hook for funnel steps
|
||||
const localStorageKey = `${LOCALSTORAGE.FUNNEL_STEPS}_${funnelId}`;
|
||||
const [, , clearLocalStorageSavedSteps] = useLocalStorage<
|
||||
FunnelStepData[] | null
|
||||
>(localStorageKey, null);
|
||||
|
||||
const handleDelete = (): void => {
|
||||
deleteFunnelMutation.mutate(
|
||||
{
|
||||
@@ -39,6 +49,7 @@ function DeleteFunnel({
|
||||
notifications.success({
|
||||
message: 'Funnel deleted successfully',
|
||||
});
|
||||
clearLocalStorageSavedSteps();
|
||||
onClose();
|
||||
|
||||
if (
|
||||
|
||||
@@ -14,8 +14,7 @@ function TracesModulePage(): JSX.Element {
|
||||
|
||||
const routes: TabRoutes[] = [
|
||||
tracesExplorer,
|
||||
// TODO(shaheer): remove this check after everything is ready
|
||||
process.env.NODE_ENV === 'development' ? tracesFunnel(pathname) : null,
|
||||
tracesFunnel(pathname),
|
||||
tracesSaveView,
|
||||
].filter(Boolean) as TabRoutes[];
|
||||
|
||||
|
||||
@@ -105,7 +105,7 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
|
||||
TRACES_FUNNELS_DETAIL: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
API_KEYS: ['ADMIN'],
|
||||
CUSTOM_DOMAIN_SETTINGS: ['ADMIN'],
|
||||
LOGS_BASE: [],
|
||||
LOGS_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
OLD_LOGS_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
SHORTCUTS: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
INTEGRATIONS: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
@@ -120,4 +120,7 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
|
||||
API_MONITORING: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
WORKSPACE_ACCESS_RESTRICTED: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METRICS_EXPLORER_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
INFRASTRUCTURE_MONITORING_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
API_MONITORING_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
MESSAGING_QUEUES_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
};
|
||||
|
||||
12
go.mod
12
go.mod
@@ -8,7 +8,7 @@ require (
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.39
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.43
|
||||
github.com/antlr4-go/antlr/v4 v4.13.1
|
||||
github.com/antonmedv/expr v1.15.3
|
||||
github.com/cespare/xxhash/v2 v2.3.0
|
||||
@@ -69,8 +69,8 @@ require (
|
||||
go.uber.org/multierr v1.11.0
|
||||
go.uber.org/zap v1.27.0
|
||||
golang.org/x/crypto v0.38.0
|
||||
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842
|
||||
golang.org/x/oauth2 v0.24.0
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0
|
||||
golang.org/x/oauth2 v0.26.0
|
||||
golang.org/x/sync v0.14.0
|
||||
golang.org/x/text v0.25.0
|
||||
google.golang.org/protobuf v1.36.0
|
||||
@@ -125,7 +125,7 @@ require (
|
||||
github.com/go-openapi/spec v0.21.0 // indirect
|
||||
github.com/go-openapi/swag v0.23.0 // indirect
|
||||
github.com/go-openapi/validate v0.24.0 // indirect
|
||||
github.com/goccy/go-json v0.10.3 // indirect
|
||||
github.com/goccy/go-json v0.10.4 // indirect
|
||||
github.com/gofrs/uuid v4.4.0+incompatible // indirect
|
||||
github.com/gogo/protobuf v1.3.2 // indirect
|
||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf // indirect
|
||||
@@ -182,7 +182,7 @@ require (
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.111.0 // indirect
|
||||
github.com/paulmach/orb v0.11.1 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.21 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c // indirect
|
||||
@@ -267,7 +267,7 @@ require (
|
||||
golang.org/x/net v0.40.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/time v0.8.0 // indirect
|
||||
golang.org/x/tools v0.28.0 // indirect
|
||||
golang.org/x/tools v0.29.0 // indirect
|
||||
gonum.org/v1/gonum v0.15.1 // indirect
|
||||
google.golang.org/api v0.213.0 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20241216192217-9240e9c98484 // indirect
|
||||
|
||||
24
go.sum
24
go.sum
@@ -100,8 +100,8 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.39 h1:Dl8QqZNAsj2atxP572OzsszPK0XPpd3LLPNPRAUJ5wo=
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.39/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.43 h1:upWUoxDl5kCE/WI5+di2oqA/wJi2NU/PRyN8zDR078c=
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.43/go.mod h1:iUGoKEaNQmLNptTwEz9o5kZ0ntbCMQsrV53Y2TDd1Qg=
|
||||
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
@@ -322,8 +322,8 @@ github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpG
|
||||
github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||
github.com/go-zookeeper/zk v1.0.4 h1:DPzxraQx7OrPyXq2phlGlNSIyWEsAox0RJmjTseMV6I=
|
||||
github.com/go-zookeeper/zk v1.0.4/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
|
||||
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
|
||||
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
|
||||
github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA=
|
||||
@@ -758,8 +758,8 @@ github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3v
|
||||
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||
github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
|
||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
|
||||
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
|
||||
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
@@ -1138,8 +1138,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
|
||||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM=
|
||||
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc=
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
@@ -1241,8 +1241,8 @@ golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ
|
||||
golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
|
||||
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
|
||||
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
|
||||
golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE=
|
||||
golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@@ -1425,8 +1425,8 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
|
||||
golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
|
||||
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
|
||||
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
|
||||
@@ -75,17 +75,15 @@ comparison
|
||||
| key NOT CONTAINS value
|
||||
;
|
||||
|
||||
// in(...) or in[...] - now also supports variables
|
||||
// in(...) or in[...]
|
||||
inClause
|
||||
: IN LPAREN valueList RPAREN
|
||||
| IN LBRACK valueList RBRACK
|
||||
| IN variable // NEW: support for IN $var, IN {{var}}, IN [[var]]
|
||||
;
|
||||
|
||||
notInClause
|
||||
: NOT IN LPAREN valueList RPAREN
|
||||
| NOT IN LBRACK valueList RBRACK
|
||||
| NOT IN variable // NEW: support for NOT IN $var, etc.
|
||||
;
|
||||
|
||||
// List of values for in(...) or in[...]
|
||||
@@ -128,21 +126,13 @@ array
|
||||
|
||||
/*
|
||||
* A 'value' can be a string literal (double or single-quoted),
|
||||
// a numeric literal, boolean, a "bare" token, or a variable.
|
||||
// a numeric literal, boolean, or a "bare" token as needed.
|
||||
*/
|
||||
value
|
||||
: QUOTED_TEXT
|
||||
| NUMBER
|
||||
| BOOL
|
||||
| KEY
|
||||
| variable // NEW: variables can be used as values
|
||||
;
|
||||
|
||||
// NEW: Variable rule to support different variable syntaxes
|
||||
variable
|
||||
: DOLLAR_VAR
|
||||
| CURLY_VAR
|
||||
| SQUARE_VAR
|
||||
;
|
||||
|
||||
/*
|
||||
@@ -200,11 +190,6 @@ BOOL
|
||||
| [Ff][Aa][Ll][Ss][Ee]
|
||||
;
|
||||
|
||||
// NEW: Variable token types
|
||||
DOLLAR_VAR : '$' [a-zA-Z_] [a-zA-Z0-9._]* ;
|
||||
CURLY_VAR : '{{' [ \t]* '.'? [a-zA-Z_] [a-zA-Z0-9._]* [ \t]* '}}' ;
|
||||
SQUARE_VAR : '[[' [ \t]* '.'? [a-zA-Z_] [a-zA-Z0-9._]* [ \t]* ']]' ;
|
||||
|
||||
fragment SIGN : [+-] ;
|
||||
|
||||
// Numbers: optional sign, then digits, optional fractional part,
|
||||
|
||||
@@ -12,4 +12,16 @@ type Analytics interface {
|
||||
|
||||
// Sends analytics messages to an analytics backend.
|
||||
Send(context.Context, ...analyticstypes.Message)
|
||||
|
||||
// Tracks an event on a group level. Input is group, event name, and attributes. The user is "stats_<org_id>".
|
||||
TrackGroup(context.Context, string, string, map[string]any)
|
||||
|
||||
// Tracks an event on a user level and attributes it with the group. Input is group, user, event name, and attributes.
|
||||
TrackUser(context.Context, string, string, string, map[string]any)
|
||||
|
||||
// Identifies a group. Input is group, traits.
|
||||
IdentifyGroup(context.Context, string, map[string]any)
|
||||
|
||||
// Identifies a user. Input is group, user, traits.
|
||||
IdentifyUser(context.Context, string, string, map[string]any)
|
||||
}
|
||||
|
||||
@@ -24,6 +24,18 @@ func (provider *Provider) Start(_ context.Context) error {
|
||||
|
||||
func (provider *Provider) Send(ctx context.Context, messages ...analyticstypes.Message) {}
|
||||
|
||||
func (provider *Provider) TrackGroup(ctx context.Context, group, event string, attributes map[string]any) {
|
||||
}
|
||||
|
||||
func (provider *Provider) TrackUser(ctx context.Context, group, user, event string, attributes map[string]any) {
|
||||
}
|
||||
|
||||
func (provider *Provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
|
||||
}
|
||||
|
||||
func (provider *Provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
|
||||
}
|
||||
|
||||
func (provider *Provider) Stop(_ context.Context) error {
|
||||
close(provider.stopC)
|
||||
return nil
|
||||
|
||||
@@ -27,7 +27,25 @@ func (provider *provider) Start(_ context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.Message) {}
|
||||
func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.Message) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
func (provider *provider) TrackGroup(ctx context.Context, group, event string, attributes map[string]any) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
func (provider *provider) TrackUser(ctx context.Context, group, user, event string, attributes map[string]any) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
func (provider *provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
func (provider *provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
func (provider *provider) Stop(_ context.Context) error {
|
||||
close(provider.stopC)
|
||||
|
||||
@@ -50,6 +50,100 @@ func (provider *provider) Send(ctx context.Context, messages ...analyticstypes.M
|
||||
}
|
||||
}
|
||||
|
||||
func (provider *provider) TrackGroup(ctx context.Context, group, event string, properties map[string]any) {
|
||||
if properties == nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping event", "group", group, "event", event)
|
||||
return
|
||||
}
|
||||
|
||||
err := provider.client.Enqueue(analyticstypes.Track{
|
||||
UserId: "stats_" + group,
|
||||
Event: event,
|
||||
Properties: analyticstypes.NewPropertiesFromMap(properties),
|
||||
Context: &analyticstypes.Context{
|
||||
Extra: map[string]interface{}{
|
||||
analyticstypes.KeyGroupID: group,
|
||||
},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (provider *provider) TrackUser(ctx context.Context, group, user, event string, properties map[string]any) {
|
||||
if properties == nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping event", "user", user, "group", group, "event", event)
|
||||
return
|
||||
}
|
||||
|
||||
err := provider.client.Enqueue(analyticstypes.Track{
|
||||
UserId: user,
|
||||
Event: event,
|
||||
Properties: analyticstypes.NewPropertiesFromMap(properties),
|
||||
Context: &analyticstypes.Context{
|
||||
Extra: map[string]interface{}{
|
||||
analyticstypes.KeyGroupID: group,
|
||||
},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (provider *provider) IdentifyGroup(ctx context.Context, group string, traits map[string]any) {
|
||||
if traits == nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping identify", "group", group)
|
||||
return
|
||||
}
|
||||
|
||||
// identify the user
|
||||
err := provider.client.Enqueue(analyticstypes.Identify{
|
||||
UserId: "stats_" + group,
|
||||
Traits: analyticstypes.NewTraitsFromMap(traits),
|
||||
})
|
||||
if err != nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
|
||||
}
|
||||
|
||||
// identify the group using the stats user
|
||||
err = provider.client.Enqueue(analyticstypes.Group{
|
||||
UserId: "stats_" + group,
|
||||
GroupId: group,
|
||||
Traits: analyticstypes.NewTraitsFromMap(traits),
|
||||
})
|
||||
if err != nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (provider *provider) IdentifyUser(ctx context.Context, group, user string, traits map[string]any) {
|
||||
if traits == nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "empty attributes received, skipping identify", "user", user, "group", group)
|
||||
return
|
||||
}
|
||||
|
||||
// identify the user
|
||||
err := provider.client.Enqueue(analyticstypes.Identify{
|
||||
UserId: user,
|
||||
Traits: analyticstypes.NewTraitsFromMap(traits),
|
||||
})
|
||||
if err != nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
|
||||
}
|
||||
|
||||
// associate the user with the group
|
||||
err = provider.client.Enqueue(analyticstypes.Group{
|
||||
UserId: user,
|
||||
GroupId: group,
|
||||
Traits: analyticstypes.NewTraits().Set("id", group), // A trait is required
|
||||
})
|
||||
if err != nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "unable to send message to segment", "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (provider *provider) Stop(ctx context.Context) error {
|
||||
if err := provider.client.Close(); err != nil {
|
||||
provider.settings.Logger().WarnContext(ctx, "unable to close segment client", "err", err)
|
||||
|
||||
@@ -70,7 +70,7 @@ func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, er
|
||||
}
|
||||
}
|
||||
|
||||
name := r.URL.Query().Get("searchText")
|
||||
name := r.URL.Query().Get("name")
|
||||
|
||||
req = telemetrytypes.FieldKeySelector{
|
||||
StartUnixMilli: startUnixMilli,
|
||||
@@ -92,10 +92,8 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
|
||||
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
|
||||
}
|
||||
|
||||
name := r.URL.Query().Get("name")
|
||||
keySelector.Name = name
|
||||
existingQuery := r.URL.Query().Get("existingQuery")
|
||||
value := r.URL.Query().Get("searchText")
|
||||
value := r.URL.Query().Get("value")
|
||||
|
||||
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
|
||||
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
@@ -45,19 +44,7 @@ func (module *module) Create(ctx context.Context, orgID valuer.UUID, createdBy s
|
||||
return nil, err
|
||||
}
|
||||
|
||||
module.analytics.Send(ctx,
|
||||
analyticstypes.Track{
|
||||
UserId: creator.String(),
|
||||
Event: "Dashboard Created",
|
||||
Properties: analyticstypes.NewPropertiesFromMap(dashboardtypes.NewStatsFromStorableDashboards([]*dashboardtypes.StorableDashboard{storableDashboard})),
|
||||
Context: &analyticstypes.Context{
|
||||
Extra: map[string]interface{}{
|
||||
analyticstypes.KeyGroupID: orgID,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
module.analytics.TrackUser(ctx, orgID.String(), creator.String(), "Dashboard Created", dashboardtypes.NewStatsFromStorableDashboards([]*dashboardtypes.StorableDashboard{storableDashboard}))
|
||||
return dashboard, nil
|
||||
}
|
||||
|
||||
|
||||
792
pkg/modules/tracefunnel/clickhouse_queries.go
Normal file
792
pkg/modules/tracefunnel/clickhouse_queries.go
Normal file
@@ -0,0 +1,792 @@
|
||||
package tracefunnel
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func BuildTwoStepFunnelValidationQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
toDateTime64(%[3]d/1e9, 9) AS start_ts,
|
||||
toDateTime64(%[4]d/1e9, 9) AS end_ts,
|
||||
|
||||
('%[5]s','%[6]s') AS step1,
|
||||
('%[7]s','%[8]s') AS step2
|
||||
|
||||
SELECT
|
||||
trace_id
|
||||
FROM (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[9]s)
|
||||
OR
|
||||
(serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[10]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
)
|
||||
ORDER BY t1_time
|
||||
LIMIT 5;`
|
||||
return fmt.Sprintf(queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildThreeStepFunnelValidationQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
containsErrorT3 int,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
serviceNameT3 string,
|
||||
spanNameT3 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
clauseStep3 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
%[3]d AS contains_error_t3,
|
||||
toDateTime64(%[4]d/1e9, 9) AS start_ts,
|
||||
toDateTime64(%[5]d/1e9, 9) AS end_ts,
|
||||
|
||||
('%[6]s','%[7]s') AS step1,
|
||||
('%[8]s','%[9]s') AS step2,
|
||||
('%[10]s','%[11]s') AS step3
|
||||
|
||||
SELECT
|
||||
trace_id
|
||||
FROM (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
minIf(timestamp, serviceName = step3.1 AND name = step3.2) AS t3_time
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[12]s)
|
||||
OR (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[13]s)
|
||||
OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[14]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
)
|
||||
ORDER BY t1_time
|
||||
LIMIT 5;`
|
||||
return fmt.Sprintf(queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
serviceNameT3,
|
||||
spanNameT3,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildTwoStepFunnelOverviewQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
latencyPointerT1 string,
|
||||
latencyPointerT2 string,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
'%[3]s' AS latency_pointer_t1,
|
||||
'%[4]s' AS latency_pointer_t2,
|
||||
toDateTime64(%[5]d/1e9, 9) AS start_ts,
|
||||
toDateTime64(%[6]d/1e9, 9) AS end_ts,
|
||||
(%[6]d - %[5]d)/1e9 AS time_window_sec,
|
||||
|
||||
('%[7]s','%[8]s') AS step1,
|
||||
('%[9]s','%[10]s') AS step2
|
||||
|
||||
, funnel AS (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[11]s)
|
||||
OR
|
||||
(serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[12]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
)
|
||||
|
||||
, totals AS (
|
||||
SELECT
|
||||
count(DISTINCT trace_id) AS total_s1_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
|
||||
count(DISTINCT CASE WHEN s1_error = 1 THEN trace_id END) AS sum_s1_error,
|
||||
count(DISTINCT CASE WHEN s2_error = 1 THEN trace_id END) AS sum_s2_error,
|
||||
avgIf((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time) AS avg_duration,
|
||||
quantileIf(0.99)((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time) AS latency
|
||||
FROM funnel
|
||||
)
|
||||
|
||||
SELECT
|
||||
round(if(total_s1_spans > 0, total_s2_spans * 100.0 / total_s1_spans, 0), 2) AS conversion_rate,
|
||||
total_s2_spans / time_window_sec AS avg_rate,
|
||||
greatest(sum_s1_error, sum_s2_error) AS errors,
|
||||
avg_duration,
|
||||
latency
|
||||
FROM totals;
|
||||
`
|
||||
return fmt.Sprintf(queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildThreeStepFunnelOverviewQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
containsErrorT3 int,
|
||||
latencyPointerT1 string,
|
||||
latencyPointerT2 string,
|
||||
latencyPointerT3 string,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
serviceNameT3 string,
|
||||
spanNameT3 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
clauseStep3 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
%[3]d AS contains_error_t3,
|
||||
'%[4]s' AS latency_pointer_t1,
|
||||
'%[5]s' AS latency_pointer_t2,
|
||||
'%[6]s' AS latency_pointer_t3,
|
||||
toDateTime64(%[7]d/1e9, 9) AS start_ts,
|
||||
toDateTime64(%[8]d/1e9, 9) AS end_ts,
|
||||
(%[8]d - %[7]d)/1e9 AS time_window_sec,
|
||||
|
||||
('%[9]s','%[10]s') AS step1,
|
||||
('%[11]s','%[12]s') AS step2,
|
||||
('%[13]s','%[14]s') AS step3
|
||||
|
||||
, funnel AS (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
minIf(timestamp, serviceName = step3.1 AND name = step3.2) AS t3_time,
|
||||
toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step3.1 AND name = step3.2)) AS s3_error
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[15]s)
|
||||
OR (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[16]s)
|
||||
OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[17]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
)
|
||||
|
||||
, totals AS (
|
||||
SELECT
|
||||
count(DISTINCT trace_id) AS total_s1_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
|
||||
count(DISTINCT CASE WHEN t3_time > t2_time AND t2_time > t1_time THEN trace_id END) AS total_s3_spans,
|
||||
|
||||
count(DISTINCT CASE WHEN s1_error = 1 THEN trace_id END) AS sum_s1_error,
|
||||
count(DISTINCT CASE WHEN s2_error = 1 THEN trace_id END) AS sum_s2_error,
|
||||
count(DISTINCT CASE WHEN s3_error = 1 THEN trace_id END) AS sum_s3_error,
|
||||
|
||||
avgIf((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time AND t3_time > t2_time) AS avg_funnel_duration,
|
||||
quantileIf(0.99)((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t1_time))/1e6, t1_time > 0 AND t2_time > t1_time AND t3_time > t2_time) AS p99_funnel_latency
|
||||
|
||||
|
||||
FROM funnel
|
||||
)
|
||||
|
||||
SELECT
|
||||
round(if(total_s1_spans > 0, total_s3_spans * 100.0 / total_s1_spans, 0), 2) AS conversion_rate,
|
||||
total_s3_spans / nullIf(time_window_sec, 0) AS avg_rate,
|
||||
greatest(sum_s1_error, sum_s2_error, sum_s3_error) AS errors,
|
||||
avg_funnel_duration AS avg_duration,
|
||||
p99_funnel_latency AS latency
|
||||
FROM totals;
|
||||
`
|
||||
return fmt.Sprintf(
|
||||
queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
latencyPointerT3,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
serviceNameT3,
|
||||
spanNameT3,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildTwoStepFunnelCountQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
toDateTime64(%[3]d/1e9,9) AS start_ts,
|
||||
toDateTime64(%[4]d/1e9,9) AS end_ts,
|
||||
|
||||
('%[5]s','%[6]s') AS step1,
|
||||
('%[7]s','%[8]s') AS step2
|
||||
|
||||
SELECT
|
||||
count(DISTINCT trace_id) AS total_s1_spans,
|
||||
count(DISTINCT CASE WHEN t1_error = 1 THEN trace_id END) AS total_s1_errored_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time AND t2_error = 1 THEN trace_id END) AS total_s2_errored_spans
|
||||
FROM (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS t1_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS t2_error
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[9]s)
|
||||
OR
|
||||
(serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[10]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
) AS funnel;
|
||||
`
|
||||
return fmt.Sprintf(queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildThreeStepFunnelCountQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
containsErrorT3 int,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
serviceNameT3 string,
|
||||
spanNameT3 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
clauseStep3 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
%[3]d AS contains_error_t3,
|
||||
toDateTime64(%[4]d/1e9,9) AS start_ts,
|
||||
toDateTime64(%[5]d/1e9,9) AS end_ts,
|
||||
|
||||
('%[6]s','%[7]s') AS step1,
|
||||
('%[8]s','%[9]s') AS step2,
|
||||
('%[10]s','%[11]s') AS step3
|
||||
|
||||
SELECT
|
||||
count(DISTINCT trace_id) AS total_s1_spans,
|
||||
count(DISTINCT CASE WHEN t1_error = 1 THEN trace_id END) AS total_s1_errored_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time AND t2_error = 1 THEN trace_id END) AS total_s2_errored_spans,
|
||||
count(DISTINCT CASE WHEN t3_time > t2_time AND t2_time > t1_time THEN trace_id END) AS total_s3_spans,
|
||||
count(DISTINCT CASE WHEN t3_time > t2_time AND t2_time > t1_time AND t3_error = 1 THEN trace_id END) AS total_s3_errored_spans
|
||||
FROM (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
minIf(timestamp, serviceName = step3.1 AND name = step3.2) AS t3_time,
|
||||
toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS t1_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS t2_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step3.1 AND name = step3.2)) AS t3_error
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[12]s)
|
||||
OR (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[13]s)
|
||||
OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[14]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
) AS funnel;
|
||||
`
|
||||
return fmt.Sprintf(queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
serviceNameT3,
|
||||
spanNameT3,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildTwoStepFunnelTopSlowTracesQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
toDateTime64(%[3]d/1e9, 9) AS start_ts,
|
||||
toDateTime64(%[4]d/1e9, 9) AS end_ts,
|
||||
|
||||
('%[5]s','%[6]s') AS step1,
|
||||
('%[7]s','%[8]s') AS step2
|
||||
|
||||
SELECT
|
||||
trace_id,
|
||||
(toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6 AS duration_ms,
|
||||
span_count
|
||||
FROM (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
count() AS span_count
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[9]s)
|
||||
OR
|
||||
(serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[10]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0 AND t2_time > t1_time
|
||||
) AS funnel
|
||||
ORDER BY duration_ms DESC
|
||||
LIMIT 5;
|
||||
`
|
||||
return fmt.Sprintf(queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildTwoStepFunnelTopSlowErrorTracesQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
) string {
|
||||
queryTemplate := `
|
||||
WITH
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
toDateTime64(%[3]d/1e9, 9) AS start_ts,
|
||||
toDateTime64(%[4]d/1e9, 9) AS end_ts,
|
||||
|
||||
('%[5]s','%[6]s') AS step1,
|
||||
('%[7]s','%[8]s') AS step2
|
||||
|
||||
SELECT
|
||||
trace_id,
|
||||
(toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6 AS duration_ms,
|
||||
span_count
|
||||
FROM (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS t1_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS t2_error,
|
||||
count() AS span_count
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[9]s)
|
||||
OR
|
||||
(serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[10]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0 AND t2_time > t1_time
|
||||
) AS funnel
|
||||
WHERE
|
||||
(t1_error = 1 OR t2_error = 1)
|
||||
ORDER BY duration_ms DESC
|
||||
LIMIT 5;
|
||||
`
|
||||
return fmt.Sprintf(queryTemplate,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildTwoStepFunnelStepOverviewQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
latencyPointerT1 string,
|
||||
latencyPointerT2 string,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
latencyTypeT2 string,
|
||||
) string {
|
||||
const tpl = `
|
||||
WITH
|
||||
toDateTime64(%[5]d / 1e9, 9) AS start_ts,
|
||||
toDateTime64(%[6]d / 1e9, 9) AS end_ts,
|
||||
(%[6]d - %[5]d) / 1e9 AS time_window_sec,
|
||||
|
||||
('%[7]s', '%[8]s') AS step1,
|
||||
('%[9]s', '%[10]s') AS step2,
|
||||
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2
|
||||
|
||||
SELECT
|
||||
round(total_s2_spans * 100.0 / total_s1_spans, 2) AS conversion_rate,
|
||||
total_s2_spans / time_window_sec AS avg_rate,
|
||||
greatest(sum_s1_error, sum_s2_error) AS errors,
|
||||
avg_duration,
|
||||
latency
|
||||
FROM (
|
||||
SELECT
|
||||
count(DISTINCT trace_id) AS total_s1_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
|
||||
count(DISTINCT CASE WHEN s1_error = 1 THEN trace_id END) AS sum_s1_error,
|
||||
count(DISTINCT CASE WHEN s2_error = 1 THEN trace_id END) AS sum_s2_error,
|
||||
|
||||
avgIf(
|
||||
(toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6,
|
||||
t1_time > 0 AND t2_time > t1_time
|
||||
) AS avg_duration,
|
||||
|
||||
quantileIf(%[13]s)(
|
||||
(toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6,
|
||||
t1_time > 0 AND t2_time > t1_time
|
||||
) AS latency
|
||||
FROM (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[11]s)
|
||||
OR
|
||||
(serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[12]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
) AS funnel
|
||||
) AS totals;
|
||||
`
|
||||
|
||||
return fmt.Sprintf(tpl,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
latencyTypeT2,
|
||||
)
|
||||
}
|
||||
|
||||
func BuildThreeStepFunnelStepOverviewQuery(
|
||||
containsErrorT1 int,
|
||||
containsErrorT2 int,
|
||||
containsErrorT3 int,
|
||||
latencyPointerT1 string,
|
||||
latencyPointerT2 string,
|
||||
latencyPointerT3 string,
|
||||
startTs int64,
|
||||
endTs int64,
|
||||
serviceNameT1 string,
|
||||
spanNameT1 string,
|
||||
serviceNameT2 string,
|
||||
spanNameT2 string,
|
||||
serviceNameT3 string,
|
||||
spanNameT3 string,
|
||||
clauseStep1 string,
|
||||
clauseStep2 string,
|
||||
clauseStep3 string,
|
||||
stepStart int64,
|
||||
stepEnd int64,
|
||||
latencyTypeT2 string,
|
||||
latencyTypeT3 string,
|
||||
) string {
|
||||
const baseWithAndFunnel = `
|
||||
WITH
|
||||
toDateTime64(%[7]d/1e9, 9) AS start_ts,
|
||||
toDateTime64(%[8]d/1e9, 9) AS end_ts,
|
||||
(%[8]d - %[7]d) / 1e9 AS time_window_sec,
|
||||
|
||||
('%[9]s','%[10]s') AS step1,
|
||||
('%[11]s','%[12]s') AS step2,
|
||||
('%[13]s','%[14]s') AS step3,
|
||||
|
||||
%[1]d AS contains_error_t1,
|
||||
%[2]d AS contains_error_t2,
|
||||
%[3]d AS contains_error_t3,
|
||||
|
||||
funnel AS (
|
||||
SELECT
|
||||
trace_id,
|
||||
minIf(timestamp, serviceName = step1.1 AND name = step1.2) AS t1_time,
|
||||
minIf(timestamp, serviceName = step2.1 AND name = step2.2) AS t2_time,
|
||||
minIf(timestamp, serviceName = step3.1 AND name = step3.2) AS t3_time,
|
||||
toUInt8(anyIf(has_error, serviceName = step1.1 AND name = step1.2)) AS s1_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step2.1 AND name = step2.2)) AS s2_error,
|
||||
toUInt8(anyIf(has_error, serviceName = step3.1 AND name = step3.2)) AS s3_error
|
||||
FROM signoz_traces.distributed_signoz_index_v3
|
||||
WHERE
|
||||
timestamp BETWEEN start_ts AND end_ts
|
||||
AND (
|
||||
(serviceName = step1.1 AND name = step1.2 AND (contains_error_t1 = 0 OR has_error = true) %[15]s)
|
||||
OR (serviceName = step2.1 AND name = step2.2 AND (contains_error_t2 = 0 OR has_error = true) %[16]s)
|
||||
OR (serviceName = step3.1 AND name = step3.2 AND (contains_error_t3 = 0 OR has_error = true) %[17]s)
|
||||
)
|
||||
GROUP BY trace_id
|
||||
HAVING t1_time > 0
|
||||
)
|
||||
`
|
||||
|
||||
const totals12 = `
|
||||
SELECT
|
||||
round(if(total_s1_spans > 0, total_s2_spans * 100.0 / total_s1_spans, 0), 2) AS conversion_rate,
|
||||
total_s2_spans / time_window_sec AS avg_rate,
|
||||
greatest(sum_s1_error, sum_s2_error) AS errors,
|
||||
avg_duration_12 AS avg_duration,
|
||||
latency_12 AS latency
|
||||
FROM (
|
||||
SELECT
|
||||
count(DISTINCT CASE WHEN t2_time > t1_time THEN trace_id END) AS total_s2_spans,
|
||||
count(DISTINCT trace_id) AS total_s1_spans,
|
||||
count(DISTINCT CASE WHEN s1_error = 1 THEN trace_id END) AS sum_s1_error,
|
||||
count(DISTINCT CASE WHEN s2_error = 1 THEN trace_id END) AS sum_s2_error,
|
||||
avgIf((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6, t1_time > 0 AND t2_time > t1_time) AS avg_duration_12,
|
||||
quantileIf(%[18]s)((toUnixTimestamp64Nano(t2_time) - toUnixTimestamp64Nano(t1_time)) / 1e6, t1_time > 0 AND t2_time > t1_time) AS latency_12
|
||||
FROM funnel
|
||||
) AS totals;
|
||||
`
|
||||
|
||||
const totals23 = `
|
||||
SELECT
|
||||
round(if(total_s2_spans > 0, total_s3_spans * 100.0 / total_s2_spans, 0), 2) AS conversion_rate,
|
||||
total_s3_spans / time_window_sec AS avg_rate,
|
||||
greatest(sum_s2_error, sum_s3_error) AS errors,
|
||||
avg_duration_23 AS avg_duration,
|
||||
latency_23 AS latency
|
||||
FROM (
|
||||
SELECT
|
||||
count(DISTINCT CASE WHEN t2_time > 0 AND t3_time > t2_time THEN trace_id END) AS total_s3_spans,
|
||||
count(DISTINCT CASE WHEN t2_time > 0 THEN trace_id END) AS total_s2_spans,
|
||||
count(DISTINCT CASE WHEN s2_error = 1 THEN trace_id END) AS sum_s2_error,
|
||||
count(DISTINCT CASE WHEN s3_error = 1 THEN trace_id END) AS sum_s3_error,
|
||||
avgIf((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t2_time)) / 1e6, t2_time > 0 AND t3_time > t2_time) AS avg_duration_23,
|
||||
quantileIf(%[19]s)((toUnixTimestamp64Nano(t3_time) - toUnixTimestamp64Nano(t2_time)) / 1e6, t2_time > 0 AND t3_time > t2_time) AS latency_23
|
||||
FROM funnel
|
||||
) AS totals;
|
||||
`
|
||||
|
||||
const fallback = `
|
||||
SELECT 0 AS conversion_rate, 0 AS avg_rate, 0 AS errors, 0 AS avg_duration, 0 AS latency;
|
||||
`
|
||||
|
||||
var totalsTpl string
|
||||
switch {
|
||||
case stepStart == 1 && stepEnd == 2:
|
||||
totalsTpl = totals12
|
||||
case stepStart == 2 && stepEnd == 3:
|
||||
totalsTpl = totals23
|
||||
default:
|
||||
totalsTpl = fallback
|
||||
}
|
||||
|
||||
return fmt.Sprintf(
|
||||
baseWithAndFunnel+totalsTpl,
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
latencyPointerT3,
|
||||
startTs,
|
||||
endTs,
|
||||
serviceNameT1,
|
||||
spanNameT1,
|
||||
serviceNameT2,
|
||||
spanNameT2,
|
||||
serviceNameT3,
|
||||
spanNameT3,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
latencyTypeT2,
|
||||
latencyTypeT3,
|
||||
)
|
||||
}
|
||||
475
pkg/modules/tracefunnel/query.go
Normal file
475
pkg/modules/tracefunnel/query.go
Normal file
@@ -0,0 +1,475 @@
|
||||
package tracefunnel
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
tracev4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
|
||||
)
|
||||
|
||||
// sanitizeClause adds AND prefix to non-empty clauses if not already present
|
||||
// sanitizeClause normalizes a filter clause so it can be appended to an
// existing WHERE condition: empty or whitespace-only clauses collapse to ""
// (never emitting a dangling "AND "), and any other clause is prefixed with
// "AND " unless it already begins with the AND keyword.
func sanitizeClause(clause string) string {
	trimmed := strings.TrimSpace(clause)
	if trimmed == "" {
		// Whitespace-only input would previously become "AND <spaces>",
		// which is invalid SQL; treat it the same as empty.
		return ""
	}
	// Match the AND keyword only at a word boundary: a clause starting with
	// an identifier such as "ANDROID" must still receive the prefix.
	if trimmed == "AND" || strings.HasPrefix(trimmed, "AND ") || strings.HasPrefix(trimmed, "AND(") {
		return clause
	}
	return "AND " + clause
}
|
||||
|
||||
func ValidateTraces(funnel *tracefunneltypes.StorableFunnel, timeRange tracefunneltypes.TimeRange) (*v3.ClickHouseQuery, error) {
|
||||
var query string
|
||||
var err error
|
||||
|
||||
funnelSteps := funnel.Steps
|
||||
containsErrorT1 := 0
|
||||
containsErrorT2 := 0
|
||||
containsErrorT3 := 0
|
||||
|
||||
if funnelSteps[0].HasErrors {
|
||||
containsErrorT1 = 1
|
||||
}
|
||||
if funnelSteps[1].HasErrors {
|
||||
containsErrorT2 = 1
|
||||
}
|
||||
if len(funnel.Steps) > 2 && funnelSteps[2].HasErrors {
|
||||
containsErrorT3 = 1
|
||||
}
|
||||
|
||||
// Build filter clauses for each step
|
||||
clauseStep1, err := tracev4.BuildTracesFilter(funnelSteps[0].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep2, err := tracev4.BuildTracesFilter(funnelSteps[1].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep3 := ""
|
||||
if len(funnel.Steps) > 2 {
|
||||
clauseStep3, err = tracev4.BuildTracesFilter(funnelSteps[2].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Sanitize clauses
|
||||
clauseStep1 = sanitizeClause(clauseStep1)
|
||||
clauseStep2 = sanitizeClause(clauseStep2)
|
||||
clauseStep3 = sanitizeClause(clauseStep3)
|
||||
|
||||
if len(funnel.Steps) > 2 {
|
||||
query = BuildThreeStepFunnelValidationQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
funnelSteps[2].ServiceName,
|
||||
funnelSteps[2].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
)
|
||||
} else {
|
||||
query = BuildTwoStepFunnelValidationQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
|
||||
return &v3.ClickHouseQuery{
|
||||
Query: query,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func GetFunnelAnalytics(funnel *tracefunneltypes.StorableFunnel, timeRange tracefunneltypes.TimeRange) (*v3.ClickHouseQuery, error) {
|
||||
var query string
|
||||
var err error
|
||||
|
||||
funnelSteps := funnel.Steps
|
||||
containsErrorT1 := 0
|
||||
containsErrorT2 := 0
|
||||
containsErrorT3 := 0
|
||||
latencyPointerT1 := funnelSteps[0].LatencyPointer
|
||||
latencyPointerT2 := funnelSteps[1].LatencyPointer
|
||||
latencyPointerT3 := "start"
|
||||
if len(funnel.Steps) > 2 {
|
||||
latencyPointerT3 = funnelSteps[2].LatencyPointer
|
||||
}
|
||||
|
||||
if funnelSteps[0].HasErrors {
|
||||
containsErrorT1 = 1
|
||||
}
|
||||
if funnelSteps[1].HasErrors {
|
||||
containsErrorT2 = 1
|
||||
}
|
||||
if len(funnel.Steps) > 2 && funnelSteps[2].HasErrors {
|
||||
containsErrorT3 = 1
|
||||
}
|
||||
|
||||
// Build filter clauses for each step
|
||||
clauseStep1, err := tracev4.BuildTracesFilter(funnelSteps[0].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep2, err := tracev4.BuildTracesFilter(funnelSteps[1].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep3 := ""
|
||||
if len(funnel.Steps) > 2 {
|
||||
clauseStep3, err = tracev4.BuildTracesFilter(funnelSteps[2].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Sanitize clauses
|
||||
clauseStep1 = sanitizeClause(clauseStep1)
|
||||
clauseStep2 = sanitizeClause(clauseStep2)
|
||||
clauseStep3 = sanitizeClause(clauseStep3)
|
||||
|
||||
if len(funnel.Steps) > 2 {
|
||||
query = BuildThreeStepFunnelOverviewQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
latencyPointerT3,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
funnelSteps[2].ServiceName,
|
||||
funnelSteps[2].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
)
|
||||
} else {
|
||||
query = BuildTwoStepFunnelOverviewQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
return &v3.ClickHouseQuery{Query: query}, nil
|
||||
}
|
||||
|
||||
func GetFunnelStepAnalytics(funnel *tracefunneltypes.StorableFunnel, timeRange tracefunneltypes.TimeRange, stepStart, stepEnd int64) (*v3.ClickHouseQuery, error) {
|
||||
var query string
|
||||
var err error
|
||||
|
||||
funnelSteps := funnel.Steps
|
||||
containsErrorT1 := 0
|
||||
containsErrorT2 := 0
|
||||
containsErrorT3 := 0
|
||||
latencyPointerT1 := funnelSteps[0].LatencyPointer
|
||||
latencyPointerT2 := funnelSteps[1].LatencyPointer
|
||||
latencyPointerT3 := "start"
|
||||
if len(funnel.Steps) > 2 {
|
||||
latencyPointerT3 = funnelSteps[2].LatencyPointer
|
||||
}
|
||||
latencyTypeT2 := "0.99"
|
||||
latencyTypeT3 := "0.99"
|
||||
|
||||
if stepStart == stepEnd {
|
||||
return nil, fmt.Errorf("step start and end cannot be the same for /step/overview")
|
||||
}
|
||||
|
||||
if funnelSteps[0].HasErrors {
|
||||
containsErrorT1 = 1
|
||||
}
|
||||
if funnelSteps[1].HasErrors {
|
||||
containsErrorT2 = 1
|
||||
}
|
||||
if len(funnel.Steps) > 2 && funnelSteps[2].HasErrors {
|
||||
containsErrorT3 = 1
|
||||
}
|
||||
|
||||
if funnelSteps[1].LatencyType != "" {
|
||||
latency := strings.ToLower(funnelSteps[1].LatencyType)
|
||||
if latency == "p90" {
|
||||
latencyTypeT2 = "0.90"
|
||||
} else if latency == "p95" {
|
||||
latencyTypeT2 = "0.95"
|
||||
} else {
|
||||
latencyTypeT2 = "0.99"
|
||||
}
|
||||
}
|
||||
if len(funnel.Steps) > 2 && funnelSteps[2].LatencyType != "" {
|
||||
latency := strings.ToLower(funnelSteps[2].LatencyType)
|
||||
if latency == "p90" {
|
||||
latencyTypeT3 = "0.90"
|
||||
} else if latency == "p95" {
|
||||
latencyTypeT3 = "0.95"
|
||||
} else {
|
||||
latencyTypeT3 = "0.99"
|
||||
}
|
||||
}
|
||||
|
||||
// Build filter clauses for each step
|
||||
clauseStep1, err := tracev4.BuildTracesFilter(funnelSteps[0].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep2, err := tracev4.BuildTracesFilter(funnelSteps[1].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep3 := ""
|
||||
if len(funnel.Steps) > 2 {
|
||||
clauseStep3, err = tracev4.BuildTracesFilter(funnelSteps[2].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Sanitize clauses
|
||||
clauseStep1 = sanitizeClause(clauseStep1)
|
||||
clauseStep2 = sanitizeClause(clauseStep2)
|
||||
clauseStep3 = sanitizeClause(clauseStep3)
|
||||
|
||||
if len(funnel.Steps) > 2 {
|
||||
query = BuildThreeStepFunnelStepOverviewQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
latencyPointerT3,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
funnelSteps[2].ServiceName,
|
||||
funnelSteps[2].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
stepStart,
|
||||
stepEnd,
|
||||
latencyTypeT2,
|
||||
latencyTypeT3,
|
||||
)
|
||||
} else {
|
||||
query = BuildTwoStepFunnelStepOverviewQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
latencyPointerT1,
|
||||
latencyPointerT2,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
latencyTypeT2,
|
||||
)
|
||||
}
|
||||
return &v3.ClickHouseQuery{Query: query}, nil
|
||||
}
|
||||
|
||||
func GetStepAnalytics(funnel *tracefunneltypes.StorableFunnel, timeRange tracefunneltypes.TimeRange) (*v3.ClickHouseQuery, error) {
|
||||
var query string
|
||||
|
||||
funnelSteps := funnel.Steps
|
||||
containsErrorT1 := 0
|
||||
containsErrorT2 := 0
|
||||
containsErrorT3 := 0
|
||||
|
||||
if funnelSteps[0].HasErrors {
|
||||
containsErrorT1 = 1
|
||||
}
|
||||
if funnelSteps[1].HasErrors {
|
||||
containsErrorT2 = 1
|
||||
}
|
||||
if len(funnel.Steps) > 2 && funnelSteps[2].HasErrors {
|
||||
containsErrorT3 = 1
|
||||
}
|
||||
|
||||
// Build filter clauses for each step
|
||||
clauseStep1, err := tracev4.BuildTracesFilter(funnelSteps[0].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep2, err := tracev4.BuildTracesFilter(funnelSteps[1].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep3 := ""
|
||||
if len(funnel.Steps) > 2 {
|
||||
clauseStep3, err = tracev4.BuildTracesFilter(funnelSteps[2].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Sanitize clauses
|
||||
clauseStep1 = sanitizeClause(clauseStep1)
|
||||
clauseStep2 = sanitizeClause(clauseStep2)
|
||||
clauseStep3 = sanitizeClause(clauseStep3)
|
||||
|
||||
if len(funnel.Steps) > 2 {
|
||||
query = BuildThreeStepFunnelCountQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
containsErrorT3,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
funnelSteps[2].ServiceName,
|
||||
funnelSteps[2].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
clauseStep3,
|
||||
)
|
||||
} else {
|
||||
query = BuildTwoStepFunnelCountQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[0].ServiceName,
|
||||
funnelSteps[0].SpanName,
|
||||
funnelSteps[1].ServiceName,
|
||||
funnelSteps[1].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
}
|
||||
|
||||
return &v3.ClickHouseQuery{
|
||||
Query: query,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func GetSlowestTraces(funnel *tracefunneltypes.StorableFunnel, timeRange tracefunneltypes.TimeRange, stepStart, stepEnd int64) (*v3.ClickHouseQuery, error) {
|
||||
funnelSteps := funnel.Steps
|
||||
containsErrorT1 := 0
|
||||
containsErrorT2 := 0
|
||||
stepStartOrder := 0
|
||||
stepEndOrder := 1
|
||||
|
||||
if stepStart != stepEnd {
|
||||
stepStartOrder = int(stepStart) - 1
|
||||
stepEndOrder = int(stepEnd) - 1
|
||||
if funnelSteps[stepStartOrder].HasErrors {
|
||||
containsErrorT1 = 1
|
||||
}
|
||||
if funnelSteps[stepEndOrder].HasErrors {
|
||||
containsErrorT2 = 1
|
||||
}
|
||||
}
|
||||
|
||||
// Build filter clauses for the steps
|
||||
clauseStep1, err := tracev4.BuildTracesFilter(funnelSteps[stepStartOrder].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep2, err := tracev4.BuildTracesFilter(funnelSteps[stepEndOrder].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Sanitize clauses
|
||||
clauseStep1 = sanitizeClause(clauseStep1)
|
||||
clauseStep2 = sanitizeClause(clauseStep2)
|
||||
|
||||
query := BuildTwoStepFunnelTopSlowTracesQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[stepStartOrder].ServiceName,
|
||||
funnelSteps[stepStartOrder].SpanName,
|
||||
funnelSteps[stepEndOrder].ServiceName,
|
||||
funnelSteps[stepEndOrder].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
return &v3.ClickHouseQuery{Query: query}, nil
|
||||
}
|
||||
|
||||
func GetErroredTraces(funnel *tracefunneltypes.StorableFunnel, timeRange tracefunneltypes.TimeRange, stepStart, stepEnd int64) (*v3.ClickHouseQuery, error) {
|
||||
funnelSteps := funnel.Steps
|
||||
containsErrorT1 := 0
|
||||
containsErrorT2 := 0
|
||||
stepStartOrder := 0
|
||||
stepEndOrder := 1
|
||||
|
||||
if stepStart != stepEnd {
|
||||
stepStartOrder = int(stepStart) - 1
|
||||
stepEndOrder = int(stepEnd) - 1
|
||||
if funnelSteps[stepStartOrder].HasErrors {
|
||||
containsErrorT1 = 1
|
||||
}
|
||||
if funnelSteps[stepEndOrder].HasErrors {
|
||||
containsErrorT2 = 1
|
||||
}
|
||||
}
|
||||
|
||||
// Build filter clauses for the steps
|
||||
clauseStep1, err := tracev4.BuildTracesFilter(funnelSteps[stepStartOrder].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
clauseStep2, err := tracev4.BuildTracesFilter(funnelSteps[stepEndOrder].Filters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Sanitize clauses
|
||||
clauseStep1 = sanitizeClause(clauseStep1)
|
||||
clauseStep2 = sanitizeClause(clauseStep2)
|
||||
|
||||
query := BuildTwoStepFunnelTopSlowErrorTracesQuery(
|
||||
containsErrorT1,
|
||||
containsErrorT2,
|
||||
timeRange.StartTime,
|
||||
timeRange.EndTime,
|
||||
funnelSteps[stepStartOrder].ServiceName,
|
||||
funnelSteps[stepStartOrder].SpanName,
|
||||
funnelSteps[stepEndOrder].ServiceName,
|
||||
funnelSteps[stepEndOrder].SpanName,
|
||||
clauseStep1,
|
||||
clauseStep2,
|
||||
)
|
||||
return &v3.ClickHouseQuery{Query: query}, nil
|
||||
}
|
||||
31
pkg/modules/user/impluser/getter.go
Normal file
31
pkg/modules/user/impluser/getter.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package impluser
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type getter struct {
|
||||
store types.UserStore
|
||||
}
|
||||
|
||||
func NewGetter(store types.UserStore) user.Getter {
|
||||
return &getter{store: store}
|
||||
}
|
||||
|
||||
func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
|
||||
gettableUsers, err := module.store.ListUsers(ctx, orgID.StringValue())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
users := make([]*types.User, len(gettableUsers))
|
||||
for i, user := range gettableUsers {
|
||||
users[i] = &user.User
|
||||
}
|
||||
|
||||
return users, nil
|
||||
}
|
||||
@@ -326,7 +326,7 @@ func (h *handler) UpdateUser(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
user.UpdatedAt = time.Now()
|
||||
|
||||
updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user)
|
||||
updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user, claims.UserID)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
@@ -347,7 +347,7 @@ func (h *handler) DeleteUser(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.module.DeleteUser(ctx, claims.OrgID, id); err != nil {
|
||||
if err := h.module.DeleteUser(ctx, claims.OrgID, id, claims.UserID); err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/emailtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
@@ -135,35 +134,9 @@ func (m *Module) CreateUserWithPassword(ctx context.Context, user *types.User, p
|
||||
return nil, err
|
||||
}
|
||||
|
||||
m.analytics.Send(ctx,
|
||||
analyticstypes.Identify{
|
||||
UserId: user.ID.String(),
|
||||
Traits: analyticstypes.
|
||||
NewTraits().
|
||||
SetName(user.DisplayName).
|
||||
SetEmail(user.Email).
|
||||
Set("role", user.Role).
|
||||
SetCreatedAt(user.CreatedAt),
|
||||
},
|
||||
analyticstypes.Group{
|
||||
UserId: user.ID.String(),
|
||||
GroupId: user.OrgID,
|
||||
},
|
||||
analyticstypes.Track{
|
||||
UserId: user.ID.String(),
|
||||
Event: "User Created",
|
||||
Properties: analyticstypes.NewPropertiesFromMap(map[string]any{
|
||||
"role": user.Role,
|
||||
"email": user.Email,
|
||||
"name": user.DisplayName,
|
||||
}),
|
||||
Context: &analyticstypes.Context{
|
||||
Extra: map[string]interface{}{
|
||||
analyticstypes.KeyGroupID: user.OrgID,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
traitsOrProperties := types.NewTraitsFromUser(user)
|
||||
m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traitsOrProperties)
|
||||
m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Created", traitsOrProperties)
|
||||
|
||||
return user, nil
|
||||
}
|
||||
@@ -173,35 +146,9 @@ func (m *Module) CreateUser(ctx context.Context, user *types.User) error {
|
||||
return err
|
||||
}
|
||||
|
||||
m.analytics.Send(ctx,
|
||||
analyticstypes.Identify{
|
||||
UserId: user.ID.String(),
|
||||
Traits: analyticstypes.
|
||||
NewTraits().
|
||||
SetName(user.DisplayName).
|
||||
SetEmail(user.Email).
|
||||
Set("role", user.Role).
|
||||
SetCreatedAt(user.CreatedAt),
|
||||
},
|
||||
analyticstypes.Group{
|
||||
UserId: user.ID.String(),
|
||||
GroupId: user.OrgID,
|
||||
},
|
||||
analyticstypes.Track{
|
||||
UserId: user.ID.String(),
|
||||
Event: "User Created",
|
||||
Properties: analyticstypes.NewPropertiesFromMap(map[string]any{
|
||||
"role": user.Role,
|
||||
"email": user.Email,
|
||||
"name": user.DisplayName,
|
||||
}),
|
||||
Context: &analyticstypes.Context{
|
||||
Extra: map[string]interface{}{
|
||||
analyticstypes.KeyGroupID: user.OrgID,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
traitsOrProperties := types.NewTraitsFromUser(user)
|
||||
m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traitsOrProperties)
|
||||
m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Created", traitsOrProperties)
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -226,11 +173,22 @@ func (m *Module) ListUsers(ctx context.Context, orgID string) ([]*types.Gettable
|
||||
return m.store.ListUsers(ctx, orgID)
|
||||
}
|
||||
|
||||
func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error) {
|
||||
return m.store.UpdateUser(ctx, orgID, id, user)
|
||||
func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error) {
|
||||
user, err := m.store.UpdateUser(ctx, orgID, id, user)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
traits := types.NewTraitsFromUser(user)
|
||||
m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traits)
|
||||
|
||||
traits["updated_by"] = updatedBy
|
||||
m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Updated", traits)
|
||||
|
||||
return user, nil
|
||||
}
|
||||
|
||||
func (m *Module) DeleteUser(ctx context.Context, orgID string, id string) error {
|
||||
func (m *Module) DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error {
|
||||
user, err := m.store.GetUserByID(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -250,7 +208,15 @@ func (m *Module) DeleteUser(ctx context.Context, orgID string, id string) error
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
|
||||
}
|
||||
|
||||
return m.store.DeleteUser(ctx, orgID, user.ID.StringValue())
|
||||
if err := m.store.DeleteUser(ctx, orgID, user.ID.StringValue()); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Deleted", map[string]any{
|
||||
"deleted_by": deletedBy,
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Module) CreateResetPasswordToken(ctx context.Context, userID string) (*types.ResetPasswordRequest, error) {
|
||||
@@ -644,10 +610,16 @@ func (m *Module) Register(ctx context.Context, req *types.PostableRegisterOrgAnd
|
||||
}
|
||||
|
||||
func (m *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
|
||||
stats := make(map[string]any)
|
||||
count, err := m.store.CountByOrgID(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
if err == nil {
|
||||
stats["user.count"] = count
|
||||
}
|
||||
|
||||
return map[string]any{"user.count": count}, nil
|
||||
count, err = m.store.CountAPIKeyByOrgID(ctx, orgID)
|
||||
if err == nil {
|
||||
stats["factor.api_key.count"] = count
|
||||
}
|
||||
|
||||
return stats, nil
|
||||
}
|
||||
|
||||
@@ -826,3 +826,21 @@ func (store *store) CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64,
|
||||
|
||||
return int64(count), nil
|
||||
}
|
||||
|
||||
func (store *store) CountAPIKeyByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) {
|
||||
apiKey := new(types.StorableAPIKey)
|
||||
|
||||
count, err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(apiKey).
|
||||
Join("JOIN users ON users.id = storable_api_key.user_id").
|
||||
Where("org_id = ?", orgID).
|
||||
Count(ctx)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return int64(count), nil
|
||||
}
|
||||
|
||||
@@ -28,8 +28,8 @@ type Module interface {
|
||||
GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error)
|
||||
GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error)
|
||||
ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error)
|
||||
UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error)
|
||||
DeleteUser(ctx context.Context, orgID string, id string) error
|
||||
UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error)
|
||||
DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error
|
||||
|
||||
// login
|
||||
GetAuthenticatedUser(ctx context.Context, orgID, email, password, refreshToken string) (*types.User, error)
|
||||
@@ -70,6 +70,11 @@ type Module interface {
|
||||
statsreporter.StatsCollector
|
||||
}
|
||||
|
||||
type Getter interface {
|
||||
// Get gets the users based on the given id
|
||||
ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
// invite
|
||||
CreateInvite(http.ResponseWriter, *http.Request)
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -26,14 +26,11 @@ HAS=25
|
||||
HASANY=26
|
||||
HASALL=27
|
||||
BOOL=28
|
||||
DOLLAR_VAR=29
|
||||
CURLY_VAR=30
|
||||
SQUARE_VAR=31
|
||||
NUMBER=32
|
||||
QUOTED_TEXT=33
|
||||
KEY=34
|
||||
WS=35
|
||||
FREETEXT=36
|
||||
NUMBER=29
|
||||
QUOTED_TEXT=30
|
||||
KEY=31
|
||||
WS=32
|
||||
FREETEXT=33
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -26,14 +26,11 @@ HAS=25
|
||||
HASANY=26
|
||||
HASALL=27
|
||||
BOOL=28
|
||||
DOLLAR_VAR=29
|
||||
CURLY_VAR=30
|
||||
SQUARE_VAR=31
|
||||
NUMBER=32
|
||||
QUOTED_TEXT=33
|
||||
KEY=34
|
||||
WS=35
|
||||
FREETEXT=36
|
||||
NUMBER=29
|
||||
QUOTED_TEXT=30
|
||||
KEY=31
|
||||
WS=32
|
||||
FREETEXT=33
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
|
||||
@@ -117,12 +117,6 @@ func (s *BaseFilterQueryListener) EnterValue(ctx *ValueContext) {}
|
||||
// ExitValue is called when production value is exited.
|
||||
func (s *BaseFilterQueryListener) ExitValue(ctx *ValueContext) {}
|
||||
|
||||
// EnterVariable is called when production variable is entered.
|
||||
func (s *BaseFilterQueryListener) EnterVariable(ctx *VariableContext) {}
|
||||
|
||||
// ExitVariable is called when production variable is exited.
|
||||
func (s *BaseFilterQueryListener) ExitVariable(ctx *VariableContext) {}
|
||||
|
||||
// EnterKey is called when production key is entered.
|
||||
func (s *BaseFilterQueryListener) EnterKey(ctx *KeyContext) {}
|
||||
|
||||
|
||||
@@ -72,10 +72,6 @@ func (v *BaseFilterQueryVisitor) VisitValue(ctx *ValueContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BaseFilterQueryVisitor) VisitVariable(ctx *VariableContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BaseFilterQueryVisitor) VisitKey(ctx *KeyContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
@@ -50,213 +50,178 @@ func filterquerylexerLexerInit() {
|
||||
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
|
||||
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
|
||||
"HAS", "HASANY", "HASALL", "BOOL", "DOLLAR_VAR", "CURLY_VAR", "SQUARE_VAR",
|
||||
"NUMBER", "QUOTED_TEXT", "KEY", "WS", "FREETEXT",
|
||||
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
|
||||
"FREETEXT",
|
||||
}
|
||||
staticData.RuleNames = []string{
|
||||
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
|
||||
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
|
||||
"HAS", "HASANY", "HASALL", "BOOL", "DOLLAR_VAR", "CURLY_VAR", "SQUARE_VAR",
|
||||
"SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS",
|
||||
"KEY", "WS", "DIGIT", "FREETEXT",
|
||||
"HAS", "HASANY", "HASALL", "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT",
|
||||
"SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS", "KEY", "WS", "DIGIT",
|
||||
"FREETEXT",
|
||||
}
|
||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||
staticData.serializedATN = []int32{
|
||||
4, 0, 36, 404, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 0, 33, 334, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
|
||||
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
|
||||
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
|
||||
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
|
||||
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
|
||||
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
|
||||
7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 1, 0, 1,
|
||||
0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3,
|
||||
5, 97, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9,
|
||||
1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1,
|
||||
12, 1, 13, 1, 13, 1, 13, 1, 13, 4, 13, 124, 8, 13, 11, 13, 12, 13, 125,
|
||||
1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1,
|
||||
14, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 143, 8, 15, 11, 15, 12, 15, 144,
|
||||
1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1,
|
||||
16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17,
|
||||
167, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1,
|
||||
19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 184, 8, 19, 1, 20,
|
||||
1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1,
|
||||
23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25,
|
||||
1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1,
|
||||
27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 227,
|
||||
8, 27, 1, 28, 1, 28, 1, 28, 5, 28, 232, 8, 28, 10, 28, 12, 28, 235, 9,
|
||||
28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 241, 8, 29, 10, 29, 12, 29, 244,
|
||||
9, 29, 1, 29, 3, 29, 247, 8, 29, 1, 29, 1, 29, 5, 29, 251, 8, 29, 10, 29,
|
||||
12, 29, 254, 9, 29, 1, 29, 5, 29, 257, 8, 29, 10, 29, 12, 29, 260, 9, 29,
|
||||
1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 269, 8, 30, 10,
|
||||
30, 12, 30, 272, 9, 30, 1, 30, 3, 30, 275, 8, 30, 1, 30, 1, 30, 5, 30,
|
||||
279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 30, 5, 30, 285, 8, 30, 10, 30,
|
||||
12, 30, 288, 9, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 3, 32, 296,
|
||||
8, 32, 1, 32, 4, 32, 299, 8, 32, 11, 32, 12, 32, 300, 1, 32, 1, 32, 5,
|
||||
32, 305, 8, 32, 10, 32, 12, 32, 308, 9, 32, 3, 32, 310, 8, 32, 1, 32, 1,
|
||||
32, 3, 32, 314, 8, 32, 1, 32, 4, 32, 317, 8, 32, 11, 32, 12, 32, 318, 3,
|
||||
32, 321, 8, 32, 1, 32, 3, 32, 324, 8, 32, 1, 32, 1, 32, 4, 32, 328, 8,
|
||||
32, 11, 32, 12, 32, 329, 1, 32, 1, 32, 3, 32, 334, 8, 32, 1, 32, 4, 32,
|
||||
337, 8, 32, 11, 32, 12, 32, 338, 3, 32, 341, 8, 32, 3, 32, 343, 8, 32,
|
||||
1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 349, 8, 33, 10, 33, 12, 33, 352, 9,
|
||||
33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 359, 8, 33, 10, 33, 12, 33,
|
||||
362, 9, 33, 1, 33, 3, 33, 365, 8, 33, 1, 34, 1, 34, 5, 34, 369, 8, 34,
|
||||
10, 34, 12, 34, 372, 9, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1,
|
||||
36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 386, 8, 37, 10, 37, 12, 37,
|
||||
389, 9, 37, 1, 38, 4, 38, 392, 8, 38, 11, 38, 12, 38, 393, 1, 38, 1, 38,
|
||||
1, 39, 1, 39, 1, 40, 4, 40, 401, 8, 40, 11, 40, 12, 40, 402, 0, 0, 41,
|
||||
1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11,
|
||||
23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20,
|
||||
41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 29,
|
||||
59, 30, 61, 31, 63, 0, 65, 32, 67, 33, 69, 0, 71, 0, 73, 0, 75, 34, 77,
|
||||
35, 79, 0, 81, 36, 1, 0, 32, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105,
|
||||
105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110,
|
||||
110, 2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32,
|
||||
2, 0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2,
|
||||
0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2,
|
||||
0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0,
|
||||
68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0,
|
||||
85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 3, 0, 65, 90, 95, 95, 97, 122,
|
||||
5, 0, 46, 46, 48, 57, 65, 90, 95, 95, 97, 122, 2, 0, 43, 43, 45, 45, 2,
|
||||
0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 2, 0, 65, 90, 97, 122, 5, 0, 45,
|
||||
45, 48, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0,
|
||||
48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93,
|
||||
93, 437, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1,
|
||||
0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15,
|
||||
1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0,
|
||||
23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0,
|
||||
0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0,
|
||||
0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0,
|
||||
0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1,
|
||||
0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61,
|
||||
1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0,
|
||||
77, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 3, 85, 1, 0, 0, 0,
|
||||
5, 87, 1, 0, 0, 0, 7, 89, 1, 0, 0, 0, 9, 91, 1, 0, 0, 0, 11, 96, 1, 0,
|
||||
0, 0, 13, 98, 1, 0, 0, 0, 15, 101, 1, 0, 0, 0, 17, 104, 1, 0, 0, 0, 19,
|
||||
106, 1, 0, 0, 0, 21, 109, 1, 0, 0, 0, 23, 111, 1, 0, 0, 0, 25, 114, 1,
|
||||
0, 0, 0, 27, 119, 1, 0, 0, 0, 29, 132, 1, 0, 0, 0, 31, 138, 1, 0, 0, 0,
|
||||
33, 152, 1, 0, 0, 0, 35, 160, 1, 0, 0, 0, 37, 168, 1, 0, 0, 0, 39, 175,
|
||||
1, 0, 0, 0, 41, 185, 1, 0, 0, 0, 43, 188, 1, 0, 0, 0, 45, 192, 1, 0, 0,
|
||||
0, 47, 196, 1, 0, 0, 0, 49, 199, 1, 0, 0, 0, 51, 203, 1, 0, 0, 0, 53, 210,
|
||||
1, 0, 0, 0, 55, 226, 1, 0, 0, 0, 57, 228, 1, 0, 0, 0, 59, 236, 1, 0, 0,
|
||||
0, 61, 264, 1, 0, 0, 0, 63, 292, 1, 0, 0, 0, 65, 342, 1, 0, 0, 0, 67, 364,
|
||||
1, 0, 0, 0, 69, 366, 1, 0, 0, 0, 71, 373, 1, 0, 0, 0, 73, 376, 1, 0, 0,
|
||||
0, 75, 380, 1, 0, 0, 0, 77, 391, 1, 0, 0, 0, 79, 397, 1, 0, 0, 0, 81, 400,
|
||||
1, 0, 0, 0, 83, 84, 5, 40, 0, 0, 84, 2, 1, 0, 0, 0, 85, 86, 5, 41, 0, 0,
|
||||
86, 4, 1, 0, 0, 0, 87, 88, 5, 91, 0, 0, 88, 6, 1, 0, 0, 0, 89, 90, 5, 93,
|
||||
0, 0, 90, 8, 1, 0, 0, 0, 91, 92, 5, 44, 0, 0, 92, 10, 1, 0, 0, 0, 93, 97,
|
||||
5, 61, 0, 0, 94, 95, 5, 61, 0, 0, 95, 97, 5, 61, 0, 0, 96, 93, 1, 0, 0,
|
||||
0, 96, 94, 1, 0, 0, 0, 97, 12, 1, 0, 0, 0, 98, 99, 5, 33, 0, 0, 99, 100,
|
||||
5, 61, 0, 0, 100, 14, 1, 0, 0, 0, 101, 102, 5, 60, 0, 0, 102, 103, 5, 62,
|
||||
0, 0, 103, 16, 1, 0, 0, 0, 104, 105, 5, 60, 0, 0, 105, 18, 1, 0, 0, 0,
|
||||
106, 107, 5, 60, 0, 0, 107, 108, 5, 61, 0, 0, 108, 20, 1, 0, 0, 0, 109,
|
||||
110, 5, 62, 0, 0, 110, 22, 1, 0, 0, 0, 111, 112, 5, 62, 0, 0, 112, 113,
|
||||
5, 61, 0, 0, 113, 24, 1, 0, 0, 0, 114, 115, 7, 0, 0, 0, 115, 116, 7, 1,
|
||||
0, 0, 116, 117, 7, 2, 0, 0, 117, 118, 7, 3, 0, 0, 118, 26, 1, 0, 0, 0,
|
||||
119, 120, 7, 4, 0, 0, 120, 121, 7, 5, 0, 0, 121, 123, 7, 6, 0, 0, 122,
|
||||
124, 7, 7, 0, 0, 123, 122, 1, 0, 0, 0, 124, 125, 1, 0, 0, 0, 125, 123,
|
||||
1, 0, 0, 0, 125, 126, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 128, 7, 0,
|
||||
0, 0, 128, 129, 7, 1, 0, 0, 129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0,
|
||||
131, 28, 1, 0, 0, 0, 132, 133, 7, 1, 0, 0, 133, 134, 7, 0, 0, 0, 134, 135,
|
||||
7, 1, 0, 0, 135, 136, 7, 2, 0, 0, 136, 137, 7, 3, 0, 0, 137, 30, 1, 0,
|
||||
0, 0, 138, 139, 7, 4, 0, 0, 139, 140, 7, 5, 0, 0, 140, 142, 7, 6, 0, 0,
|
||||
141, 143, 7, 7, 0, 0, 142, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144,
|
||||
142, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 147,
|
||||
7, 1, 0, 0, 147, 148, 7, 0, 0, 0, 148, 149, 7, 1, 0, 0, 149, 150, 7, 2,
|
||||
0, 0, 150, 151, 7, 3, 0, 0, 151, 32, 1, 0, 0, 0, 152, 153, 7, 8, 0, 0,
|
||||
153, 154, 7, 3, 0, 0, 154, 155, 7, 6, 0, 0, 155, 156, 7, 9, 0, 0, 156,
|
||||
157, 7, 3, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 4, 0, 0, 159, 34, 1,
|
||||
0, 0, 0, 160, 161, 7, 3, 0, 0, 161, 162, 7, 10, 0, 0, 162, 163, 7, 1, 0,
|
||||
0, 163, 164, 7, 11, 0, 0, 164, 166, 7, 6, 0, 0, 165, 167, 7, 11, 0, 0,
|
||||
166, 165, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 36, 1, 0, 0, 0, 168, 169,
|
||||
7, 12, 0, 0, 169, 170, 7, 3, 0, 0, 170, 171, 7, 13, 0, 0, 171, 172, 7,
|
||||
3, 0, 0, 172, 173, 7, 10, 0, 0, 173, 174, 7, 14, 0, 0, 174, 38, 1, 0, 0,
|
||||
0, 175, 176, 7, 15, 0, 0, 176, 177, 7, 5, 0, 0, 177, 178, 7, 4, 0, 0, 178,
|
||||
179, 7, 6, 0, 0, 179, 180, 7, 16, 0, 0, 180, 181, 7, 1, 0, 0, 181, 183,
|
||||
7, 4, 0, 0, 182, 184, 7, 11, 0, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0,
|
||||
0, 0, 184, 40, 1, 0, 0, 0, 185, 186, 7, 1, 0, 0, 186, 187, 7, 4, 0, 0,
|
||||
187, 42, 1, 0, 0, 0, 188, 189, 7, 4, 0, 0, 189, 190, 7, 5, 0, 0, 190, 191,
|
||||
7, 6, 0, 0, 191, 44, 1, 0, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 4,
|
||||
0, 0, 194, 195, 7, 17, 0, 0, 195, 46, 1, 0, 0, 0, 196, 197, 7, 5, 0, 0,
|
||||
197, 198, 7, 12, 0, 0, 198, 48, 1, 0, 0, 0, 199, 200, 7, 18, 0, 0, 200,
|
||||
201, 7, 16, 0, 0, 201, 202, 7, 11, 0, 0, 202, 50, 1, 0, 0, 0, 203, 204,
|
||||
7, 18, 0, 0, 204, 205, 7, 16, 0, 0, 205, 206, 7, 11, 0, 0, 206, 207, 7,
|
||||
16, 0, 0, 207, 208, 7, 4, 0, 0, 208, 209, 7, 19, 0, 0, 209, 52, 1, 0, 0,
|
||||
0, 210, 211, 7, 18, 0, 0, 211, 212, 7, 16, 0, 0, 212, 213, 7, 11, 0, 0,
|
||||
213, 214, 7, 16, 0, 0, 214, 215, 7, 0, 0, 0, 215, 216, 7, 0, 0, 0, 216,
|
||||
54, 1, 0, 0, 0, 217, 218, 7, 6, 0, 0, 218, 219, 7, 12, 0, 0, 219, 220,
|
||||
7, 20, 0, 0, 220, 227, 7, 3, 0, 0, 221, 222, 7, 21, 0, 0, 222, 223, 7,
|
||||
16, 0, 0, 223, 224, 7, 0, 0, 0, 224, 225, 7, 11, 0, 0, 225, 227, 7, 3,
|
||||
0, 0, 226, 217, 1, 0, 0, 0, 226, 221, 1, 0, 0, 0, 227, 56, 1, 0, 0, 0,
|
||||
228, 229, 5, 36, 0, 0, 229, 233, 7, 22, 0, 0, 230, 232, 7, 23, 0, 0, 231,
|
||||
230, 1, 0, 0, 0, 232, 235, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233, 234,
|
||||
1, 0, 0, 0, 234, 58, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, 236, 237, 5, 123,
|
||||
0, 0, 237, 238, 5, 123, 0, 0, 238, 242, 1, 0, 0, 0, 239, 241, 7, 7, 0,
|
||||
0, 240, 239, 1, 0, 0, 0, 241, 244, 1, 0, 0, 0, 242, 240, 1, 0, 0, 0, 242,
|
||||
243, 1, 0, 0, 0, 243, 246, 1, 0, 0, 0, 244, 242, 1, 0, 0, 0, 245, 247,
|
||||
5, 46, 0, 0, 246, 245, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 1, 0,
|
||||
0, 0, 248, 252, 7, 22, 0, 0, 249, 251, 7, 23, 0, 0, 250, 249, 1, 0, 0,
|
||||
0, 251, 254, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, 253,
|
||||
258, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 257, 7, 7, 0, 0, 256, 255,
|
||||
1, 0, 0, 0, 257, 260, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 258, 259, 1, 0,
|
||||
0, 0, 259, 261, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 261, 262, 5, 125, 0,
|
||||
0, 262, 263, 5, 125, 0, 0, 263, 60, 1, 0, 0, 0, 264, 265, 5, 91, 0, 0,
|
||||
265, 266, 5, 91, 0, 0, 266, 270, 1, 0, 0, 0, 267, 269, 7, 7, 0, 0, 268,
|
||||
267, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271,
|
||||
1, 0, 0, 0, 271, 274, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 46,
|
||||
0, 0, 274, 273, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0,
|
||||
276, 280, 7, 22, 0, 0, 277, 279, 7, 23, 0, 0, 278, 277, 1, 0, 0, 0, 279,
|
||||
282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 286,
|
||||
1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 285, 7, 7, 0, 0, 284, 283, 1, 0,
|
||||
0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0,
|
||||
287, 289, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 93, 0, 0, 290,
|
||||
291, 5, 93, 0, 0, 291, 62, 1, 0, 0, 0, 292, 293, 7, 24, 0, 0, 293, 64,
|
||||
1, 0, 0, 0, 294, 296, 3, 63, 31, 0, 295, 294, 1, 0, 0, 0, 295, 296, 1,
|
||||
0, 0, 0, 296, 298, 1, 0, 0, 0, 297, 299, 3, 79, 39, 0, 298, 297, 1, 0,
|
||||
0, 0, 299, 300, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0,
|
||||
301, 309, 1, 0, 0, 0, 302, 306, 5, 46, 0, 0, 303, 305, 3, 79, 39, 0, 304,
|
||||
303, 1, 0, 0, 0, 305, 308, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 306, 307,
|
||||
1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 309, 302, 1, 0,
|
||||
0, 0, 309, 310, 1, 0, 0, 0, 310, 320, 1, 0, 0, 0, 311, 313, 7, 3, 0, 0,
|
||||
312, 314, 3, 63, 31, 0, 313, 312, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314,
|
||||
316, 1, 0, 0, 0, 315, 317, 3, 79, 39, 0, 316, 315, 1, 0, 0, 0, 317, 318,
|
||||
1, 0, 0, 0, 318, 316, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 321, 1, 0,
|
||||
0, 0, 320, 311, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 343, 1, 0, 0, 0,
|
||||
322, 324, 3, 63, 31, 0, 323, 322, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324,
|
||||
325, 1, 0, 0, 0, 325, 327, 5, 46, 0, 0, 326, 328, 3, 79, 39, 0, 327, 326,
|
||||
1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 329, 330, 1, 0,
|
||||
0, 0, 330, 340, 1, 0, 0, 0, 331, 333, 7, 3, 0, 0, 332, 334, 3, 63, 31,
|
||||
0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 336, 1, 0, 0, 0, 335,
|
||||
337, 3, 79, 39, 0, 336, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 336,
|
||||
1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 341, 1, 0, 0, 0, 340, 331, 1, 0,
|
||||
0, 0, 340, 341, 1, 0, 0, 0, 341, 343, 1, 0, 0, 0, 342, 295, 1, 0, 0, 0,
|
||||
342, 323, 1, 0, 0, 0, 343, 66, 1, 0, 0, 0, 344, 350, 5, 34, 0, 0, 345,
|
||||
349, 8, 25, 0, 0, 346, 347, 5, 92, 0, 0, 347, 349, 9, 0, 0, 0, 348, 345,
|
||||
1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0,
|
||||
0, 0, 350, 351, 1, 0, 0, 0, 351, 353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0,
|
||||
353, 365, 5, 34, 0, 0, 354, 360, 5, 39, 0, 0, 355, 359, 8, 26, 0, 0, 356,
|
||||
357, 5, 92, 0, 0, 357, 359, 9, 0, 0, 0, 358, 355, 1, 0, 0, 0, 358, 356,
|
||||
1, 0, 0, 0, 359, 362, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 360, 361, 1, 0,
|
||||
0, 0, 361, 363, 1, 0, 0, 0, 362, 360, 1, 0, 0, 0, 363, 365, 5, 39, 0, 0,
|
||||
364, 344, 1, 0, 0, 0, 364, 354, 1, 0, 0, 0, 365, 68, 1, 0, 0, 0, 366, 370,
|
||||
7, 27, 0, 0, 367, 369, 7, 28, 0, 0, 368, 367, 1, 0, 0, 0, 369, 372, 1,
|
||||
0, 0, 0, 370, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 70, 1, 0, 0,
|
||||
0, 372, 370, 1, 0, 0, 0, 373, 374, 5, 91, 0, 0, 374, 375, 5, 93, 0, 0,
|
||||
375, 72, 1, 0, 0, 0, 376, 377, 5, 91, 0, 0, 377, 378, 5, 42, 0, 0, 378,
|
||||
379, 5, 93, 0, 0, 379, 74, 1, 0, 0, 0, 380, 387, 3, 69, 34, 0, 381, 382,
|
||||
5, 46, 0, 0, 382, 386, 3, 69, 34, 0, 383, 386, 3, 71, 35, 0, 384, 386,
|
||||
3, 73, 36, 0, 385, 381, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 384, 1,
|
||||
0, 0, 0, 386, 389, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0,
|
||||
0, 388, 76, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 392, 7, 29, 0, 0, 391,
|
||||
390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 393, 394,
|
||||
1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 396, 6, 38, 0, 0, 396, 78, 1, 0,
|
||||
0, 0, 397, 398, 7, 30, 0, 0, 398, 80, 1, 0, 0, 0, 399, 401, 8, 31, 0, 0,
|
||||
400, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402,
|
||||
403, 1, 0, 0, 0, 403, 82, 1, 0, 0, 0, 39, 0, 96, 125, 144, 166, 183, 226,
|
||||
233, 242, 246, 252, 258, 270, 274, 280, 286, 295, 300, 306, 309, 313, 318,
|
||||
320, 323, 329, 333, 338, 340, 342, 348, 350, 358, 360, 364, 370, 385, 387,
|
||||
393, 402, 1, 6, 0, 0,
|
||||
7, 36, 2, 37, 7, 37, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
|
||||
4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 91, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7,
|
||||
1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11,
|
||||
1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 4, 13, 118,
|
||||
8, 13, 11, 13, 12, 13, 119, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1,
|
||||
14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 137,
|
||||
8, 15, 11, 15, 12, 15, 138, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1,
|
||||
16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17,
|
||||
1, 17, 1, 17, 1, 17, 3, 17, 161, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1,
|
||||
18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19,
|
||||
3, 19, 178, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1,
|
||||
22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24,
|
||||
1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1,
|
||||
26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27,
|
||||
1, 27, 1, 27, 3, 27, 221, 8, 27, 1, 28, 1, 28, 1, 29, 3, 29, 226, 8, 29,
|
||||
1, 29, 4, 29, 229, 8, 29, 11, 29, 12, 29, 230, 1, 29, 1, 29, 5, 29, 235,
|
||||
8, 29, 10, 29, 12, 29, 238, 9, 29, 3, 29, 240, 8, 29, 1, 29, 1, 29, 3,
|
||||
29, 244, 8, 29, 1, 29, 4, 29, 247, 8, 29, 11, 29, 12, 29, 248, 3, 29, 251,
|
||||
8, 29, 1, 29, 3, 29, 254, 8, 29, 1, 29, 1, 29, 4, 29, 258, 8, 29, 11, 29,
|
||||
12, 29, 259, 1, 29, 1, 29, 3, 29, 264, 8, 29, 1, 29, 4, 29, 267, 8, 29,
|
||||
11, 29, 12, 29, 268, 3, 29, 271, 8, 29, 3, 29, 273, 8, 29, 1, 30, 1, 30,
|
||||
1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 30, 1,
|
||||
30, 1, 30, 1, 30, 1, 30, 5, 30, 289, 8, 30, 10, 30, 12, 30, 292, 9, 30,
|
||||
1, 30, 3, 30, 295, 8, 30, 1, 31, 1, 31, 5, 31, 299, 8, 31, 10, 31, 12,
|
||||
31, 302, 9, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34,
|
||||
1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 316, 8, 34, 10, 34, 12, 34, 319, 9,
|
||||
34, 1, 35, 4, 35, 322, 8, 35, 11, 35, 12, 35, 323, 1, 35, 1, 35, 1, 36,
|
||||
1, 36, 1, 37, 4, 37, 331, 8, 37, 11, 37, 12, 37, 332, 0, 0, 38, 1, 1, 3,
|
||||
2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12,
|
||||
25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21,
|
||||
43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 29,
|
||||
61, 30, 63, 0, 65, 0, 67, 0, 69, 31, 71, 32, 73, 0, 75, 33, 1, 0, 30, 2,
|
||||
0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2,
|
||||
0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0, 79, 79, 111, 111, 2,
|
||||
0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66, 66, 98, 98, 2, 0, 87,
|
||||
87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82,
|
||||
82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67,
|
||||
67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72,
|
||||
104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70,
|
||||
102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92,
|
||||
92, 2, 0, 65, 90, 97, 122, 5, 0, 45, 45, 48, 58, 65, 90, 95, 95, 97, 122,
|
||||
3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34,
|
||||
39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0, 0, 0, 0, 3, 1,
|
||||
0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1,
|
||||
0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19,
|
||||
1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0,
|
||||
27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0,
|
||||
0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0,
|
||||
0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0,
|
||||
0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 59, 1,
|
||||
0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 75,
|
||||
1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81, 1, 0, 0, 0, 7,
|
||||
83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13, 92, 1, 0, 0,
|
||||
0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0, 0, 0, 21, 103,
|
||||
1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27, 113, 1, 0, 0,
|
||||
0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1, 0, 0, 0, 35, 154,
|
||||
1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0, 41, 179, 1, 0, 0,
|
||||
0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190, 1, 0, 0, 0, 49, 193,
|
||||
1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0, 0, 55, 220, 1, 0, 0,
|
||||
0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294, 1, 0, 0, 0, 63, 296,
|
||||
1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0, 0, 69, 310, 1, 0, 0,
|
||||
0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330, 1, 0, 0, 0, 77, 78,
|
||||
5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0, 80, 4, 1, 0, 0, 0,
|
||||
81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93, 0, 0, 84, 8, 1,
|
||||
0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91, 5, 61, 0, 0,
|
||||
88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0, 0, 90, 88, 1,
|
||||
0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94, 5, 61, 0, 0,
|
||||
94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0, 0, 97, 16, 1,
|
||||
0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101, 5, 60, 0, 0,
|
||||
101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104,
|
||||
22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0, 107, 24,
|
||||
1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, 7, 2,
|
||||
0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4, 0, 0,
|
||||
114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0, 117,
|
||||
116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120,
|
||||
1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 122, 123, 7, 1,
|
||||
0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0, 0, 0,
|
||||
126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0, 129,
|
||||
130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 7,
|
||||
4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, 6, 0, 0, 135, 137, 7, 7, 0,
|
||||
0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138,
|
||||
139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141, 142,
|
||||
7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145, 7, 3,
|
||||
0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3, 0, 0,
|
||||
148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0, 151,
|
||||
152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 0, 0, 0, 154, 155, 7,
|
||||
3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11,
|
||||
0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, 0, 0, 160, 159, 1, 0, 0, 0,
|
||||
160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, 162, 163, 7, 12, 0, 0, 163,
|
||||
164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, 166, 7, 3, 0, 0, 166, 167,
|
||||
7, 10, 0, 0, 167, 168, 7, 14, 0, 0, 168, 38, 1, 0, 0, 0, 169, 170, 7, 15,
|
||||
0, 0, 170, 171, 7, 5, 0, 0, 171, 172, 7, 4, 0, 0, 172, 173, 7, 6, 0, 0,
|
||||
173, 174, 7, 16, 0, 0, 174, 175, 7, 1, 0, 0, 175, 177, 7, 4, 0, 0, 176,
|
||||
178, 7, 11, 0, 0, 177, 176, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 40,
|
||||
1, 0, 0, 0, 179, 180, 7, 1, 0, 0, 180, 181, 7, 4, 0, 0, 181, 42, 1, 0,
|
||||
0, 0, 182, 183, 7, 4, 0, 0, 183, 184, 7, 5, 0, 0, 184, 185, 7, 6, 0, 0,
|
||||
185, 44, 1, 0, 0, 0, 186, 187, 7, 16, 0, 0, 187, 188, 7, 4, 0, 0, 188,
|
||||
189, 7, 17, 0, 0, 189, 46, 1, 0, 0, 0, 190, 191, 7, 5, 0, 0, 191, 192,
|
||||
7, 12, 0, 0, 192, 48, 1, 0, 0, 0, 193, 194, 7, 18, 0, 0, 194, 195, 7, 16,
|
||||
0, 0, 195, 196, 7, 11, 0, 0, 196, 50, 1, 0, 0, 0, 197, 198, 7, 18, 0, 0,
|
||||
198, 199, 7, 16, 0, 0, 199, 200, 7, 11, 0, 0, 200, 201, 7, 16, 0, 0, 201,
|
||||
202, 7, 4, 0, 0, 202, 203, 7, 19, 0, 0, 203, 52, 1, 0, 0, 0, 204, 205,
|
||||
7, 18, 0, 0, 205, 206, 7, 16, 0, 0, 206, 207, 7, 11, 0, 0, 207, 208, 7,
|
||||
16, 0, 0, 208, 209, 7, 0, 0, 0, 209, 210, 7, 0, 0, 0, 210, 54, 1, 0, 0,
|
||||
0, 211, 212, 7, 6, 0, 0, 212, 213, 7, 12, 0, 0, 213, 214, 7, 20, 0, 0,
|
||||
214, 221, 7, 3, 0, 0, 215, 216, 7, 21, 0, 0, 216, 217, 7, 16, 0, 0, 217,
|
||||
218, 7, 0, 0, 0, 218, 219, 7, 11, 0, 0, 219, 221, 7, 3, 0, 0, 220, 211,
|
||||
1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 221, 56, 1, 0, 0, 0, 222, 223, 7, 22,
|
||||
0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 57, 28, 0, 225, 224, 1, 0, 0, 0,
|
||||
225, 226, 1, 0, 0, 0, 226, 228, 1, 0, 0, 0, 227, 229, 3, 73, 36, 0, 228,
|
||||
227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231,
|
||||
1, 0, 0, 0, 231, 239, 1, 0, 0, 0, 232, 236, 5, 46, 0, 0, 233, 235, 3, 73,
|
||||
36, 0, 234, 233, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0,
|
||||
236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239,
|
||||
232, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243,
|
||||
7, 3, 0, 0, 242, 244, 3, 57, 28, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1,
|
||||
0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 73, 36, 0, 246, 245, 1, 0,
|
||||
0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0,
|
||||
249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251,
|
||||
273, 1, 0, 0, 0, 252, 254, 3, 57, 28, 0, 253, 252, 1, 0, 0, 0, 253, 254,
|
||||
1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 257, 5, 46, 0, 0, 256, 258, 3, 73,
|
||||
36, 0, 257, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0,
|
||||
259, 260, 1, 0, 0, 0, 260, 270, 1, 0, 0, 0, 261, 263, 7, 3, 0, 0, 262,
|
||||
264, 3, 57, 28, 0, 263, 262, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 266,
|
||||
1, 0, 0, 0, 265, 267, 3, 73, 36, 0, 266, 265, 1, 0, 0, 0, 267, 268, 1,
|
||||
0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 271, 1, 0, 0,
|
||||
0, 270, 261, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272,
|
||||
225, 1, 0, 0, 0, 272, 253, 1, 0, 0, 0, 273, 60, 1, 0, 0, 0, 274, 280, 5,
|
||||
34, 0, 0, 275, 279, 8, 23, 0, 0, 276, 277, 5, 92, 0, 0, 277, 279, 9, 0,
|
||||
0, 0, 278, 275, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0,
|
||||
280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 1, 0, 0, 0, 282,
|
||||
280, 1, 0, 0, 0, 283, 295, 5, 34, 0, 0, 284, 290, 5, 39, 0, 0, 285, 289,
|
||||
8, 24, 0, 0, 286, 287, 5, 92, 0, 0, 287, 289, 9, 0, 0, 0, 288, 285, 1,
|
||||
0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0,
|
||||
0, 290, 291, 1, 0, 0, 0, 291, 293, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293,
|
||||
295, 5, 39, 0, 0, 294, 274, 1, 0, 0, 0, 294, 284, 1, 0, 0, 0, 295, 62,
|
||||
1, 0, 0, 0, 296, 300, 7, 25, 0, 0, 297, 299, 7, 26, 0, 0, 298, 297, 1,
|
||||
0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0,
|
||||
0, 301, 64, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, 5, 91, 0, 0, 304,
|
||||
305, 5, 93, 0, 0, 305, 66, 1, 0, 0, 0, 306, 307, 5, 91, 0, 0, 307, 308,
|
||||
5, 42, 0, 0, 308, 309, 5, 93, 0, 0, 309, 68, 1, 0, 0, 0, 310, 317, 3, 63,
|
||||
31, 0, 311, 312, 5, 46, 0, 0, 312, 316, 3, 63, 31, 0, 313, 316, 3, 65,
|
||||
32, 0, 314, 316, 3, 67, 33, 0, 315, 311, 1, 0, 0, 0, 315, 313, 1, 0, 0,
|
||||
0, 315, 314, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317,
|
||||
318, 1, 0, 0, 0, 318, 70, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 7,
|
||||
27, 0, 0, 321, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 321, 1, 0, 0,
|
||||
0, 323, 324, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 35, 0, 0, 326,
|
||||
72, 1, 0, 0, 0, 327, 328, 7, 28, 0, 0, 328, 74, 1, 0, 0, 0, 329, 331, 8,
|
||||
29, 0, 0, 330, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 330, 1, 0, 0,
|
||||
0, 332, 333, 1, 0, 0, 0, 333, 76, 1, 0, 0, 0, 30, 0, 90, 119, 138, 160,
|
||||
177, 220, 225, 230, 236, 239, 243, 248, 250, 253, 259, 263, 268, 270, 272,
|
||||
278, 280, 288, 290, 294, 300, 315, 317, 323, 332, 1, 6, 0, 0,
|
||||
}
|
||||
deserializer := antlr.NewATNDeserializer(nil)
|
||||
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||
@@ -325,12 +290,9 @@ const (
|
||||
FilterQueryLexerHASANY = 26
|
||||
FilterQueryLexerHASALL = 27
|
||||
FilterQueryLexerBOOL = 28
|
||||
FilterQueryLexerDOLLAR_VAR = 29
|
||||
FilterQueryLexerCURLY_VAR = 30
|
||||
FilterQueryLexerSQUARE_VAR = 31
|
||||
FilterQueryLexerNUMBER = 32
|
||||
FilterQueryLexerQUOTED_TEXT = 33
|
||||
FilterQueryLexerKEY = 34
|
||||
FilterQueryLexerWS = 35
|
||||
FilterQueryLexerFREETEXT = 36
|
||||
FilterQueryLexerNUMBER = 29
|
||||
FilterQueryLexerQUOTED_TEXT = 30
|
||||
FilterQueryLexerKEY = 31
|
||||
FilterQueryLexerWS = 32
|
||||
FilterQueryLexerFREETEXT = 33
|
||||
)
|
||||
|
||||
@@ -56,9 +56,6 @@ type FilterQueryListener interface {
|
||||
// EnterValue is called when entering the value production.
|
||||
EnterValue(c *ValueContext)
|
||||
|
||||
// EnterVariable is called when entering the variable production.
|
||||
EnterVariable(c *VariableContext)
|
||||
|
||||
// EnterKey is called when entering the key production.
|
||||
EnterKey(c *KeyContext)
|
||||
|
||||
@@ -110,9 +107,6 @@ type FilterQueryListener interface {
|
||||
// ExitValue is called when exiting the value production.
|
||||
ExitValue(c *ValueContext)
|
||||
|
||||
// ExitVariable is called when exiting the variable production.
|
||||
ExitVariable(c *VariableContext)
|
||||
|
||||
// ExitKey is called when exiting the key production.
|
||||
ExitKey(c *KeyContext)
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -56,9 +56,6 @@ type FilterQueryVisitor interface {
|
||||
// Visit a parse tree produced by FilterQueryParser#value.
|
||||
VisitValue(ctx *ValueContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by FilterQueryParser#variable.
|
||||
VisitVariable(ctx *VariableContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by FilterQueryParser#key.
|
||||
VisitKey(ctx *KeyContext) interface{}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@ type builderQuery[T any] struct {
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
stmtBuilder qbtypes.StatementBuilder[T]
|
||||
spec qbtypes.QueryBuilderQuery[T]
|
||||
variables map[string]qbtypes.VariableItem
|
||||
|
||||
fromMS uint64
|
||||
toMS uint64
|
||||
@@ -33,13 +32,11 @@ func newBuilderQuery[T any](
|
||||
spec qbtypes.QueryBuilderQuery[T],
|
||||
tr qbtypes.TimeRange,
|
||||
kind qbtypes.RequestType,
|
||||
variables map[string]qbtypes.VariableItem,
|
||||
) *builderQuery[T] {
|
||||
return &builderQuery[T]{
|
||||
telemetryStore: telemetryStore,
|
||||
stmtBuilder: stmtBuilder,
|
||||
spec: spec,
|
||||
variables: variables,
|
||||
fromMS: tr.From,
|
||||
toMS: tr.To,
|
||||
kind: kind,
|
||||
@@ -177,7 +174,7 @@ func (q *builderQuery[T]) Execute(ctx context.Context) (*qbtypes.Result, error)
|
||||
return q.executeWindowList(ctx)
|
||||
}
|
||||
|
||||
stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec, q.variables)
|
||||
stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -281,7 +278,7 @@ func (q *builderQuery[T]) executeWindowList(ctx context.Context) (*qbtypes.Resul
|
||||
q.spec.Offset = 0
|
||||
q.spec.Limit = need
|
||||
|
||||
stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec, q.variables)
|
||||
stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -3,11 +3,8 @@ package querier
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"slices"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/govaluate"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
@@ -109,15 +106,12 @@ func postProcessBuilderQuery[T any](
|
||||
q *querier,
|
||||
result *qbtypes.Result,
|
||||
query qbtypes.QueryBuilderQuery[T],
|
||||
req *qbtypes.QueryRangeRequest,
|
||||
_ *qbtypes.QueryRangeRequest,
|
||||
) *qbtypes.Result {
|
||||
|
||||
// Apply functions
|
||||
if len(query.Functions) > 0 {
|
||||
// For builder queries, use the query's own step
|
||||
step := query.StepInterval.Duration.Milliseconds()
|
||||
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
|
||||
result = q.applyFunctions(result, functions)
|
||||
result = q.applyFunctions(result, query.Functions)
|
||||
}
|
||||
|
||||
return result
|
||||
@@ -136,10 +130,7 @@ func postProcessMetricQuery(
|
||||
}
|
||||
|
||||
if len(query.Functions) > 0 {
|
||||
// For metric queries, use the query's own step
|
||||
step := query.StepInterval.Duration.Milliseconds()
|
||||
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
|
||||
result = q.applyFunctions(result, functions)
|
||||
result = q.applyFunctions(result, query.Functions)
|
||||
}
|
||||
|
||||
// Apply reduce to for scalar request type
|
||||
@@ -231,11 +222,6 @@ func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes
|
||||
if result != nil {
|
||||
results[name] = result
|
||||
}
|
||||
} else if req.RequestType == qbtypes.RequestTypeScalar {
|
||||
result := q.processScalarFormula(ctx, results, formula, req)
|
||||
if result != nil {
|
||||
results[name] = result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -247,7 +233,7 @@ func (q *querier) processTimeSeriesFormula(
|
||||
ctx context.Context,
|
||||
results map[string]*qbtypes.Result,
|
||||
formula qbtypes.QueryBuilderFormula,
|
||||
req *qbtypes.QueryRangeRequest,
|
||||
_ *qbtypes.QueryRangeRequest,
|
||||
) *qbtypes.Result {
|
||||
// Prepare time series data for formula evaluation
|
||||
timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)
|
||||
@@ -292,218 +278,12 @@ func (q *querier) processTimeSeriesFormula(
|
||||
}
|
||||
|
||||
if len(formula.Functions) > 0 {
|
||||
// For formulas, calculate GCD of steps from queries in the expression
|
||||
step := q.calculateFormulaStep(formula.Expression, req)
|
||||
functions := q.prepareFillZeroArgsWithStep(formula.Functions, req, step)
|
||||
result = q.applyFunctions(result, functions)
|
||||
result = q.applyFunctions(result, formula.Functions)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// processScalarFormula handles formula evaluation for scalar data
|
||||
//
|
||||
// NOTE: This implementation has a known limitation with formulas that reference
|
||||
// specific aggregations by index (e.g., "A.0", "A.1") or multiple aggregations
|
||||
// from the same query (e.g., "A.0 * 2 + A.1"). The FormulaEvaluator's series
|
||||
// matching logic doesn't work correctly when converting scalar data to time series
|
||||
// format for these cases.
|
||||
//
|
||||
// Currently supported:
|
||||
// - Formulas between different queries: "A / B", "A * 2 + B"
|
||||
// - Simple references: "A" (defaults to first aggregation)
|
||||
//
|
||||
// Not supported:
|
||||
// - Indexed aggregation references: "A.0", "A.1"
|
||||
// - Multiple aggregations from same query: "A.0 + A.1"
|
||||
//
|
||||
// To properly support this, we would need to either:
|
||||
// 1. Fix the FormulaEvaluator's series lookup logic for scalar-converted data
|
||||
// 2. Implement a dedicated scalar formula evaluator
|
||||
func (q *querier) processScalarFormula(
|
||||
ctx context.Context,
|
||||
results map[string]*qbtypes.Result,
|
||||
formula qbtypes.QueryBuilderFormula,
|
||||
req *qbtypes.QueryRangeRequest,
|
||||
) *qbtypes.Result {
|
||||
// Convert scalar data to time series format with zero timestamp
|
||||
timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)
|
||||
|
||||
for queryName, result := range results {
|
||||
if scalarData, ok := result.Value.(*qbtypes.ScalarData); ok {
|
||||
// Convert scalar to time series
|
||||
tsData := &qbtypes.TimeSeriesData{
|
||||
QueryName: scalarData.QueryName,
|
||||
Aggregations: make([]*qbtypes.AggregationBucket, 0),
|
||||
}
|
||||
|
||||
// Find aggregation columns
|
||||
aggColumns := make(map[int]int) // aggregation index -> column index
|
||||
for colIdx, col := range scalarData.Columns {
|
||||
if col.Type == qbtypes.ColumnTypeAggregation {
|
||||
aggColumns[int(col.AggregationIndex)] = colIdx
|
||||
}
|
||||
}
|
||||
|
||||
// Group rows by their label sets
|
||||
type labeledRowData struct {
|
||||
labels []*qbtypes.Label
|
||||
values map[int]float64 // aggregation index -> value
|
||||
}
|
||||
|
||||
// First pass: group all rows by their label combination
|
||||
rowsByLabels := make(map[string]*labeledRowData)
|
||||
for _, row := range scalarData.Data {
|
||||
// Build labels from group columns
|
||||
labels := make([]*qbtypes.Label, 0)
|
||||
for i, col := range scalarData.Columns {
|
||||
if col.Type == qbtypes.ColumnTypeGroup && i < len(row) {
|
||||
labels = append(labels, &qbtypes.Label{
|
||||
Key: col.TelemetryFieldKey,
|
||||
Value: row[i],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
labelKey := qbtypes.GetUniqueSeriesKey(labels)
|
||||
|
||||
// Get or create row data
|
||||
rowData, exists := rowsByLabels[labelKey]
|
||||
if !exists {
|
||||
rowData = &labeledRowData{
|
||||
labels: labels,
|
||||
values: make(map[int]float64),
|
||||
}
|
||||
rowsByLabels[labelKey] = rowData
|
||||
}
|
||||
|
||||
// Store all aggregation values from this row
|
||||
for aggIdx, colIdx := range aggColumns {
|
||||
if colIdx < len(row) {
|
||||
if val, ok := toFloat64(row[colIdx]); ok {
|
||||
rowData.values[aggIdx] = val
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get sorted label keys for consistent ordering
|
||||
labelKeys := make([]string, 0, len(rowsByLabels))
|
||||
for key := range rowsByLabels {
|
||||
labelKeys = append(labelKeys, key)
|
||||
}
|
||||
slices.Sort(labelKeys)
|
||||
|
||||
// Create aggregation buckets
|
||||
aggIndices := make([]int, 0, len(aggColumns))
|
||||
for aggIdx := range aggColumns {
|
||||
aggIndices = append(aggIndices, aggIdx)
|
||||
}
|
||||
slices.Sort(aggIndices)
|
||||
|
||||
// For each aggregation, create a bucket with series in consistent order
|
||||
for _, aggIdx := range aggIndices {
|
||||
colIdx := aggColumns[aggIdx]
|
||||
|
||||
bucket := &qbtypes.AggregationBucket{
|
||||
Index: aggIdx,
|
||||
Alias: scalarData.Columns[colIdx].Name,
|
||||
Meta: scalarData.Columns[colIdx].Meta,
|
||||
Series: make([]*qbtypes.TimeSeries, 0),
|
||||
}
|
||||
|
||||
// Create series in the same order (by label key)
|
||||
for _, labelKey := range labelKeys {
|
||||
rowData := rowsByLabels[labelKey]
|
||||
|
||||
// Only create series if we have a value for this aggregation
|
||||
if val, exists := rowData.values[aggIdx]; exists {
|
||||
series := &qbtypes.TimeSeries{
|
||||
Labels: rowData.labels,
|
||||
Values: []*qbtypes.TimeSeriesValue{{
|
||||
Timestamp: 0,
|
||||
Value: val,
|
||||
}},
|
||||
}
|
||||
bucket.Series = append(bucket.Series, series)
|
||||
}
|
||||
}
|
||||
|
||||
tsData.Aggregations = append(tsData.Aggregations, bucket)
|
||||
}
|
||||
|
||||
timeSeriesData[queryName] = tsData
|
||||
}
|
||||
}
|
||||
|
||||
// Create formula evaluator
|
||||
canDefaultZero := make(map[string]bool)
|
||||
evaluator, err := qbtypes.NewFormulaEvaluator(formula.Expression, canDefaultZero)
|
||||
if err != nil {
|
||||
q.logger.ErrorContext(ctx, "failed to create formula evaluator", "error", err, "formula", formula.Name)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Evaluate the formula
|
||||
formulaSeries, err := evaluator.EvaluateFormula(timeSeriesData)
|
||||
if err != nil {
|
||||
q.logger.ErrorContext(ctx, "failed to evaluate formula", "error", err, "formula", formula.Name)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Convert back to scalar format
|
||||
scalarResult := &qbtypes.ScalarData{
|
||||
QueryName: formula.Name,
|
||||
Columns: make([]*qbtypes.ColumnDescriptor, 0),
|
||||
Data: make([][]any, 0),
|
||||
}
|
||||
|
||||
// Build columns from first series
|
||||
if len(formulaSeries) > 0 && len(formulaSeries[0].Labels) > 0 {
|
||||
// Add group columns
|
||||
for _, label := range formulaSeries[0].Labels {
|
||||
scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
|
||||
TelemetryFieldKey: label.Key,
|
||||
QueryName: formula.Name,
|
||||
Type: qbtypes.ColumnTypeGroup,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Add result column
|
||||
scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "__result"},
|
||||
QueryName: formula.Name,
|
||||
AggregationIndex: 0,
|
||||
Type: qbtypes.ColumnTypeAggregation,
|
||||
})
|
||||
|
||||
// Build rows
|
||||
for _, series := range formulaSeries {
|
||||
row := make([]any, len(scalarResult.Columns))
|
||||
|
||||
// Add group values
|
||||
for i, label := range series.Labels {
|
||||
if i < len(row)-1 {
|
||||
row[i] = label.Value
|
||||
}
|
||||
}
|
||||
|
||||
// Add aggregation value (from single value at timestamp 0)
|
||||
if len(series.Values) > 0 {
|
||||
row[len(row)-1] = series.Values[0].Value
|
||||
} else {
|
||||
row[len(row)-1] = "n/a"
|
||||
}
|
||||
|
||||
scalarResult.Data = append(scalarResult.Data, row)
|
||||
}
|
||||
|
||||
return &qbtypes.Result{
|
||||
Value: scalarResult,
|
||||
}
|
||||
}
|
||||
|
||||
// filterDisabledQueries removes results for disabled queries
|
||||
func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]*qbtypes.Result {
|
||||
filtered := make(map[string]*qbtypes.Result)
|
||||
@@ -870,98 +650,3 @@ func toFloat64(v any) (float64, bool) {
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// gcd calculates the greatest common divisor
|
||||
func gcd(a, b int64) int64 {
|
||||
if b == 0 {
|
||||
return a
|
||||
}
|
||||
return gcd(b, a%b)
|
||||
}
|
||||
|
||||
// prepareFillZeroArgsWithStep prepares fillZero function arguments with a specific step
|
||||
func (q *querier) prepareFillZeroArgsWithStep(functions []qbtypes.Function, req *qbtypes.QueryRangeRequest, step int64) []qbtypes.Function {
|
||||
// Check if we need to modify any functions
|
||||
needsCopy := false
|
||||
for _, fn := range functions {
|
||||
if fn.Name == qbtypes.FunctionNameFillZero && len(fn.Args) == 0 {
|
||||
needsCopy = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If no fillZero functions need arguments, return original slice
|
||||
if !needsCopy {
|
||||
return functions
|
||||
}
|
||||
|
||||
// Only copy if we need to modify
|
||||
updatedFunctions := make([]qbtypes.Function, len(functions))
|
||||
copy(updatedFunctions, functions)
|
||||
|
||||
// Process each function
|
||||
for i, fn := range updatedFunctions {
|
||||
if fn.Name == qbtypes.FunctionNameFillZero && len(fn.Args) == 0 {
|
||||
// Set the arguments: start, end, step
|
||||
fn.Args = []qbtypes.FunctionArg{
|
||||
{Value: float64(req.Start)},
|
||||
{Value: float64(req.End)},
|
||||
{Value: float64(step)},
|
||||
}
|
||||
updatedFunctions[i] = fn
|
||||
}
|
||||
}
|
||||
|
||||
return updatedFunctions
|
||||
}
|
||||
|
||||
// calculateFormulaStep calculates the GCD of steps from queries referenced in the formula
|
||||
func (q *querier) calculateFormulaStep(expression string, req *qbtypes.QueryRangeRequest) int64 {
|
||||
// Use govaluate to parse the expression and extract variables
|
||||
// This is the same library used by FormulaEvaluator
|
||||
parsedExpr, err := govaluate.NewEvaluableExpression(expression)
|
||||
if err != nil {
|
||||
// If we can't parse the expression, use default
|
||||
return 60000
|
||||
}
|
||||
|
||||
// Get the variables from the parsed expression
|
||||
variables := parsedExpr.Vars()
|
||||
|
||||
// Extract base query names (e.g., "A" from "A.0" or "A.my_alias")
|
||||
queryNames := make(map[string]bool)
|
||||
for _, variable := range variables {
|
||||
// Split by "." to get the base query name
|
||||
parts := strings.Split(variable, ".")
|
||||
if len(parts) > 0 {
|
||||
queryNames[parts[0]] = true
|
||||
}
|
||||
}
|
||||
|
||||
var steps []int64
|
||||
|
||||
// Collect steps only from queries referenced in the formula
|
||||
for _, query := range req.CompositeQuery.Queries {
|
||||
info := getqueryInfo(query.Spec)
|
||||
// Check if this query is referenced in the formula
|
||||
if !info.Disabled && queryNames[info.Name] && info.Step.Duration > 0 {
|
||||
stepMs := info.Step.Duration.Milliseconds()
|
||||
if stepMs > 0 {
|
||||
steps = append(steps, stepMs)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no steps found, use a default (60 seconds)
|
||||
if len(steps) == 0 {
|
||||
return 60000
|
||||
}
|
||||
|
||||
// Calculate GCD of all steps
|
||||
result := steps[0]
|
||||
for i := 1; i < len(steps); i++ {
|
||||
result = gcd(result, steps[i])
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1,230 +0,0 @@
|
||||
package querier
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
func TestPrepareFillZeroArgsWithStep(t *testing.T) {
|
||||
q := &querier{}
|
||||
|
||||
req := &qbtypes.QueryRangeRequest{
|
||||
Start: 1000000,
|
||||
End: 2000000,
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
functions []qbtypes.Function
|
||||
step int64
|
||||
checkArgs bool
|
||||
}{
|
||||
{
|
||||
name: "fillZero without args",
|
||||
functions: []qbtypes.Function{
|
||||
{
|
||||
Name: qbtypes.FunctionNameFillZero,
|
||||
Args: []qbtypes.FunctionArg{},
|
||||
},
|
||||
},
|
||||
step: 30000, // 30 seconds
|
||||
checkArgs: true,
|
||||
},
|
||||
{
|
||||
name: "fillZero with existing args",
|
||||
functions: []qbtypes.Function{
|
||||
{
|
||||
Name: qbtypes.FunctionNameFillZero,
|
||||
Args: []qbtypes.FunctionArg{
|
||||
{Value: 500000.0},
|
||||
{Value: 1500000.0},
|
||||
{Value: 15000.0},
|
||||
},
|
||||
},
|
||||
},
|
||||
step: 60000,
|
||||
checkArgs: false, // Should not modify existing args
|
||||
},
|
||||
{
|
||||
name: "other function should not be modified",
|
||||
functions: []qbtypes.Function{
|
||||
{
|
||||
Name: qbtypes.FunctionNameAbsolute,
|
||||
Args: []qbtypes.FunctionArg{},
|
||||
},
|
||||
},
|
||||
step: 60000,
|
||||
checkArgs: false,
|
||||
},
|
||||
{
|
||||
name: "no copy when fillZero already has args",
|
||||
functions: []qbtypes.Function{
|
||||
{
|
||||
Name: qbtypes.FunctionNameFillZero,
|
||||
Args: []qbtypes.FunctionArg{
|
||||
{Value: 1000.0},
|
||||
{Value: 2000.0},
|
||||
{Value: 500.0},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: qbtypes.FunctionNameAbsolute,
|
||||
},
|
||||
},
|
||||
step: 60000,
|
||||
checkArgs: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := q.prepareFillZeroArgsWithStep(tt.functions, req, tt.step)
|
||||
|
||||
if len(result) != len(tt.functions) {
|
||||
t.Fatalf("Expected %d functions, got %d", len(tt.functions), len(result))
|
||||
}
|
||||
|
||||
// Check if no copy was made when not needed
|
||||
if tt.name == "no copy when fillZero already has args" || tt.name == "other function should not be modified" {
|
||||
// Verify that the result is the same slice (no copy)
|
||||
if &result[0] != &tt.functions[0] {
|
||||
t.Errorf("Expected no copy, but a copy was made")
|
||||
}
|
||||
}
|
||||
|
||||
for _, fn := range result {
|
||||
if fn.Name == qbtypes.FunctionNameFillZero && tt.checkArgs {
|
||||
if len(fn.Args) != 3 {
|
||||
t.Errorf("Expected 3 args for fillZero, got %d", len(fn.Args))
|
||||
}
|
||||
// Check start
|
||||
if start, ok := fn.Args[0].Value.(float64); !ok || start != float64(req.Start) {
|
||||
t.Errorf("Expected start %f, got %v", float64(req.Start), fn.Args[0].Value)
|
||||
}
|
||||
// Check end
|
||||
if end, ok := fn.Args[1].Value.(float64); !ok || end != float64(req.End) {
|
||||
t.Errorf("Expected end %f, got %v", float64(req.End), fn.Args[1].Value)
|
||||
}
|
||||
// Check step
|
||||
if step, ok := fn.Args[2].Value.(float64); !ok || step != float64(tt.step) {
|
||||
t.Errorf("Expected step %f, got %v", float64(tt.step), fn.Args[2].Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestCalculateFormulaStep(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
req *qbtypes.QueryRangeRequest
|
||||
expected int64
|
||||
}{
|
||||
{
|
||||
name: "single query reference",
|
||||
expression: "A * 2",
|
||||
req: &qbtypes.QueryRangeRequest{
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "A",
|
||||
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "B",
|
||||
StepInterval: qbtypes.Step{Duration: 120 * time.Second},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: 60000, // Only A is referenced
|
||||
},
|
||||
{
|
||||
name: "multiple query references",
|
||||
expression: "A + B",
|
||||
req: &qbtypes.QueryRangeRequest{
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "A",
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "B",
|
||||
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: 30000, // GCD of 30s and 60s
|
||||
},
|
||||
{
|
||||
name: "complex expression",
|
||||
expression: "(A + B) / C",
|
||||
req: &qbtypes.QueryRangeRequest{
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "A",
|
||||
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "B",
|
||||
StepInterval: qbtypes.Step{Duration: 120 * time.Second},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "C",
|
||||
StepInterval: qbtypes.Step{Duration: 180 * time.Second},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: 60000, // GCD of 60s, 120s, and 180s
|
||||
},
|
||||
{
|
||||
name: "no query references",
|
||||
expression: "100",
|
||||
req: &qbtypes.QueryRangeRequest{
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{},
|
||||
},
|
||||
},
|
||||
expected: 60000, // Default
|
||||
},
|
||||
}
|
||||
|
||||
q := &querier{}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := q.calculateFormulaStep(tt.expression, tt.req)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected step %d, got %d", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -7,12 +7,10 @@ import (
|
||||
"slices"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
@@ -109,7 +107,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
|
||||
// First pass: collect all metric names that need temporality
|
||||
metricNames := make([]string, 0)
|
||||
for idx, query := range req.CompositeQuery.Queries {
|
||||
for _, query := range req.CompositeQuery.Queries {
|
||||
if query.Type == qbtypes.QueryTypeBuilder {
|
||||
if spec, ok := query.Spec.(qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]); ok {
|
||||
for _, agg := range spec.Aggregations {
|
||||
@@ -118,44 +116,6 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
}
|
||||
}
|
||||
switch spec := query.Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
if spec.StepInterval.Seconds() == 0 {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.RecommendedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
if spec.StepInterval.Seconds() == 0 {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.RecommendedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
if spec.StepInterval.Seconds() == 0 {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.RecommendedStepIntervalForMetric(req.Start, req.End)),
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)),
|
||||
}
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -197,13 +157,13 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
|
||||
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||
bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
|
||||
bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType)
|
||||
queries[spec.Name] = bq
|
||||
steps[spec.Name] = spec.StepInterval
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
|
||||
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||
bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
|
||||
bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType)
|
||||
queries[spec.Name] = bq
|
||||
steps[spec.Name] = spec.StepInterval
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
@@ -216,7 +176,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
|
||||
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||
bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
|
||||
bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType)
|
||||
queries[spec.Name] = bq
|
||||
steps[spec.Name] = spec.StepInterval
|
||||
default:
|
||||
@@ -401,15 +361,15 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
|
||||
case *builderQuery[qbtypes.TraceAggregation]:
|
||||
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
|
||||
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
|
||||
return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
|
||||
return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
|
||||
case *builderQuery[qbtypes.LogAggregation]:
|
||||
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
|
||||
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
|
||||
return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
|
||||
return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
|
||||
case *builderQuery[qbtypes.MetricAggregation]:
|
||||
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
|
||||
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
|
||||
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
|
||||
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -79,7 +79,6 @@ func newProvider(
|
||||
traceConditionBuilder,
|
||||
resourceFilterStmtBuilder,
|
||||
traceAggExprRewriter,
|
||||
telemetryStore,
|
||||
)
|
||||
|
||||
// Create log statement builder
|
||||
|
||||
@@ -20,8 +20,6 @@ import (
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||
@@ -41,6 +39,7 @@ import (
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
traceFunnelsModule "github.com/SigNoz/signoz/pkg/modules/tracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
|
||||
@@ -56,6 +55,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
|
||||
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/contextlinks"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
@@ -66,6 +66,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/opamptypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
@@ -1584,6 +1585,7 @@ func (aH *APIHandler) registerEvent(w http.ResponseWriter, r *http.Request) {
|
||||
switch request.EventType {
|
||||
case model.TrackEvent:
|
||||
telemetry.GetInstance().SendEvent(request.EventName, request.Attributes, claims.Email, request.RateLimited, true)
|
||||
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, request.EventName, request.Attributes)
|
||||
case model.GroupEvent:
|
||||
telemetry.GetInstance().SendGroupEvent(request.Attributes, claims.Email)
|
||||
case model.IdentifyEvent:
|
||||
@@ -2023,7 +2025,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
_, errv2 := aH.Signoz.Modules.User.Register(r.Context(), &req)
|
||||
user, errv2 := aH.Signoz.Modules.User.Register(r.Context(), &req)
|
||||
if errv2 != nil {
|
||||
render.Error(w, errv2)
|
||||
return
|
||||
@@ -2033,7 +2035,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
|
||||
// from here onwards, we expect admin (owner) to invite other users.
|
||||
aH.SetupCompleted = true
|
||||
|
||||
aH.Respond(w, nil)
|
||||
aH.Respond(w, user)
|
||||
}
|
||||
|
||||
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
|
||||
@@ -5232,4 +5234,421 @@ func (aH *APIHandler) RegisterTraceFunnelsRoutes(router *mux.Router, am *middlew
|
||||
traceFunnelsRouter.HandleFunc("/{funnel_id}",
|
||||
am.EditAccess(aH.Signoz.Handlers.TraceFunnel.UpdateFunnel)).
|
||||
Methods(http.MethodPut)
|
||||
|
||||
// Analytics endpoints
|
||||
traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/validate", aH.handleValidateTraces).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/overview", aH.handleFunnelAnalytics).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/steps", aH.handleStepAnalytics).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/steps/overview", aH.handleFunnelStepAnalytics).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/slow-traces", aH.handleFunnelSlowTraces).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/error-traces", aH.handleFunnelErrorTraces).Methods("POST")
|
||||
|
||||
// Analytics endpoints
|
||||
traceFunnelsRouter.HandleFunc("/analytics/validate", aH.handleValidateTracesWithPayload).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/analytics/overview", aH.handleFunnelAnalyticsWithPayload).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/analytics/steps", aH.handleStepAnalyticsWithPayload).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/analytics/steps/overview", aH.handleFunnelStepAnalyticsWithPayload).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/analytics/slow-traces", aH.handleFunnelSlowTracesWithPayload).Methods("POST")
|
||||
traceFunnelsRouter.HandleFunc("/analytics/error-traces", aH.handleFunnelErrorTracesWithPayload).Methods("POST")
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleValidateTraces(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), valuer.MustNewUUID(funnelID), valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var timeRange traceFunnels.TimeRange
|
||||
if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding time range: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
if len(funnel.Steps) < 2 {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("funnel must have at least 2 steps")}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.ValidateTraces(funnel, timeRange)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelAnalytics(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), valuer.MustNewUUID(funnelID), valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var stepTransition traceFunnels.StepTransitionRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&stepTransition); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding time range: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetFunnelAnalytics(funnel, stepTransition.TimeRange)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelStepAnalytics(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), valuer.MustNewUUID(funnelID), valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var stepTransition traceFunnels.StepTransitionRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&stepTransition); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding time range: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetFunnelStepAnalytics(funnel, stepTransition.TimeRange, stepTransition.StepStart, stepTransition.StepEnd)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleStepAnalytics(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), valuer.MustNewUUID(funnelID), valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var timeRange traceFunnels.TimeRange
|
||||
if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding time range: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetStepAnalytics(funnel, timeRange)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelSlowTraces(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), valuer.MustNewUUID(funnelID), valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var req traceFunnels.StepTransitionRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid request body: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetSlowestTraces(funnel, req.TimeRange, req.StepStart, req.StepEnd)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelErrorTraces(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
funnelID := vars["funnel_id"]
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), valuer.MustNewUUID(funnelID), valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
var req traceFunnels.StepTransitionRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("invalid request body: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetErroredTraces(funnel, req.TimeRange, req.StepStart, req.StepEnd)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleValidateTracesWithPayload(w http.ResponseWriter, r *http.Request) {
|
||||
var req traceFunnels.PostableFunnel
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding request: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
if len(req.Steps) < 2 {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("funnel must have at least 2 steps")}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
// Create a StorableFunnel from the request
|
||||
funnel := &traceFunnels.StorableFunnel{
|
||||
Steps: req.Steps,
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.ValidateTraces(funnel, traceFunnels.TimeRange{
|
||||
StartTime: req.StartTime,
|
||||
EndTime: req.EndTime,
|
||||
})
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelAnalyticsWithPayload(w http.ResponseWriter, r *http.Request) {
|
||||
var req traceFunnels.PostableFunnel
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding request: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
funnel := &traceFunnels.StorableFunnel{
|
||||
Steps: req.Steps,
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetFunnelAnalytics(funnel, traceFunnels.TimeRange{
|
||||
StartTime: req.StartTime,
|
||||
EndTime: req.EndTime,
|
||||
})
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleStepAnalyticsWithPayload(w http.ResponseWriter, r *http.Request) {
|
||||
var req traceFunnels.PostableFunnel
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding request: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
funnel := &traceFunnels.StorableFunnel{
|
||||
Steps: req.Steps,
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetStepAnalytics(funnel, traceFunnels.TimeRange{
|
||||
StartTime: req.StartTime,
|
||||
EndTime: req.EndTime,
|
||||
})
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelStepAnalyticsWithPayload(w http.ResponseWriter, r *http.Request) {
|
||||
var req traceFunnels.PostableFunnel
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding request: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
funnel := &traceFunnels.StorableFunnel{
|
||||
Steps: req.Steps,
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetFunnelStepAnalytics(funnel, traceFunnels.TimeRange{
|
||||
StartTime: req.StartTime,
|
||||
EndTime: req.EndTime,
|
||||
}, req.StepStart, req.StepEnd)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelSlowTracesWithPayload(w http.ResponseWriter, r *http.Request) {
|
||||
var req traceFunnels.PostableFunnel
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding request: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
funnel := &traceFunnels.StorableFunnel{
|
||||
Steps: req.Steps,
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetSlowestTraces(funnel, traceFunnels.TimeRange{
|
||||
StartTime: req.StartTime,
|
||||
EndTime: req.EndTime,
|
||||
}, req.StepStart, req.StepEnd)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleFunnelErrorTracesWithPayload(w http.ResponseWriter, r *http.Request) {
|
||||
var req traceFunnels.PostableFunnel
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding request: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
funnel := &traceFunnels.StorableFunnel{
|
||||
Steps: req.Steps,
|
||||
}
|
||||
|
||||
chq, err := traceFunnelsModule.GetErroredTraces(funnel, traceFunnels.TimeRange{
|
||||
StartTime: req.StartTime,
|
||||
EndTime: req.EndTime,
|
||||
}, req.StepStart, req.StepEnd)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
|
||||
results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
|
||||
return
|
||||
}
|
||||
aH.Respond(w, results)
|
||||
}
|
||||
|
||||
@@ -217,7 +217,6 @@ func jsonReplaceField(filter v3.FilterItem, fields map[string]v3.AttributeKey) v
|
||||
}
|
||||
|
||||
func parseStrValue(valueStr string, operator v3.FilterOperator) (string, interface{}) {
|
||||
|
||||
valueType := "string"
|
||||
|
||||
// for the following operators it will always be string
|
||||
@@ -229,12 +228,12 @@ func parseStrValue(valueStr string, operator v3.FilterOperator) (string, interfa
|
||||
|
||||
var err error
|
||||
var parsedValue interface{}
|
||||
if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
|
||||
valueType = "bool"
|
||||
} else if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
|
||||
if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
|
||||
valueType = "int64"
|
||||
} else if parsedValue, err = strconv.ParseFloat(valueStr, 64); err == nil {
|
||||
valueType = "float64"
|
||||
} else if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
|
||||
valueType = "bool"
|
||||
} else {
|
||||
parsedValue = valueStr
|
||||
valueType = "string"
|
||||
|
||||
@@ -519,6 +519,28 @@ var testJSONFilterEnrichData = []struct {
|
||||
Value: 10,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "int64 string",
|
||||
Filter: v3.FilterItem{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "body.intx",
|
||||
DataType: v3.AttributeKeyDataTypeUnspecified,
|
||||
Type: v3.AttributeKeyTypeUnspecified,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "0",
|
||||
},
|
||||
Result: v3.FilterItem{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "body.intx",
|
||||
DataType: v3.AttributeKeyDataTypeInt64,
|
||||
Type: v3.AttributeKeyTypeUnspecified,
|
||||
IsJSON: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: int64(0),
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "float64",
|
||||
Filter: v3.FilterItem{
|
||||
|
||||
@@ -148,6 +148,60 @@ func BuildTracesFilterQuery(fs *v3.FilterSet) (string, error) {
|
||||
return queryString, nil
|
||||
}
|
||||
|
||||
// TODO: remove this function as this is identical to BuildTracesFilterQuery
|
||||
func BuildTracesFilter(fs *v3.FilterSet) (string, error) {
|
||||
var conditions []string
|
||||
|
||||
if fs != nil && len(fs.Items) != 0 {
|
||||
for _, item := range fs.Items {
|
||||
val := item.Value
|
||||
// generate the key
|
||||
columnName := getColumnName(item.Key)
|
||||
var fmtVal string
|
||||
item.Operator = v3.FilterOperator(strings.ToLower(strings.TrimSpace(string(item.Operator))))
|
||||
if item.Operator != v3.FilterOperatorExists && item.Operator != v3.FilterOperatorNotExists {
|
||||
var err error
|
||||
val, err = utils.ValidateAndCastValue(val, item.Key.DataType)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("invalid value for key %s: %v", item.Key.Key, err)
|
||||
}
|
||||
}
|
||||
if val != nil {
|
||||
fmtVal = utils.ClickHouseFormattedValue(val)
|
||||
}
|
||||
if operator, ok := tracesOperatorMappingV3[item.Operator]; ok {
|
||||
switch item.Operator {
|
||||
case v3.FilterOperatorContains, v3.FilterOperatorNotContains:
|
||||
// we also want to treat %, _ as literals for contains
|
||||
val := utils.QuoteEscapedStringForContains(fmt.Sprintf("%s", item.Value), false)
|
||||
conditions = append(conditions, fmt.Sprintf("%s %s '%%%s%%'", columnName, operator, val))
|
||||
case v3.FilterOperatorRegex, v3.FilterOperatorNotRegex:
|
||||
conditions = append(conditions, fmt.Sprintf(operator, columnName, fmtVal))
|
||||
case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
|
||||
if item.Key.IsColumn {
|
||||
subQuery, err := existsSubQueryForFixedColumn(item.Key, item.Operator)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
conditions = append(conditions, subQuery)
|
||||
} else {
|
||||
cType := getClickHouseTracesColumnType(item.Key.Type)
|
||||
cDataType := getClickHouseTracesColumnDataType(item.Key.DataType)
|
||||
col := fmt.Sprintf("%s_%s", cType, cDataType)
|
||||
conditions = append(conditions, fmt.Sprintf(operator, col, item.Key.Key))
|
||||
}
|
||||
|
||||
default:
|
||||
conditions = append(conditions, fmt.Sprintf("%s %s %s", columnName, operator, fmtVal))
|
||||
}
|
||||
} else {
|
||||
return "", fmt.Errorf("unsupported operator %s", item.Operator)
|
||||
}
|
||||
}
|
||||
}
|
||||
return strings.Join(conditions, " AND "), nil
|
||||
}
|
||||
|
||||
func handleEmptyValuesInGroupBy(groupBy []v3.AttributeKey) (string, error) {
|
||||
// TODO(nitya): in future when we support user based mat column handle them
|
||||
// skipping now as we don't support creating them
|
||||
|
||||
@@ -65,11 +65,19 @@ func Parse(filters *v3.FilterSet) (string, error) {
|
||||
case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
|
||||
// accustom log filters like `body.log.message EXISTS` into EXPR language
|
||||
// where User is attempting to check for keys present in JSON log body
|
||||
key, found := strings.CutPrefix(v.Key.Key, "body.")
|
||||
if found {
|
||||
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(key), logOperatorsToExpr[v.Operator], "fromJSON(body)")
|
||||
if strings.HasPrefix(v.Key.Key, "body.") {
|
||||
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
|
||||
} else if typ := getTypeName(v.Key.Type); typ != "" {
|
||||
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
|
||||
} else {
|
||||
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))
|
||||
// if type of key is not available; is considered as TOP LEVEL key in OTEL Log Data model hence
|
||||
// switch Exist and Not Exists operators with NOT EQUAL and EQUAL respectively
|
||||
operator := v3.FilterOperatorNotEqual
|
||||
if v.Operator == v3.FilterOperatorNotExists {
|
||||
operator = v3.FilterOperatorEqual
|
||||
}
|
||||
|
||||
filter = fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])
|
||||
}
|
||||
default:
|
||||
filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))
|
||||
|
||||
@@ -141,6 +141,34 @@ var testCases = []struct {
|
||||
}},
|
||||
Expr: `"key" not in attributes`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `trace_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `trace_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id not exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
}},
|
||||
Expr: `span_id == nil`,
|
||||
},
|
||||
{
|
||||
Name: "span_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
Expr: `span_id != nil`,
|
||||
},
|
||||
{
|
||||
Name: "Multi filter",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
|
||||
@@ -1,191 +0,0 @@
|
||||
package querybuilder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
// HavingExpressionRewriter rewrites having expressions to use the correct SQL column names
|
||||
type HavingExpressionRewriter struct {
|
||||
// Map of user-friendly names to SQL column names
|
||||
columnMap map[string]string
|
||||
}
|
||||
|
||||
// NewHavingExpressionRewriter creates a new having expression rewriter
|
||||
func NewHavingExpressionRewriter() *HavingExpressionRewriter {
|
||||
return &HavingExpressionRewriter{
|
||||
columnMap: make(map[string]string),
|
||||
}
|
||||
}
|
||||
|
||||
// RewriteForTraces rewrites having expression for trace queries
|
||||
func (r *HavingExpressionRewriter) RewriteForTraces(expression string, aggregations []qbtypes.TraceAggregation) string {
|
||||
r.buildTraceColumnMap(aggregations)
|
||||
return r.rewriteExpression(expression)
|
||||
}
|
||||
|
||||
// RewriteForLogs rewrites having expression for log queries
|
||||
func (r *HavingExpressionRewriter) RewriteForLogs(expression string, aggregations []qbtypes.LogAggregation) string {
|
||||
r.buildLogColumnMap(aggregations)
|
||||
return r.rewriteExpression(expression)
|
||||
}
|
||||
|
||||
// RewriteForMetrics rewrites having expression for metric queries
|
||||
func (r *HavingExpressionRewriter) RewriteForMetrics(expression string, aggregations []qbtypes.MetricAggregation) string {
|
||||
r.buildMetricColumnMap(aggregations)
|
||||
return r.rewriteExpression(expression)
|
||||
}
|
||||
|
||||
// buildTraceColumnMap builds the column mapping for trace aggregations
|
||||
func (r *HavingExpressionRewriter) buildTraceColumnMap(aggregations []qbtypes.TraceAggregation) {
|
||||
r.columnMap = make(map[string]string)
|
||||
|
||||
for idx, agg := range aggregations {
|
||||
sqlColumn := fmt.Sprintf("__result_%d", idx)
|
||||
|
||||
// Map alias if present
|
||||
if agg.Alias != "" {
|
||||
r.columnMap[agg.Alias] = sqlColumn
|
||||
}
|
||||
|
||||
// Map expression
|
||||
r.columnMap[agg.Expression] = sqlColumn
|
||||
|
||||
// Map __result{number} format
|
||||
r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
|
||||
|
||||
// For single aggregation, also map __result
|
||||
if len(aggregations) == 1 {
|
||||
r.columnMap["__result"] = sqlColumn
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// buildLogColumnMap builds the column mapping for log aggregations
|
||||
func (r *HavingExpressionRewriter) buildLogColumnMap(aggregations []qbtypes.LogAggregation) {
|
||||
r.columnMap = make(map[string]string)
|
||||
|
||||
for idx, agg := range aggregations {
|
||||
sqlColumn := fmt.Sprintf("__result_%d", idx)
|
||||
|
||||
// Map alias if present
|
||||
if agg.Alias != "" {
|
||||
r.columnMap[agg.Alias] = sqlColumn
|
||||
}
|
||||
|
||||
// Map expression
|
||||
r.columnMap[agg.Expression] = sqlColumn
|
||||
|
||||
// Map __result{number} format
|
||||
r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
|
||||
|
||||
// For single aggregation, also map __result
|
||||
if len(aggregations) == 1 {
|
||||
r.columnMap["__result"] = sqlColumn
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// buildMetricColumnMap builds the column mapping for metric aggregations
|
||||
func (r *HavingExpressionRewriter) buildMetricColumnMap(aggregations []qbtypes.MetricAggregation) {
|
||||
r.columnMap = make(map[string]string)
|
||||
|
||||
// For metrics, we typically have a single aggregation that results in "value" column
|
||||
// But we still need to handle the mapping for consistency
|
||||
|
||||
for idx, agg := range aggregations {
|
||||
// For metrics, the column is usually "value" in the final select
|
||||
sqlColumn := "value"
|
||||
|
||||
// Map different metric formats
|
||||
metricName := agg.MetricName
|
||||
|
||||
// Don't map the plain metric name - it's ambiguous
|
||||
// r.columnMap[metricName] = sqlColumn
|
||||
|
||||
// Map with space aggregation
|
||||
if agg.SpaceAggregation.StringValue() != "" {
|
||||
r.columnMap[fmt.Sprintf("%s(%s)", agg.SpaceAggregation.StringValue(), metricName)] = sqlColumn
|
||||
}
|
||||
|
||||
// Map with time aggregation
|
||||
if agg.TimeAggregation.StringValue() != "" {
|
||||
r.columnMap[fmt.Sprintf("%s(%s)", agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
|
||||
}
|
||||
|
||||
// Map with both aggregations
|
||||
if agg.TimeAggregation.StringValue() != "" && agg.SpaceAggregation.StringValue() != "" {
|
||||
r.columnMap[fmt.Sprintf("%s(%s(%s))", agg.SpaceAggregation.StringValue(), agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
|
||||
}
|
||||
|
||||
// If no aggregations specified, map the plain metric name
|
||||
if agg.TimeAggregation.StringValue() == "" && agg.SpaceAggregation.StringValue() == "" {
|
||||
r.columnMap[metricName] = sqlColumn
|
||||
}
|
||||
|
||||
// Map __result format
|
||||
r.columnMap["__result"] = sqlColumn
|
||||
r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
|
||||
}
|
||||
}
|
||||
|
||||
// rewriteExpression rewrites the having expression using the column map
|
||||
func (r *HavingExpressionRewriter) rewriteExpression(expression string) string {
|
||||
// First, handle quoted strings to avoid replacing within them
|
||||
quotedStrings := make(map[string]string)
|
||||
quotePattern := regexp.MustCompile(`'[^']*'|"[^"]*"`)
|
||||
quotedIdx := 0
|
||||
|
||||
expression = quotePattern.ReplaceAllStringFunc(expression, func(match string) string {
|
||||
placeholder := fmt.Sprintf("__QUOTED_%d__", quotedIdx)
|
||||
quotedStrings[placeholder] = match
|
||||
quotedIdx++
|
||||
return placeholder
|
||||
})
|
||||
|
||||
// Sort column mappings by length (descending) to handle longer names first
|
||||
// This prevents partial replacements (e.g., "count" being replaced in "count_distinct")
|
||||
type mapping struct {
|
||||
from string
|
||||
to string
|
||||
}
|
||||
|
||||
mappings := make([]mapping, 0, len(r.columnMap))
|
||||
for from, to := range r.columnMap {
|
||||
mappings = append(mappings, mapping{from: from, to: to})
|
||||
}
|
||||
|
||||
// Sort by length descending
|
||||
for i := 0; i < len(mappings); i++ {
|
||||
for j := i + 1; j < len(mappings); j++ {
|
||||
if len(mappings[j].from) > len(mappings[i].from) {
|
||||
mappings[i], mappings[j] = mappings[j], mappings[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Apply replacements
|
||||
for _, m := range mappings {
|
||||
// For function expressions (containing parentheses), we need special handling
|
||||
if strings.Contains(m.from, "(") {
|
||||
// Escape special regex characters in the function name
|
||||
escapedFrom := regexp.QuoteMeta(m.from)
|
||||
pattern := regexp.MustCompile(`\b` + escapedFrom)
|
||||
expression = pattern.ReplaceAllString(expression, m.to)
|
||||
} else {
|
||||
// Use word boundaries to ensure we're replacing complete identifiers
|
||||
pattern := regexp.MustCompile(`\b` + regexp.QuoteMeta(m.from) + `\b`)
|
||||
expression = pattern.ReplaceAllString(expression, m.to)
|
||||
}
|
||||
}
|
||||
|
||||
// Restore quoted strings
|
||||
for placeholder, original := range quotedStrings {
|
||||
expression = strings.Replace(expression, placeholder, original, 1)
|
||||
}
|
||||
|
||||
return expression
|
||||
}
|
||||
@@ -1,281 +0,0 @@
|
||||
package querybuilder
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
func TestHavingExpressionRewriter_RewriteForTraces(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
aggregations []qbtypes.TraceAggregation
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "single aggregation with __result",
|
||||
expression: "__result > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()", Alias: ""},
|
||||
},
|
||||
expected: "__result_0 > 100",
|
||||
},
|
||||
{
|
||||
name: "single aggregation with alias",
|
||||
expression: "total_count > 100 AND total_count < 1000",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()", Alias: "total_count"},
|
||||
},
|
||||
expected: "__result_0 > 100 AND __result_0 < 1000",
|
||||
},
|
||||
{
|
||||
name: "multiple aggregations with aliases",
|
||||
expression: "error_count > 10 OR success_count > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "countIf(status = 'error')", Alias: "error_count"},
|
||||
{Expression: "countIf(status = 'success')", Alias: "success_count"},
|
||||
},
|
||||
expected: "__result_0 > 10 OR __result_1 > 100",
|
||||
},
|
||||
{
|
||||
name: "expression reference",
|
||||
expression: "count() > 50",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()", Alias: ""},
|
||||
},
|
||||
expected: "__result_0 > 50",
|
||||
},
|
||||
{
|
||||
name: "__result{number} format",
|
||||
expression: "__result0 > 10 AND __result1 < 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()", Alias: ""},
|
||||
{Expression: "sum(duration)", Alias: ""},
|
||||
},
|
||||
expected: "__result_0 > 10 AND __result_1 < 100",
|
||||
},
|
||||
{
|
||||
name: "complex expression with parentheses",
|
||||
expression: "(total > 100 AND errors < 10) OR (total < 50 AND errors = 0)",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()", Alias: "total"},
|
||||
{Expression: "countIf(error = true)", Alias: "errors"},
|
||||
},
|
||||
expected: "(__result_0 > 100 AND __result_1 < 10) OR (__result_0 < 50 AND __result_1 = 0)",
|
||||
},
|
||||
{
|
||||
name: "with quoted strings",
|
||||
expression: "status = 'active' AND count > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "status", Alias: "status"},
|
||||
{Expression: "count()", Alias: "count"},
|
||||
},
|
||||
expected: "__result_0 = 'active' AND __result_1 > 100",
|
||||
},
|
||||
{
|
||||
name: "avoid partial replacements",
|
||||
expression: "count_distinct > 10 AND count > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count_distinct(user_id)", Alias: "count_distinct"},
|
||||
{Expression: "count()", Alias: "count"},
|
||||
},
|
||||
expected: "__result_0 > 10 AND __result_1 > 100",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
rewriter := NewHavingExpressionRewriter()
|
||||
result := rewriter.RewriteForTraces(tt.expression, tt.aggregations)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected: %s, Got: %s", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestHavingExpressionRewriter_RewriteForLogs(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
aggregations []qbtypes.LogAggregation
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "single aggregation with __result",
|
||||
expression: "__result > 1000",
|
||||
aggregations: []qbtypes.LogAggregation{
|
||||
{Expression: "count()", Alias: ""},
|
||||
},
|
||||
expected: "__result_0 > 1000",
|
||||
},
|
||||
{
|
||||
name: "multiple aggregations with aliases and expressions",
|
||||
expression: "total_logs > 1000 AND avg(size) < 1024",
|
||||
aggregations: []qbtypes.LogAggregation{
|
||||
{Expression: "count()", Alias: "total_logs"},
|
||||
{Expression: "avg(size)", Alias: ""},
|
||||
},
|
||||
expected: "__result_0 > 1000 AND __result_1 < 1024",
|
||||
},
|
||||
{
|
||||
name: "complex boolean expression",
|
||||
expression: "(error_logs > 100 AND error_logs < 1000) OR warning_logs > 5000",
|
||||
aggregations: []qbtypes.LogAggregation{
|
||||
{Expression: "countIf(level = 'error')", Alias: "error_logs"},
|
||||
{Expression: "countIf(level = 'warning')", Alias: "warning_logs"},
|
||||
},
|
||||
expected: "(__result_0 > 100 AND __result_0 < 1000) OR __result_1 > 5000",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
rewriter := NewHavingExpressionRewriter()
|
||||
result := rewriter.RewriteForLogs(tt.expression, tt.aggregations)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected: %s, Got: %s", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestHavingExpressionRewriter_RewriteForMetrics(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
aggregations []qbtypes.MetricAggregation
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "metric with space aggregation",
|
||||
expression: "avg(cpu_usage) > 80",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
SpaceAggregation: metrictypes.SpaceAggregationAvg,
|
||||
},
|
||||
},
|
||||
expected: "value > 80",
|
||||
},
|
||||
{
|
||||
name: "metric with time aggregation",
|
||||
expression: "rate(requests) > 1000",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "requests",
|
||||
TimeAggregation: metrictypes.TimeAggregationRate,
|
||||
},
|
||||
},
|
||||
expected: "value > 1000",
|
||||
},
|
||||
{
|
||||
name: "metric with both aggregations",
|
||||
expression: "sum(rate(requests)) > 5000",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "requests",
|
||||
TimeAggregation: metrictypes.TimeAggregationRate,
|
||||
SpaceAggregation: metrictypes.SpaceAggregationSum,
|
||||
},
|
||||
},
|
||||
expected: "value > 5000",
|
||||
},
|
||||
{
|
||||
name: "metric with __result",
|
||||
expression: "__result < 100",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "memory_usage",
|
||||
SpaceAggregation: metrictypes.SpaceAggregationMax,
|
||||
},
|
||||
},
|
||||
expected: "value < 100",
|
||||
},
|
||||
{
|
||||
name: "metric name without aggregation",
|
||||
expression: "temperature > 30",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "temperature",
|
||||
},
|
||||
},
|
||||
expected: "value > 30",
|
||||
},
|
||||
{
|
||||
name: "complex expression with parentheses",
|
||||
expression: "(avg(cpu_usage) > 80 AND avg(cpu_usage) < 95) OR __result > 99",
|
||||
aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
SpaceAggregation: metrictypes.SpaceAggregationAvg,
|
||||
},
|
||||
},
|
||||
expected: "(value > 80 AND value < 95) OR value > 99",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
rewriter := NewHavingExpressionRewriter()
|
||||
result := rewriter.RewriteForMetrics(tt.expression, tt.aggregations)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected: %s, Got: %s", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestHavingExpressionRewriter_EdgeCases(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
aggregations []qbtypes.TraceAggregation
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "empty expression",
|
||||
expression: "",
|
||||
aggregations: []qbtypes.TraceAggregation{},
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "no matching columns",
|
||||
expression: "unknown_column > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()", Alias: "total"},
|
||||
},
|
||||
expected: "unknown_column > 100",
|
||||
},
|
||||
{
|
||||
name: "expression within quoted string",
|
||||
expression: "status = 'count() > 100' AND total > 100",
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "status", Alias: "status"},
|
||||
{Expression: "count()", Alias: "total"},
|
||||
},
|
||||
expected: "__result_0 = 'count() > 100' AND __result_1 > 100",
|
||||
},
|
||||
{
|
||||
name: "double quotes",
|
||||
expression: `name = "test" AND count > 10`,
|
||||
aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "name", Alias: "name"},
|
||||
{Expression: "count()", Alias: "count"},
|
||||
},
|
||||
expected: `__result_0 = "test" AND __result_1 > 10`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
rewriter := NewHavingExpressionRewriter()
|
||||
result := rewriter.RewriteForTraces(tt.expression, tt.aggregations)
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected: %s, Got: %s", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
937
pkg/querybuilder/never_true.go
Normal file
937
pkg/querybuilder/never_true.go
Normal file
@@ -0,0 +1,937 @@
|
||||
package querybuilder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
grammar "github.com/SigNoz/signoz/pkg/parser/grammar"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/antlr4-go/antlr/v4"
|
||||
)
|
||||
|
||||
// FieldConstraint represents a constraint on a field
|
||||
type FieldConstraint struct {
|
||||
Field string
|
||||
Operator qbtypes.FilterOperator
|
||||
Value interface{}
|
||||
Values []interface{} // For IN, NOT IN operations
|
||||
}
|
||||
|
||||
// ConstraintSet represents a set of constraints that must all be true (AND)
|
||||
type ConstraintSet struct {
|
||||
Constraints map[string][]FieldConstraint // field -> constraints
|
||||
}
|
||||
|
||||
// LogicalContradictionDetector implements the visitor pattern to detect logical contradictions
|
||||
type LogicalContradictionDetector struct {
|
||||
grammar.BaseFilterQueryVisitor
|
||||
constraintStack []*ConstraintSet // Stack of constraint sets for nested expressions
|
||||
contradictions []string
|
||||
notContextStack []bool // Stack to track NOT contexts
|
||||
}
|
||||
|
||||
// DetectContradictions analyzes a query string and returns any contradictions found
|
||||
func DetectContradictions(query string) ([]string, error) {
|
||||
// Setup ANTLR parsing pipeline
|
||||
input := antlr.NewInputStream(query)
|
||||
lexer := grammar.NewFilterQueryLexer(input)
|
||||
|
||||
// Error handling
|
||||
lexerErrorListener := NewErrorListener()
|
||||
lexer.RemoveErrorListeners()
|
||||
lexer.AddErrorListener(lexerErrorListener)
|
||||
|
||||
tokens := antlr.NewCommonTokenStream(lexer, 0)
|
||||
parser := grammar.NewFilterQueryParser(tokens)
|
||||
|
||||
parserErrorListener := NewErrorListener()
|
||||
parser.RemoveErrorListeners()
|
||||
parser.AddErrorListener(parserErrorListener)
|
||||
|
||||
// Parse the query
|
||||
tree := parser.Query()
|
||||
|
||||
// Check for syntax errors
|
||||
if len(parserErrorListener.SyntaxErrors) > 0 {
|
||||
return nil, fmt.Errorf("syntax errors: %v", parserErrorListener.SyntaxErrors)
|
||||
}
|
||||
|
||||
// Create detector and visit tree
|
||||
detector := &LogicalContradictionDetector{
|
||||
constraintStack: []*ConstraintSet{
|
||||
{Constraints: make(map[string][]FieldConstraint)},
|
||||
},
|
||||
contradictions: []string{},
|
||||
notContextStack: []bool{false},
|
||||
}
|
||||
|
||||
detector.Visit(tree)
|
||||
|
||||
// Deduplicate contradictions
|
||||
seen := make(map[string]bool)
|
||||
unique := []string{}
|
||||
for _, c := range detector.contradictions {
|
||||
if !seen[c] {
|
||||
seen[c] = true
|
||||
unique = append(unique, c)
|
||||
}
|
||||
}
|
||||
|
||||
return unique, nil
|
||||
}
|
||||
|
||||
// Helper methods for constraint stack
|
||||
func (d *LogicalContradictionDetector) currentConstraints() *ConstraintSet {
|
||||
return d.constraintStack[len(d.constraintStack)-1]
|
||||
}
|
||||
|
||||
// Helper methods for NOT context
|
||||
func (d *LogicalContradictionDetector) inNotContext() bool {
|
||||
return d.notContextStack[len(d.notContextStack)-1]
|
||||
}
|
||||
|
||||
func (d *LogicalContradictionDetector) pushNotContext(value bool) {
|
||||
d.notContextStack = append(d.notContextStack, value)
|
||||
}
|
||||
|
||||
func (d *LogicalContradictionDetector) popNotContext() {
|
||||
if len(d.notContextStack) > 1 {
|
||||
d.notContextStack = d.notContextStack[:len(d.notContextStack)-1]
|
||||
}
|
||||
}
|
||||
|
||||
// Visit dispatches to the appropriate visit method
|
||||
func (d *LogicalContradictionDetector) Visit(tree antlr.ParseTree) interface{} {
|
||||
if tree == nil {
|
||||
return nil
|
||||
}
|
||||
return tree.Accept(d)
|
||||
}
|
||||
|
||||
// VisitQuery is the entry point
|
||||
func (d *LogicalContradictionDetector) VisitQuery(ctx *grammar.QueryContext) interface{} {
|
||||
d.Visit(ctx.Expression())
|
||||
// Check final constraints
|
||||
d.checkContradictions(d.currentConstraints())
|
||||
return nil
|
||||
}
|
||||
|
||||
// VisitExpression just passes through to OrExpression
|
||||
func (d *LogicalContradictionDetector) VisitExpression(ctx *grammar.ExpressionContext) interface{} {
|
||||
return d.Visit(ctx.OrExpression())
|
||||
}
|
||||
|
||||
// VisitOrExpression handles OR logic
|
||||
func (d *LogicalContradictionDetector) VisitOrExpression(ctx *grammar.OrExpressionContext) interface{} {
|
||||
andExpressions := ctx.AllAndExpression()
|
||||
|
||||
if len(andExpressions) == 1 {
|
||||
// Single AND expression, just visit it
|
||||
return d.Visit(andExpressions[0])
|
||||
}
|
||||
|
||||
// Multiple AND expressions connected by OR
|
||||
// Visit each branch to find contradictions within branches
|
||||
for _, andExpr := range andExpressions {
|
||||
// Save current constraints
|
||||
savedConstraints := d.cloneConstraintSet(d.currentConstraints())
|
||||
|
||||
// Visit the AND expression
|
||||
d.Visit(andExpr)
|
||||
|
||||
// Restore constraints for next branch
|
||||
d.constraintStack[len(d.constraintStack)-1] = savedConstraints
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// VisitAndExpression handles AND logic (including implicit AND)
|
||||
func (d *LogicalContradictionDetector) VisitAndExpression(ctx *grammar.AndExpressionContext) interface{} {
|
||||
unaryExpressions := ctx.AllUnaryExpression()
|
||||
|
||||
// Visit each unary expression, accumulating constraints
|
||||
for _, unaryExpr := range unaryExpressions {
|
||||
d.Visit(unaryExpr)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// VisitUnaryExpression handles NOT operator
|
||||
func (d *LogicalContradictionDetector) VisitUnaryExpression(ctx *grammar.UnaryExpressionContext) interface{} {
|
||||
hasNot := ctx.NOT() != nil
|
||||
|
||||
if hasNot {
|
||||
// Push new NOT context (toggle current value)
|
||||
d.pushNotContext(!d.inNotContext())
|
||||
}
|
||||
|
||||
result := d.Visit(ctx.Primary())
|
||||
|
||||
if hasNot {
|
||||
// Pop NOT context
|
||||
d.popNotContext()
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// VisitPrimary handles different primary expressions
|
||||
func (d *LogicalContradictionDetector) VisitPrimary(ctx *grammar.PrimaryContext) interface{} {
|
||||
if ctx.OrExpression() != nil {
|
||||
// Parenthesized expression
|
||||
// If we're in an AND context, we continue with the same constraint set
|
||||
// Otherwise, we need to handle it specially
|
||||
return d.Visit(ctx.OrExpression())
|
||||
} else if ctx.Comparison() != nil {
|
||||
return d.Visit(ctx.Comparison())
|
||||
} else if ctx.FunctionCall() != nil {
|
||||
// Handle function calls if needed
|
||||
return nil
|
||||
} else if ctx.FullText() != nil {
|
||||
// Handle full text search if needed
|
||||
return nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// VisitComparison extracts constraints from comparisons
|
||||
func (d *LogicalContradictionDetector) VisitComparison(ctx *grammar.ComparisonContext) interface{} {
|
||||
if ctx.Key() == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
field := ctx.Key().GetText()
|
||||
notContext := d.inNotContext()
|
||||
|
||||
// Handle EXISTS
|
||||
if ctx.EXISTS() != nil {
|
||||
operator := qbtypes.FilterOperatorExists
|
||||
if ctx.NOT() != nil {
|
||||
operator = qbtypes.FilterOperatorNotExists
|
||||
}
|
||||
// Apply NOT context
|
||||
if notContext {
|
||||
operator = negateOperator(operator)
|
||||
}
|
||||
constraint := FieldConstraint{
|
||||
Field: field,
|
||||
Operator: operator,
|
||||
}
|
||||
d.addConstraint(constraint)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle IN/NOT IN
|
||||
if ctx.InClause() != nil {
|
||||
values := d.extractValueList(ctx.InClause().(*grammar.InClauseContext).ValueList())
|
||||
operator := qbtypes.FilterOperatorIn
|
||||
if notContext {
|
||||
operator = negateOperator(operator)
|
||||
}
|
||||
constraint := FieldConstraint{
|
||||
Field: field,
|
||||
Operator: operator,
|
||||
Values: values,
|
||||
}
|
||||
d.addConstraint(constraint)
|
||||
return nil
|
||||
}
|
||||
|
||||
if ctx.NotInClause() != nil {
|
||||
values := d.extractValueList(ctx.NotInClause().(*grammar.NotInClauseContext).ValueList())
|
||||
operator := qbtypes.FilterOperatorNotIn
|
||||
if notContext {
|
||||
operator = negateOperator(operator)
|
||||
}
|
||||
constraint := FieldConstraint{
|
||||
Field: field,
|
||||
Operator: operator,
|
||||
Values: values,
|
||||
}
|
||||
d.addConstraint(constraint)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle BETWEEN
|
||||
if ctx.BETWEEN() != nil {
|
||||
values := ctx.AllValue()
|
||||
if len(values) == 2 {
|
||||
val1 := d.extractValue(values[0])
|
||||
val2 := d.extractValue(values[1])
|
||||
operator := qbtypes.FilterOperatorBetween
|
||||
if ctx.NOT() != nil {
|
||||
operator = qbtypes.FilterOperatorNotBetween
|
||||
}
|
||||
// Apply NOT context
|
||||
if notContext {
|
||||
operator = negateOperator(operator)
|
||||
}
|
||||
constraint := FieldConstraint{
|
||||
Field: field,
|
||||
Operator: operator,
|
||||
Values: []interface{}{val1, val2},
|
||||
}
|
||||
d.addConstraint(constraint)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle regular comparisons
|
||||
values := ctx.AllValue()
|
||||
if len(values) > 0 {
|
||||
value := d.extractValue(values[0])
|
||||
var operator qbtypes.FilterOperator
|
||||
|
||||
if ctx.EQUALS() != nil {
|
||||
operator = qbtypes.FilterOperatorEqual
|
||||
} else if ctx.NOT_EQUALS() != nil || ctx.NEQ() != nil {
|
||||
operator = qbtypes.FilterOperatorNotEqual
|
||||
} else if ctx.LT() != nil {
|
||||
operator = qbtypes.FilterOperatorLessThan
|
||||
} else if ctx.LE() != nil {
|
||||
operator = qbtypes.FilterOperatorLessThanOrEq
|
||||
} else if ctx.GT() != nil {
|
||||
operator = qbtypes.FilterOperatorGreaterThan
|
||||
} else if ctx.GE() != nil {
|
||||
operator = qbtypes.FilterOperatorGreaterThanOrEq
|
||||
} else if ctx.LIKE() != nil {
|
||||
operator = qbtypes.FilterOperatorLike
|
||||
} else if ctx.ILIKE() != nil {
|
||||
operator = qbtypes.FilterOperatorILike
|
||||
} else if ctx.NOT_LIKE() != nil {
|
||||
operator = qbtypes.FilterOperatorNotLike
|
||||
} else if ctx.NOT_ILIKE() != nil {
|
||||
operator = qbtypes.FilterOperatorNotILike
|
||||
} else if ctx.REGEXP() != nil {
|
||||
operator = qbtypes.FilterOperatorRegexp
|
||||
if ctx.NOT() != nil {
|
||||
operator = qbtypes.FilterOperatorNotRegexp
|
||||
}
|
||||
} else if ctx.CONTAINS() != nil {
|
||||
operator = qbtypes.FilterOperatorContains
|
||||
if ctx.NOT() != nil {
|
||||
operator = qbtypes.FilterOperatorNotContains
|
||||
}
|
||||
}
|
||||
|
||||
if operator != qbtypes.FilterOperatorUnknown {
|
||||
// Apply NOT context if needed
|
||||
if notContext {
|
||||
operator = negateOperator(operator)
|
||||
}
|
||||
|
||||
constraint := FieldConstraint{
|
||||
Field: field,
|
||||
Operator: operator,
|
||||
Value: value,
|
||||
}
|
||||
d.addConstraint(constraint)
|
||||
}
|
||||
}
|
||||
|
||||
// Check for contradictions after adding this constraint
|
||||
d.checkContradictions(d.currentConstraints())
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// extractValue extracts the actual value from a ValueContext
|
||||
func (d *LogicalContradictionDetector) extractValue(ctx grammar.IValueContext) interface{} {
|
||||
if ctx.QUOTED_TEXT() != nil {
|
||||
text := ctx.QUOTED_TEXT().GetText()
|
||||
// Remove quotes
|
||||
if len(text) >= 2 {
|
||||
return text[1 : len(text)-1]
|
||||
}
|
||||
return text
|
||||
} else if ctx.NUMBER() != nil {
|
||||
return ctx.NUMBER().GetText()
|
||||
} else if ctx.BOOL() != nil {
|
||||
return ctx.BOOL().GetText()
|
||||
} else if ctx.KEY() != nil {
|
||||
return ctx.KEY().GetText()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// extractValueList extracts values from a ValueListContext
|
||||
func (d *LogicalContradictionDetector) extractValueList(ctx grammar.IValueListContext) []interface{} {
|
||||
if ctx == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
values := []interface{}{}
|
||||
for _, val := range ctx.AllValue() {
|
||||
values = append(values, d.extractValue(val))
|
||||
}
|
||||
return values
|
||||
}
|
||||
|
||||
// addConstraint adds a constraint to the current set
|
||||
func (d *LogicalContradictionDetector) addConstraint(constraint FieldConstraint) {
|
||||
constraints := d.currentConstraints()
|
||||
|
||||
// For positive operators that imply existence, add an implicit EXISTS constraint
|
||||
// This mirrors the behavior of AddDefaultExistsFilter in the FilterOperator type
|
||||
if constraint.Operator.AddDefaultExistsFilter() && !isNegativeOperator(constraint.Operator) {
|
||||
// The field must exist for positive predicates
|
||||
// This helps detect contradictions like: field = "value" AND field NOT EXISTS
|
||||
existsConstraint := FieldConstraint{
|
||||
Field: constraint.Field,
|
||||
Operator: qbtypes.FilterOperatorExists,
|
||||
}
|
||||
constraints.Constraints[constraint.Field] = append(
|
||||
constraints.Constraints[constraint.Field],
|
||||
existsConstraint,
|
||||
)
|
||||
}
|
||||
|
||||
constraints.Constraints[constraint.Field] = append(
|
||||
constraints.Constraints[constraint.Field],
|
||||
constraint,
|
||||
)
|
||||
}
|
||||
|
||||
// checkContradictions checks the given constraint set for contradictions
|
||||
func (d *LogicalContradictionDetector) checkContradictions(constraintSet *ConstraintSet) {
|
||||
for field, constraints := range constraintSet.Constraints {
|
||||
if len(constraints) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Check for contradictions in this field's constraints
|
||||
contradictions := d.findContradictionsInConstraints(field, constraints)
|
||||
d.contradictions = append(d.contradictions, contradictions...)
|
||||
}
|
||||
}
|
||||
|
||||
// findContradictionsInConstraints checks if a set of constraints on the same field contradict
|
||||
func (d *LogicalContradictionDetector) findContradictionsInConstraints(field string, constraints []FieldConstraint) []string {
|
||||
contradictions := []string{}
|
||||
|
||||
// Group constraints by type for easier checking
|
||||
var equalConstraints []FieldConstraint
|
||||
var notEqualConstraints []FieldConstraint
|
||||
var rangeConstraints []FieldConstraint
|
||||
var inConstraints []FieldConstraint
|
||||
var notInConstraints []FieldConstraint
|
||||
var existsConstraints []FieldConstraint
|
||||
var notExistsConstraints []FieldConstraint
|
||||
var betweenConstraints []FieldConstraint
|
||||
var notBetweenConstraints []FieldConstraint
|
||||
var likeConstraints []FieldConstraint
|
||||
|
||||
for _, c := range constraints {
|
||||
switch c.Operator {
|
||||
case qbtypes.FilterOperatorEqual:
|
||||
equalConstraints = append(equalConstraints, c)
|
||||
case qbtypes.FilterOperatorNotEqual:
|
||||
notEqualConstraints = append(notEqualConstraints, c)
|
||||
case qbtypes.FilterOperatorIn:
|
||||
inConstraints = append(inConstraints, c)
|
||||
case qbtypes.FilterOperatorNotIn:
|
||||
notInConstraints = append(notInConstraints, c)
|
||||
case qbtypes.FilterOperatorExists:
|
||||
existsConstraints = append(existsConstraints, c)
|
||||
case qbtypes.FilterOperatorNotExists:
|
||||
notExistsConstraints = append(notExistsConstraints, c)
|
||||
case qbtypes.FilterOperatorBetween:
|
||||
betweenConstraints = append(betweenConstraints, c)
|
||||
case qbtypes.FilterOperatorNotBetween:
|
||||
notBetweenConstraints = append(notBetweenConstraints, c)
|
||||
case qbtypes.FilterOperatorLike, qbtypes.FilterOperatorILike,
|
||||
qbtypes.FilterOperatorNotLike, qbtypes.FilterOperatorNotILike:
|
||||
likeConstraints = append(likeConstraints, c)
|
||||
default:
|
||||
// Handle range operators
|
||||
if isRangeOperator(c.Operator) {
|
||||
rangeConstraints = append(rangeConstraints, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for multiple different equality constraints
|
||||
if len(equalConstraints) > 1 {
|
||||
values := make(map[string]bool)
|
||||
for _, c := range equalConstraints {
|
||||
values[fmt.Sprintf("%v", c.Value)] = true
|
||||
}
|
||||
if len(values) > 1 {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' cannot equal multiple different values", field))
|
||||
}
|
||||
}
|
||||
|
||||
// Check equality vs not-equality
|
||||
for _, eq := range equalConstraints {
|
||||
for _, neq := range notEqualConstraints {
|
||||
if fmt.Sprintf("%v", eq.Value) == fmt.Sprintf("%v", neq.Value) {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' cannot both equal and not equal '%v'", field, eq.Value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check equality vs IN/NOT IN
|
||||
for _, eq := range equalConstraints {
|
||||
// Check against NOT IN
|
||||
for _, notIn := range notInConstraints {
|
||||
for _, v := range notIn.Values {
|
||||
if fmt.Sprintf("%v", eq.Value) == fmt.Sprintf("%v", v) {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' equals '%v' but is in NOT IN list", field, eq.Value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check against IN
|
||||
for _, in := range inConstraints {
|
||||
found := false
|
||||
for _, v := range in.Values {
|
||||
if fmt.Sprintf("%v", eq.Value) == fmt.Sprintf("%v", v) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' equals '%v' but is not in IN list", field, eq.Value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check IN vs NOT IN overlap
|
||||
for _, in := range inConstraints {
|
||||
for _, notIn := range notInConstraints {
|
||||
overlap := []string{}
|
||||
for _, inVal := range in.Values {
|
||||
for _, notInVal := range notIn.Values {
|
||||
if fmt.Sprintf("%v", inVal) == fmt.Sprintf("%v", notInVal) {
|
||||
overlap = append(overlap, fmt.Sprintf("%v", inVal))
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(overlap) > 0 {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' has overlapping IN and NOT IN values: %v", field, overlap))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check range contradictions
|
||||
if len(rangeConstraints) > 0 {
|
||||
if impossible := d.checkRangeContradictions(rangeConstraints); impossible {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' has contradictory range constraints", field))
|
||||
}
|
||||
}
|
||||
|
||||
// Check equality vs range
|
||||
for _, eq := range equalConstraints {
|
||||
if !d.valuesSatisfyRanges(eq.Value, rangeConstraints) {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' equals '%v' which violates range constraints", field, eq.Value))
|
||||
}
|
||||
}
|
||||
|
||||
// Check EXISTS contradictions
|
||||
if len(existsConstraints) > 0 && len(notExistsConstraints) > 0 {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' cannot both exist and not exist", field))
|
||||
}
|
||||
|
||||
// Check if NOT EXISTS contradicts with operators that imply existence
|
||||
if len(notExistsConstraints) > 0 {
|
||||
for _, c := range constraints {
|
||||
if c.Operator.AddDefaultExistsFilter() && !isNegativeOperator(c.Operator) {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' has NOT EXISTS but also has %v which implies existence",
|
||||
field, c.Operator))
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check BETWEEN contradictions - need to check if ALL ranges have a common intersection
|
||||
if len(betweenConstraints) >= 2 {
|
||||
if !d.hasCommonIntersection(betweenConstraints) {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' has non-overlapping BETWEEN ranges", field))
|
||||
}
|
||||
}
|
||||
|
||||
// Check BETWEEN vs equality
|
||||
for _, eq := range equalConstraints {
|
||||
satisfiesAny := false
|
||||
for _, between := range betweenConstraints {
|
||||
if d.valueSatisfiesBetween(eq.Value, between) {
|
||||
satisfiesAny = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(betweenConstraints) > 0 && !satisfiesAny {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' equals '%v' which is outside BETWEEN range(s)", field, eq.Value))
|
||||
}
|
||||
}
|
||||
|
||||
// Check NOT BETWEEN vs equality
|
||||
for _, eq := range equalConstraints {
|
||||
for _, notBetween := range notBetweenConstraints {
|
||||
if d.valueSatisfiesBetween(eq.Value, notBetween) {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' equals '%v' which is excluded by NOT BETWEEN range", field, eq.Value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if BETWEEN and NOT BETWEEN ranges make it impossible to have any value
|
||||
if len(betweenConstraints) > 0 && len(notBetweenConstraints) > 0 {
|
||||
// Check if the NOT BETWEEN completely covers the BETWEEN range
|
||||
for _, between := range betweenConstraints {
|
||||
if len(between.Values) == 2 {
|
||||
bMin, err1 := parseNumericValue(between.Values[0])
|
||||
bMax, err2 := parseNumericValue(between.Values[1])
|
||||
if err1 == nil && err2 == nil {
|
||||
// Check if this BETWEEN range has any values not excluded by NOT BETWEEN
|
||||
hasValidValue := false
|
||||
// Simple check: see if the endpoints or midpoint are valid
|
||||
testValues := []float64{bMin, bMax, (bMin + bMax) / 2}
|
||||
for _, testVal := range testValues {
|
||||
valid := true
|
||||
for _, notBetween := range notBetweenConstraints {
|
||||
if d.valueSatisfiesBetween(testVal, notBetween) {
|
||||
valid = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if valid {
|
||||
hasValidValue = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !hasValidValue {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' has BETWEEN and NOT BETWEEN ranges that exclude all values", field))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check LIKE pattern contradictions with exact values
|
||||
for _, eq := range equalConstraints {
|
||||
for _, like := range likeConstraints {
|
||||
if like.Operator == qbtypes.FilterOperatorLike || like.Operator == qbtypes.FilterOperatorILike {
|
||||
pattern := fmt.Sprintf("%v", like.Value)
|
||||
value := fmt.Sprintf("%v", eq.Value)
|
||||
if !d.matchesLikePattern(value, pattern) {
|
||||
contradictions = append(contradictions,
|
||||
fmt.Sprintf("Field '%s' equals '%v' which doesn't match LIKE pattern '%v'",
|
||||
field, eq.Value, like.Value))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return contradictions
|
||||
}
|
||||
|
||||
// hasCommonIntersection checks if all BETWEEN ranges have a common intersection
|
||||
func (d *LogicalContradictionDetector) hasCommonIntersection(betweens []FieldConstraint) bool {
|
||||
if len(betweens) == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
// Find the intersection of all ranges
|
||||
var intersectionMin, intersectionMax float64
|
||||
initialized := false
|
||||
|
||||
for _, b := range betweens {
|
||||
if len(b.Values) != 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
min, err1 := parseNumericValue(b.Values[0])
|
||||
max, err2 := parseNumericValue(b.Values[1])
|
||||
if err1 != nil || err2 != nil {
|
||||
continue // Skip non-numeric ranges
|
||||
}
|
||||
|
||||
if !initialized {
|
||||
intersectionMin = min
|
||||
intersectionMax = max
|
||||
initialized = true
|
||||
} else {
|
||||
// Update intersection
|
||||
if min > intersectionMin {
|
||||
intersectionMin = min
|
||||
}
|
||||
if max < intersectionMax {
|
||||
intersectionMax = max
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If intersection is empty, ranges don't all overlap
|
||||
return !initialized || intersectionMin <= intersectionMax
|
||||
}
|
||||
|
||||
// checkRangeContradictions checks if range constraints are satisfiable
|
||||
func (d *LogicalContradictionDetector) checkRangeContradictions(constraints []FieldConstraint) bool {
|
||||
// We need to find if there's any value that satisfies all constraints
|
||||
|
||||
var lowerBounds []struct {
|
||||
value float64
|
||||
inclusive bool
|
||||
}
|
||||
var upperBounds []struct {
|
||||
value float64
|
||||
inclusive bool
|
||||
}
|
||||
|
||||
for _, c := range constraints {
|
||||
val, err := parseNumericValue(c.Value)
|
||||
if err != nil {
|
||||
continue // Skip non-numeric values
|
||||
}
|
||||
|
||||
switch c.Operator {
|
||||
case qbtypes.FilterOperatorGreaterThan:
|
||||
lowerBounds = append(lowerBounds, struct {
|
||||
value float64
|
||||
inclusive bool
|
||||
}{val, false})
|
||||
case qbtypes.FilterOperatorGreaterThanOrEq:
|
||||
lowerBounds = append(lowerBounds, struct {
|
||||
value float64
|
||||
inclusive bool
|
||||
}{val, true})
|
||||
case qbtypes.FilterOperatorLessThan:
|
||||
upperBounds = append(upperBounds, struct {
|
||||
value float64
|
||||
inclusive bool
|
||||
}{val, false})
|
||||
case qbtypes.FilterOperatorLessThanOrEq:
|
||||
upperBounds = append(upperBounds, struct {
|
||||
value float64
|
||||
inclusive bool
|
||||
}{val, true})
|
||||
}
|
||||
}
|
||||
|
||||
// Find the most restrictive lower bound
|
||||
var effectiveLower *float64
|
||||
lowerInclusive := false
|
||||
for _, lb := range lowerBounds {
|
||||
if effectiveLower == nil || lb.value > *effectiveLower ||
|
||||
(lb.value == *effectiveLower && !lb.inclusive && lowerInclusive) {
|
||||
effectiveLower = &lb.value
|
||||
lowerInclusive = lb.inclusive
|
||||
}
|
||||
}
|
||||
|
||||
// Find the most restrictive upper bound
|
||||
var effectiveUpper *float64
|
||||
upperInclusive := false
|
||||
for _, ub := range upperBounds {
|
||||
if effectiveUpper == nil || ub.value < *effectiveUpper ||
|
||||
(ub.value == *effectiveUpper && !ub.inclusive && upperInclusive) {
|
||||
effectiveUpper = &ub.value
|
||||
upperInclusive = ub.inclusive
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we have both bounds and they're contradictory
|
||||
if effectiveLower != nil && effectiveUpper != nil {
|
||||
if *effectiveLower > *effectiveUpper {
|
||||
return true
|
||||
}
|
||||
if *effectiveLower == *effectiveUpper && (!lowerInclusive || !upperInclusive) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// valuesSatisfyRanges checks if a value satisfies all range constraints
|
||||
func (d *LogicalContradictionDetector) valuesSatisfyRanges(value interface{}, constraints []FieldConstraint) bool {
|
||||
val, err := parseNumericValue(value)
|
||||
if err != nil {
|
||||
return true // If not numeric, we can't check
|
||||
}
|
||||
|
||||
for _, c := range constraints {
|
||||
cVal, err := parseNumericValue(c.Value)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
switch c.Operator {
|
||||
case qbtypes.FilterOperatorGreaterThan:
|
||||
if val <= cVal {
|
||||
return false
|
||||
}
|
||||
case qbtypes.FilterOperatorGreaterThanOrEq:
|
||||
if val < cVal {
|
||||
return false
|
||||
}
|
||||
case qbtypes.FilterOperatorLessThan:
|
||||
if val >= cVal {
|
||||
return false
|
||||
}
|
||||
case qbtypes.FilterOperatorLessThanOrEq:
|
||||
if val > cVal {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// valueSatisfiesBetween checks if a value is within a BETWEEN range
|
||||
func (d *LogicalContradictionDetector) valueSatisfiesBetween(value interface{}, between FieldConstraint) bool {
|
||||
if len(between.Values) != 2 {
|
||||
return false
|
||||
}
|
||||
|
||||
val, err := parseNumericValue(value)
|
||||
if err != nil {
|
||||
return true // Can't check non-numeric
|
||||
}
|
||||
|
||||
min, err1 := parseNumericValue(between.Values[0])
|
||||
max, err2 := parseNumericValue(between.Values[1])
|
||||
|
||||
if err1 != nil || err2 != nil {
|
||||
return true
|
||||
}
|
||||
|
||||
return val >= min && val <= max
|
||||
}
|
||||
|
||||
// matchesLikePattern is a simple pattern matcher for LIKE
|
||||
func (d *LogicalContradictionDetector) matchesLikePattern(value, pattern string) bool {
|
||||
// Simple implementation - just check prefix/suffix with %
|
||||
if strings.HasPrefix(pattern, "%") && strings.HasSuffix(pattern, "%") {
|
||||
return strings.Contains(value, pattern[1:len(pattern)-1])
|
||||
} else if strings.HasPrefix(pattern, "%") {
|
||||
return strings.HasSuffix(value, pattern[1:])
|
||||
} else if strings.HasSuffix(pattern, "%") {
|
||||
return strings.HasPrefix(value, pattern[:len(pattern)-1])
|
||||
}
|
||||
return value == pattern
|
||||
}
|
||||
|
||||
// cloneConstraintSet creates a deep copy of a constraint set
|
||||
func (d *LogicalContradictionDetector) cloneConstraintSet(set *ConstraintSet) *ConstraintSet {
|
||||
newSet := &ConstraintSet{
|
||||
Constraints: make(map[string][]FieldConstraint),
|
||||
}
|
||||
|
||||
for field, constraints := range set.Constraints {
|
||||
newConstraints := make([]FieldConstraint, len(constraints))
|
||||
copy(newConstraints, constraints)
|
||||
newSet.Constraints[field] = newConstraints
|
||||
}
|
||||
|
||||
return newSet
|
||||
}
|
||||
|
||||
// parseNumericValue attempts to interpret an arbitrary value as a float64.
//
// Supported inputs are all Go numeric types (signed/unsigned integers of any
// width and both float widths) and strings parseable by strconv.ParseFloat.
// Any other type yields an error. Generalized from the original, which only
// accepted float64, int, and string.
func parseNumericValue(value interface{}) (float64, error) {
	switch v := value.(type) {
	case float64:
		return v, nil
	case float32:
		return float64(v), nil
	case int:
		return float64(v), nil
	case int8:
		return float64(v), nil
	case int16:
		return float64(v), nil
	case int32:
		return float64(v), nil
	case int64:
		return float64(v), nil
	case uint:
		return float64(v), nil
	case uint8:
		return float64(v), nil
	case uint16:
		return float64(v), nil
	case uint32:
		return float64(v), nil
	case uint64:
		return float64(v), nil
	case string:
		return strconv.ParseFloat(v, 64)
	default:
		return 0, fmt.Errorf("not a numeric value")
	}
}
|
||||
|
||||
// negateOperator returns the negated version of an operator
|
||||
func negateOperator(op qbtypes.FilterOperator) qbtypes.FilterOperator {
|
||||
switch op {
|
||||
case qbtypes.FilterOperatorEqual:
|
||||
return qbtypes.FilterOperatorNotEqual
|
||||
case qbtypes.FilterOperatorNotEqual:
|
||||
return qbtypes.FilterOperatorEqual
|
||||
case qbtypes.FilterOperatorLessThan:
|
||||
return qbtypes.FilterOperatorGreaterThanOrEq
|
||||
case qbtypes.FilterOperatorLessThanOrEq:
|
||||
return qbtypes.FilterOperatorGreaterThan
|
||||
case qbtypes.FilterOperatorGreaterThan:
|
||||
return qbtypes.FilterOperatorLessThanOrEq
|
||||
case qbtypes.FilterOperatorGreaterThanOrEq:
|
||||
return qbtypes.FilterOperatorLessThan
|
||||
case qbtypes.FilterOperatorIn:
|
||||
return qbtypes.FilterOperatorNotIn
|
||||
case qbtypes.FilterOperatorNotIn:
|
||||
return qbtypes.FilterOperatorIn
|
||||
case qbtypes.FilterOperatorExists:
|
||||
return qbtypes.FilterOperatorNotExists
|
||||
case qbtypes.FilterOperatorNotExists:
|
||||
return qbtypes.FilterOperatorExists
|
||||
case qbtypes.FilterOperatorLike:
|
||||
return qbtypes.FilterOperatorNotLike
|
||||
case qbtypes.FilterOperatorNotLike:
|
||||
return qbtypes.FilterOperatorLike
|
||||
case qbtypes.FilterOperatorILike:
|
||||
return qbtypes.FilterOperatorNotILike
|
||||
case qbtypes.FilterOperatorNotILike:
|
||||
return qbtypes.FilterOperatorILike
|
||||
case qbtypes.FilterOperatorBetween:
|
||||
return qbtypes.FilterOperatorNotBetween
|
||||
case qbtypes.FilterOperatorNotBetween:
|
||||
return qbtypes.FilterOperatorBetween
|
||||
case qbtypes.FilterOperatorRegexp:
|
||||
return qbtypes.FilterOperatorNotRegexp
|
||||
case qbtypes.FilterOperatorNotRegexp:
|
||||
return qbtypes.FilterOperatorRegexp
|
||||
case qbtypes.FilterOperatorContains:
|
||||
return qbtypes.FilterOperatorNotContains
|
||||
case qbtypes.FilterOperatorNotContains:
|
||||
return qbtypes.FilterOperatorContains
|
||||
default:
|
||||
return op
|
||||
}
|
||||
}
|
||||
|
||||
// isRangeOperator returns true if the operator is a range comparison operator
|
||||
func isRangeOperator(op qbtypes.FilterOperator) bool {
|
||||
switch op {
|
||||
case qbtypes.FilterOperatorLessThan,
|
||||
qbtypes.FilterOperatorLessThanOrEq,
|
||||
qbtypes.FilterOperatorGreaterThan,
|
||||
qbtypes.FilterOperatorGreaterThanOrEq:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// isNegativeOperator returns true if the operator is a negative/exclusion operator
|
||||
func isNegativeOperator(op qbtypes.FilterOperator) bool {
|
||||
switch op {
|
||||
case qbtypes.FilterOperatorNotEqual,
|
||||
qbtypes.FilterOperatorNotIn,
|
||||
qbtypes.FilterOperatorNotExists,
|
||||
qbtypes.FilterOperatorNotLike,
|
||||
qbtypes.FilterOperatorNotILike,
|
||||
qbtypes.FilterOperatorNotBetween,
|
||||
qbtypes.FilterOperatorNotRegexp,
|
||||
qbtypes.FilterOperatorNotContains:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
395
pkg/querybuilder/never_true_test.go
Normal file
395
pkg/querybuilder/never_true_test.go
Normal file
@@ -0,0 +1,395 @@
|
||||
package querybuilder
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestContradictionDetection is a table-driven test for DetectContradictions,
// covering single-level contradictions: equality vs. inequality, numeric
// ranges, IN/NOT IN overlap, EXISTS/NOT EXISTS, BETWEEN, implicit AND
// (space-separated conditions), and NOT-wrapped expressions.
func TestContradictionDetection(t *testing.T) {
	tests := []struct {
		name             string
		query            string   // expression fed to DetectContradictions
		hasContradiction bool     // whether any contradiction is expected
		expectedErrors   []string // field names that must appear in the reported messages
	}{
		{
			name:             "Simple equality contradiction",
			query:            `service.name = 'redis' service.name='route' http.status_code=200`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name"},
		},
		{
			name:             "Equal and not equal same value",
			query:            `service.name = 'redis' AND service.name != 'redis'`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name"},
		},
		{
			name:             "Range contradiction",
			query:            `http.status_code > 500 AND http.status_code < 400`,
			hasContradiction: true,
			expectedErrors:   []string{"http.status_code"},
		},
		{
			name:             "IN and NOT IN overlap",
			query:            `service.name IN ('redis', 'mysql') AND service.name NOT IN ('redis', 'postgres')`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name"},
		},
		{
			name:             "EXISTS and NOT EXISTS",
			query:            `custom.tag EXISTS AND custom.tag NOT EXISTS`,
			hasContradiction: true,
			expectedErrors:   []string{"custom.tag"},
		},
		{
			name:             "Equal and NOT IN containing value",
			query:            `service.name = 'redis' AND service.name NOT IN ('redis', 'mysql')`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name"},
		},
		{
			name:             "Non-overlapping BETWEEN ranges",
			query:            `http.status_code BETWEEN 200 AND 299 AND http.status_code BETWEEN 400 AND 499`,
			hasContradiction: true,
			expectedErrors:   []string{"http.status_code"},
		},
		{
			name:             "Valid query with no contradictions",
			query:            `service.name = 'redis' AND http.status_code >= 200 AND http.status_code < 300`,
			hasContradiction: false,
			expectedErrors:   []string{},
		},
		{
			name:             "OR expression - no contradiction",
			query:            `service.name = 'redis' OR service.name = 'mysql'`,
			hasContradiction: false,
			expectedErrors:   []string{},
		},
		{
			name:             "Complex valid query",
			query:            `(service.name = 'redis' OR service.name = 'mysql') AND http.status_code = 200`,
			hasContradiction: false,
			expectedErrors:   []string{},
		},
		{
			name:             "Negated contradiction",
			query:            `NOT (service.name = 'redis') AND service.name = 'redis'`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name"},
		},
		{
			name:             "Multiple field contradictions",
			query:            `service.name = 'redis' AND service.name = 'mysql' AND http.status_code = 200 AND http.status_code = 404`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name", "http.status_code"},
		},
		{
			name:             "Implicit AND with contradiction",
			query:            `service.name='redis' service.name='mysql'`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name"},
		},
		{
			name:             "Equal with incompatible range",
			query:            `http.status_code = 200 AND http.status_code > 300`,
			hasContradiction: true,
			expectedErrors:   []string{"http.status_code"},
		},
		{
			name:             "Complex nested contradiction",
			query:            `(service.name = 'redis' AND http.status_code = 200) AND (service.name = 'mysql' AND http.status_code = 200)`,
			hasContradiction: true,
			expectedErrors:   []string{"service.name"},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			contradictions, err := DetectContradictions(tt.query)

			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}

			hasContradiction := len(contradictions) > 0

			if hasContradiction != tt.hasContradiction {
				t.Errorf("expected hasContradiction=%v, got %v. Contradictions: %v",
					tt.hasContradiction, hasContradiction, contradictions)
			}

			if tt.hasContradiction {
				// Check that we found contradictions for expected fields.
				// Matching is by substring, so any message mentioning the
				// field name counts.
				for _, expectedField := range tt.expectedErrors {
					found := false
					for _, contradiction := range contradictions {
						if strings.Contains(contradiction, expectedField) {
							found = true
							break
						}
					}
					if !found {
						t.Errorf("expected contradiction for field %s, but not found. Got: %v",
							expectedField, contradictions)
					}
				}
			}
		})
	}
}
|
||||
|
||||
// TestComplexNestedContradictions covers DetectContradictions on deeply
// nested AND/OR trees, multi-range BETWEEN interactions, combined IN/NOT IN
// exclusions, implicit AND, double negation, LIKE-vs-equality conflicts and
// realistic multi-field monitoring queries.
func TestComplexNestedContradictions(t *testing.T) {
	tests := []struct {
		name             string
		query            string
		hasContradiction bool
		expectedFields   []string // field names expected in the reported messages
		description      string   // human explanation used in failure output
	}{
		// Complex nested AND/OR combinations
		{
			name:             "Nested AND with contradiction in inner expression",
			query:            `(service.name = 'redis' AND http.status_code = 200) AND (service.name = 'mysql' AND http.status_code = 200)`,
			hasContradiction: true,
			expectedFields:   []string{"service.name"},
			description:      "Inner ANDs both valid, but combined they contradict on service.name",
		},
		{
			// NOTE(review): the case name says "no contradiction" but the
			// expectation is true — presumably because the impossible OR
			// branch is still reported. Confirm the name is intentional.
			name:             "OR with contradictory AND branches - no contradiction",
			query:            `(service.name = 'redis' AND service.name = 'mysql') OR (http.status_code = 200)`,
			hasContradiction: true,
			expectedFields:   []string{"service.name"},
			description:      "First branch impossible",
		},
		{
			name:             "Deeply nested contradiction",
			query:            `((service.name = 'redis' AND (http.status_code > 200 AND http.status_code < 200)) AND region = 'us-east')`,
			hasContradiction: true,
			expectedFields:   []string{"http.status_code"},
			description:      "Nested impossible range condition",
		},
		{
			name:             "Multiple field contradictions in nested structure",
			query:            `(service.name = 'redis' AND service.name != 'redis') AND (http.status_code = 200 AND http.status_code = 404)`,
			hasContradiction: true,
			expectedFields:   []string{"service.name", "http.status_code"},
			description:      "Both nested expressions have contradictions",
		},

		// Complex BETWEEN contradictions
		{
			name:             "BETWEEN with overlapping ranges - valid",
			query:            `http.status_code BETWEEN 200 AND 299 AND http.status_code BETWEEN 250 AND 350`,
			hasContradiction: false,
			expectedFields:   []string{},
			description:      "Ranges overlap at 250-299, so valid",
		},
		{
			name:             "BETWEEN with exact value outside range",
			query:            `http.status_code = 500 AND http.status_code BETWEEN 200 AND 299`,
			hasContradiction: true,
			expectedFields:   []string{"http.status_code"},
			description:      "Exact value outside BETWEEN range",
		},
		{
			name:             "Multiple BETWEEN with no overlap",
			query:            `(latency BETWEEN 100 AND 200) AND (latency BETWEEN 300 AND 400) AND (latency BETWEEN 500 AND 600)`,
			hasContradiction: true,
			expectedFields:   []string{"latency"},
			description:      "Three non-overlapping ranges",
		},

		// Complex IN/NOT IN combinations
		{
			name:             "IN with nested NOT IN contradiction",
			query:            `service.name IN ('redis', 'mysql', 'postgres') AND (service.name NOT IN ('mysql', 'postgres') AND service.name NOT IN ('redis'))`,
			hasContradiction: true,
			expectedFields:   []string{"service.name"},
			description:      "Combined NOT IN excludes all values from IN",
		},
		{
			name:             "Complex valid IN/NOT IN",
			query:            `service.name IN ('redis', 'mysql', 'postgres') AND service.name NOT IN ('mongodb', 'cassandra')`,
			hasContradiction: false,
			expectedFields:   []string{},
			description:      "Non-overlapping IN and NOT IN lists",
		},

		// Implicit AND with complex expressions
		{
			name:             "Implicit AND with nested contradiction",
			query:            `service.name='redis' (http.status_code > 500 http.status_code < 400)`,
			hasContradiction: true,
			expectedFields:   []string{"http.status_code"},
			description:      "Implicit AND creates impossible range",
		},
		{
			name:             "Mixed implicit and explicit AND",
			query:            `service.name='redis' service.name='mysql' AND http.status_code=200`,
			hasContradiction: true,
			expectedFields:   []string{"service.name"},
			description:      "Implicit AND between service names creates contradiction",
		},

		// NOT operator complexities
		{
			name:             "Double negation with contradiction",
			query:            `NOT (NOT (service.name = 'redis')) AND service.name = 'mysql'`,
			hasContradiction: true,
			expectedFields:   []string{"service.name"},
			description:      "Double NOT cancels out, creating contradiction",
		},

		// Range conditions with multiple operators
		{
			name:             "Chained range conditions creating impossible range",
			query:            `value > 100 AND value < 200 AND value > 300 AND value < 400`,
			hasContradiction: true,
			expectedFields:   []string{"value"},
			description:      "Multiple ranges that cannot be satisfied simultaneously",
		},
		{
			name:             "Valid narrowing range",
			query:            `value > 100 AND value < 400 AND value > 200 AND value < 300`,
			hasContradiction: false,
			expectedFields:   []string{},
			description:      "Ranges narrow down to valid 200-300 range",
		},

		// Mixed operator types
		{
			name:             "LIKE pattern with exact value contradiction",
			query:            `service.name = 'redis-cache-01' AND service.name LIKE 'mysql%'`,
			hasContradiction: true,
			expectedFields:   []string{"service.name"},
			description:      "Exact value doesn't match LIKE pattern",
		},
		{
			name:             "EXISTS with value contradiction",
			query:            `custom.tag EXISTS AND custom.tag = 'value' AND custom.tag NOT EXISTS`,
			hasContradiction: true,
			expectedFields:   []string{"custom.tag"},
			description:      "Field both exists with value and doesn't exist",
		},

		// Edge cases
		{
			name:             "Same field different types",
			query:            `http.status_code = '200' AND http.status_code = 200`,
			hasContradiction: false, // Depends on type coercion
			expectedFields:   []string{},
			description:      "Same value different types - implementation dependent",
		},
		{
			name:             "Complex parentheses with valid expression",
			query:            `((((service.name = 'redis')))) AND ((((http.status_code = 200))))`,
			hasContradiction: false,
			expectedFields:   []string{},
			description:      "Multiple parentheses levels but valid expression",
		},

		// Real-world complex scenarios
		{
			name: "Monitoring query with impossible conditions",
			query: `service.name = 'api-gateway' AND
				http.status_code >= 500 AND
				http.status_code < 500 AND
				region IN ('us-east-1', 'us-west-2') AND
				region NOT IN ('us-east-1', 'us-west-2', 'eu-west-1')`,
			hasContradiction: true,
			expectedFields:   []string{"http.status_code", "region"},
			description:      "Multiple contradictions in monitoring query",
		},
		{
			name: "Valid complex monitoring query",
			query: `(service.name = 'api-gateway' OR service.name = 'web-server') AND
				http.status_code >= 400 AND
				http.status_code < 500 AND
				region IN ('us-east-1', 'us-west-2') AND
				latency > 1000`,
			hasContradiction: false,
			expectedFields:   []string{},
			description:      "Complex but valid monitoring conditions",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			contradictions, err := DetectContradictions(tt.query)

			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}

			hasContradiction := len(contradictions) > 0

			if hasContradiction != tt.hasContradiction {
				t.Errorf("Test: %s\nDescription: %s\nExpected hasContradiction=%v, got %v\nContradictions: %v",
					tt.name, tt.description, tt.hasContradiction, hasContradiction, contradictions)
			}

			if tt.hasContradiction {
				// Check that we found contradictions for expected fields
				// (substring match against the reported messages).
				for _, expectedField := range tt.expectedFields {
					found := false
					for _, contradiction := range contradictions {
						if strings.Contains(contradiction, expectedField) {
							found = true
							break
						}
					}
					if !found {
						t.Errorf("Test: %s\nExpected contradiction for field %s, but not found.\nGot: %v",
							tt.name, expectedField, contradictions)
					}
				}
			}
		})
	}
}
|
||||
|
||||
// TestExpressionLevelHandling verifies how DetectContradictions treats
// contradictions at different levels of the expression tree: top-level OR
// branches are independently satisfiable, while an impossible AND branch is
// still reported even when it sits inside an OR.
func TestExpressionLevelHandling(t *testing.T) {
	tests := []struct {
		name             string
		query            string
		hasContradiction bool
		description      string // human explanation used in failure output
	}{
		{
			name:             "OR at top level - no contradiction",
			query:            `service.name = 'redis' OR service.name = 'mysql'`,
			hasContradiction: false,
			description:      "Top level OR should not check for contradictions",
		},
		{
			// NOTE(review): expectation is true although the description says
			// the whole expression is not contradictory — presumably the
			// impossible AND branch alone is reported. Confirm intent.
			name:             "AND within OR - contradiction only in AND branch",
			query:            `(service.name = 'redis' AND service.name = 'mysql') OR http.status_code = 200`,
			hasContradiction: true,
			description:      "Contradiction in one OR branch doesn't make whole expression contradictory",
		},
		{
			name:             "Nested OR within AND - valid",
			query:            `http.status_code = 200 AND (service.name = 'redis' OR service.name = 'mysql')`,
			hasContradiction: false,
			description:      "OR within AND is valid",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			contradictions, err := DetectContradictions(tt.query)

			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}

			hasContradiction := len(contradictions) > 0

			if hasContradiction != tt.hasContradiction {
				t.Errorf("Test: %s\nDescription: %s\nExpected hasContradiction=%v, got %v\nContradictions: %v",
					tt.name, tt.description, tt.hasContradiction, hasContradiction, contradictions)
			}
		})
	}
}
|
||||
@@ -95,7 +95,6 @@ func (b *resourceFilterStatementBuilder[T]) Build(
|
||||
end uint64,
|
||||
requestType qbtypes.RequestType,
|
||||
query qbtypes.QueryBuilderQuery[T],
|
||||
variables map[string]qbtypes.VariableItem,
|
||||
) (*qbtypes.Statement, error) {
|
||||
config, exists := signalConfigs[b.signal]
|
||||
if !exists {
|
||||
@@ -112,7 +111,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := b.addConditions(ctx, q, start, end, query, keys, variables); err != nil {
|
||||
if err := b.addConditions(ctx, q, start, end, query, keys); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -130,7 +129,6 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
|
||||
start, end uint64,
|
||||
query qbtypes.QueryBuilderQuery[T],
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
variables map[string]qbtypes.VariableItem,
|
||||
) error {
|
||||
// Add filter condition if present
|
||||
if query.Filter != nil && query.Filter.Expression != "" {
|
||||
@@ -141,7 +139,6 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
|
||||
ConditionBuilder: b.conditionBuilder,
|
||||
FieldKeys: keys,
|
||||
SkipFullTextFilter: true,
|
||||
Variables: variables,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -1,16 +1,10 @@
|
||||
package querybuilder
|
||||
|
||||
import (
|
||||
"math"
|
||||
)
|
||||
import "math"
|
||||
|
||||
const (
|
||||
NsToSeconds = 1000000000
|
||||
BucketAdjustment = 1800 // 30 minutes
|
||||
|
||||
RecommendedNumberOfPoints = 300
|
||||
MaxAllowedNumberofPoints = 1500
|
||||
MaxAllowedSeries = 3000
|
||||
)
|
||||
|
||||
// ToNanoSecs takes epoch and returns it in ns
|
||||
@@ -27,61 +21,3 @@ func ToNanoSecs(epoch uint64) uint64 {
|
||||
}
|
||||
return temp * uint64(math.Pow(10, float64(19-count)))
|
||||
}
|
||||
|
||||
func RecommendedStepInterval(start, end uint64) uint64 {
|
||||
start = ToNanoSecs(start)
|
||||
end = ToNanoSecs(end)
|
||||
|
||||
step := (end - start) / RecommendedNumberOfPoints / 1e9
|
||||
|
||||
if step < 5 {
|
||||
return 5
|
||||
}
|
||||
|
||||
// return the nearest lower multiple of 5
|
||||
return step - step%5
|
||||
}
|
||||
|
||||
func MinAllowedStepInterval(start, end uint64) uint64 {
|
||||
start = ToNanoSecs(start)
|
||||
end = ToNanoSecs(end)
|
||||
|
||||
step := (end - start) / MaxAllowedNumberofPoints / 1e9
|
||||
|
||||
if step < 5 {
|
||||
return 5
|
||||
}
|
||||
|
||||
// return the nearest lower multiple of 5
|
||||
return step - step%5
|
||||
}
|
||||
|
||||
func RecommendedStepIntervalForMetric(start, end uint64) uint64 {
|
||||
start = ToNanoSecs(start)
|
||||
end = ToNanoSecs(end)
|
||||
|
||||
step := (end - start) / RecommendedNumberOfPoints / 1e9
|
||||
|
||||
// TODO: make this make use of the reporting frequency and remove the hard coded step
|
||||
if step < 60 {
|
||||
return 60
|
||||
}
|
||||
|
||||
// return the nearest lower multiple of 60
|
||||
return step - step%60
|
||||
}
|
||||
|
||||
func MinAllowedStepIntervalForMetric(start, end uint64) uint64 {
|
||||
start = ToNanoSecs(start)
|
||||
end = ToNanoSecs(end)
|
||||
|
||||
step := (end - start) / RecommendedNumberOfPoints / 1e9
|
||||
|
||||
// TODO: make this make use of the reporting frequency and remove the hard coded step
|
||||
if step < 60 {
|
||||
return 60
|
||||
}
|
||||
|
||||
// return the nearest lower multiple of 60
|
||||
return step - step%60
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user