Compare commits
v0.49.0-cl ... v0.49.0-cl
33 commits
| SHA1 |
|---|
| 02106277a6 |
| b34509215e |
| fd603b8fdf |
| c5d23336a7 |
| 53c6288025 |
| 4f2c314f39 |
| 1ad61615c6 |
| 7ddfadfb18 |
| a7e02af8b0 |
| da3f6fd7fd |
| a453471b51 |
| 13df87ed69 |
| f23ceea54e |
| 46b4c8a004 |
| 580198ca7a |
| 2fb5b16840 |
| de571aa69a |
| daa5a05677 |
| 4f69996b9d |
| 6c402d9e46 |
| 51032f6caa |
| 41f91db622 |
| 52e0303997 |
| 5df25e83d1 |
| 873280abea |
| 8ccdc71eaf |
| d5f156a6e9 |
| cc7559ddee |
| 415057c260 |
| 89b67b8880 |
| 878cb7c0a6 |
| 0375fc47a7 |
| a7a160df76 |
.github/workflows/push.yaml (vendored): 1 change
@@ -158,6 +158,7 @@ jobs:
         echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
         echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
         echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
+        echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
     - name: Install dependencies
       working-directory: frontend
       run: yarn install
@@ -347,7 +347,7 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
 ```bash
 kubectl -n sample-application run strzal --image=djbingham/curl \
   --restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
-  'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
+  'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm
 ```

 **5.1.3 To stop the load generation:**
Makefile: 1 change
@@ -188,3 +188,4 @@ test:
 	go test ./pkg/query-service/tests/integration/...
 	go test ./pkg/query-service/rules/...
 	go test ./pkg/query-service/collectorsimulator/...
+	go test ./pkg/query-service/postprocess/...
@@ -389,7 +389,7 @@ trap bye EXIT

 URL="https://api.segment.io/v1/track"
 HEADER_1="Content-Type: application/json"
-HEADER_2="Authorization: Basic NEdtb2E0aXhKQVVIeDJCcEp4c2p3QTFiRWZud0VlUno6"
+HEADER_2="Authorization: Basic OWtScko3b1BDR1BFSkxGNlFqTVBMdDVibGpGaFJRQnI="

 send_event() {
   error=""
@@ -24,7 +24,6 @@ import (
 type APIHandlerOptions struct {
 	DataConnector       interfaces.DataConnector
 	SkipConfig          *basemodel.SkipConfig
-	PreferDelta         bool
 	PreferSpanMetrics   bool
 	MaxIdleConns        int
 	MaxOpenConns        int
@@ -53,7 +52,6 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
 	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
 		Reader:            opts.DataConnector,
 		SkipConfig:        opts.SkipConfig,
-		PerferDelta:       opts.PreferDelta,
 		PreferSpanMetrics: opts.PreferSpanMetrics,
 		MaxIdleConns:      opts.MaxIdleConns,
 		MaxOpenConns:      opts.MaxOpenConns,
@@ -64,7 +64,6 @@ type ServerOptions struct {
 	// alert specific params
 	DisableRules      bool
 	RuleRepoURL       string
-	PreferDelta       bool
 	PreferSpanMetrics bool
 	MaxIdleConns      int
 	MaxOpenConns      int
@@ -256,7 +255,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	apiOpts := api.APIHandlerOptions{
 		DataConnector:     reader,
 		SkipConfig:        skipConfig,
-		PreferDelta:       serverOptions.PreferDelta,
 		PreferSpanMetrics: serverOptions.PreferSpanMetrics,
 		MaxIdleConns:      serverOptions.MaxIdleConns,
 		MaxOpenConns:      serverOptions.MaxOpenConns,
@@ -89,7 +89,6 @@ func main() {

 	var cacheConfigPath, fluxInterval string
 	var enableQueryServiceLogOTLPExport bool
-	var preferDelta bool
 	var preferSpanMetrics bool

 	var maxIdleConns int
@@ -100,14 +99,13 @@ func main() {
 	flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
 	flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
 	flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
-	flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
 	flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
 	flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
 	flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
 	flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
 	flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
 	flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
-	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
+	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
 	flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
 	flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
 	flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
@@ -125,7 +123,6 @@ func main() {
 		HTTPHostPort:      baseconst.HTTPHostPort,
 		PromConfigPath:    promConfigPath,
 		SkipTopLvlOpsPath: skipTopLvlOpsPath,
-		PreferDelta:       preferDelta,
 		PreferSpanMetrics: preferSpanMetrics,
 		PrivateHostPort:   baseconst.PrivateHostPort,
 		DisableRules:      disableRules,
@@ -88,6 +88,7 @@
 		"lucide-react": "0.379.0",
 		"mini-css-extract-plugin": "2.4.5",
 		"papaparse": "5.4.1",
+		"posthog-js": "1.140.1",
 		"rc-tween-one": "3.0.6",
 		"react": "18.2.0",
 		"react-addons-update": "15.6.3",
@@ -17,6 +17,7 @@ import { NotificationProvider } from 'hooks/useNotifications';
 import { ResourceProvider } from 'hooks/useResourceAttribute';
 import history from 'lib/history';
 import { identity, pick, pickBy } from 'lodash-es';
+import posthog from 'posthog-js';
 import { DashboardProvider } from 'providers/Dashboard/Dashboard';
 import { QueryBuilderProvider } from 'providers/QueryBuilder';
 import { Suspense, useEffect, useState } from 'react';
@@ -38,7 +39,7 @@ import defaultRoutes, {

 function App(): JSX.Element {
 	const themeConfig = useThemeConfig();
-	const { data } = useLicense();
+	const { data: licenseData } = useLicense();
 	const [routes, setRoutes] = useState<AppRoutes[]>(defaultRoutes);
 	const { role, isLoggedIn: isLoggedInState, user, org } = useSelector<
 		AppState,
@@ -92,10 +93,10 @@ function App(): JSX.Element {
 	});

 	const isOnBasicPlan =
-		data?.payload?.licenses?.some(
+		licenseData?.payload?.licenses?.some(
 			(license) =>
 				license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
-		) || data?.payload?.licenses === null;
+		) || licenseData?.payload?.licenses === null;

 	const enableAnalytics = (user: User): void => {
 		const orgName =
@@ -112,9 +113,7 @@ function App(): JSX.Element {
 		};

 		const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
-
 		const domain = extractDomain(email);
-
 		const hostNameParts = hostname.split('.');

 		const groupTraits = {
@@ -127,10 +126,30 @@ function App(): JSX.Element {
 		};

 		window.analytics.identify(email, sanitizedIdentifyPayload);

 		window.analytics.group(domain, groupTraits);

 		window.clarity('identify', email, name);

+		posthog?.identify(email, {
+			email,
+			name,
+			orgName,
+			tenant_id: hostNameParts[0],
+			data_region: hostNameParts[1],
+			tenant_url: hostname,
+			company_domain: domain,
+			source: 'signoz-ui',
+			isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
+		});
+
+		posthog?.group('company', domain, {
+			name: orgName,
+			tenant_id: hostNameParts[0],
+			data_region: hostNameParts[1],
+			tenant_url: hostname,
+			company_domain: domain,
+			source: 'signoz-ui',
+			isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
+		});
 	};

 	useEffect(() => {
@@ -144,10 +163,6 @@ function App(): JSX.Element {
 			!isIdentifiedUser
 		) {
 			setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true');
-
-			if (isCloudUserVal) {
-				enableAnalytics(user);
-			}
 		}

 		if (
@@ -195,6 +210,11 @@ function App(): JSX.Element {
 				console.error('Failed to parse local storage theme analytics event');
 			}
 		}

+		if (isCloudUserVal && user && user.email) {
+			enableAnalytics(user);
+		}
+
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [user]);
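The App.tsx hunks above call posthog?.identify and posthog?.group but do not show where the client is initialised. Below is a minimal sketch of a typical posthog-js setup; the token placeholder, the api_host value, and doing this in an app entry point are assumptions, not something this diff confirms.

```typescript
import posthog from 'posthog-js';

// Assumed initialisation; the real token would come from the POSTHOG_KEY
// value written to frontend/.env in the workflow hunk earlier.
posthog.init('<POSTHOG_KEY>', { api_host: 'https://app.posthog.com' });

// Mirrors the calls added above: tie events to a user, then to a company group.
posthog.identify('user@example.com', { email: 'user@example.com', source: 'signoz-ui' });
posthog.group('company', 'example.com', { name: 'Example Org', source: 'signoz-ui' });
```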
@@ -5,7 +5,13 @@ import { Button, Dropdown, MenuProps } from 'antd';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useState } from 'react';

-function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
+function DropDown({
+	element,
+	onDropDownItemClick,
+}: {
+	element: JSX.Element[];
+	onDropDownItemClick?: MenuProps['onClick'];
+}): JSX.Element {
 	const isDarkMode = useIsDarkMode();

 	const items: MenuProps['items'] = element.map(
@@ -23,6 +29,7 @@ function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
 			items,
 			onMouseEnter: (): void => setDdOpen(true),
 			onMouseLeave: (): void => setDdOpen(false),
+			onClick: (item): void => onDropDownItemClick?.(item),
 		}}
 		open={isDdOpen}
 	>
@@ -40,4 +47,8 @@ function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
 	);
 }

+DropDown.defaultProps = {
+	onDropDownItemClick: (): void => {},
+};
+
 export default DropDown;
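For reference, a sketch of how the widened DropDown API could be consumed; the import path and the menu items here are illustrative assumptions, not taken from the diff.

```tsx
import { MenuProps } from 'antd';

import DropDown from 'components/DropDown/DropDown'; // assumed path

function Example(): JSX.Element {
	// Receives the antd menu click info for whichever item was picked.
	const handleItemClick: MenuProps['onClick'] = (item): void => {
		console.log('clicked menu item', item.key);
	};

	return (
		<DropDown
			element={[<span key="edit">Edit</span>, <span key="delete">Delete</span>]}
			onDropDownItemClick={handleItemClick}
		/>
	);
}

export default Example;
```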
@@ -62,8 +62,6 @@ function RawLogView({
 	const isDarkMode = useIsDarkMode();
 	const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;

-	const severityText = data.severity_text ? `${data.severity_text} |` : '';
-
 	const logType = getLogIndicatorType(data);

 	const updatedSelecedFields = useMemo(
@@ -88,17 +86,16 @@ function RawLogView({
 		attributesText += ' | ';
 	}

-	const text = useMemo(
-		() =>
+	const text = useMemo(() => {
+		const date =
 			typeof data.timestamp === 'string'
-				? `${dayjs(data.timestamp).format(
-						'YYYY-MM-DD HH:mm:ss.SSS',
-				  )} | ${attributesText} ${severityText} ${data.body}`
-				: `${dayjs(data.timestamp / 1e6).format(
-						'YYYY-MM-DD HH:mm:ss.SSS',
-				  )} | ${attributesText} ${severityText} ${data.body}`,
-		[data.timestamp, data.body, severityText, attributesText],
-	);
+				? dayjs(data.timestamp)
+				: dayjs(data.timestamp / 1e6);

+		return `${date.format('YYYY-MM-DD HH:mm:ss.SSS')} | ${attributesText} ${
+			data.body
+		}`;
+	}, [data.timestamp, data.body, attributesText]);

 	const handleClickExpand = useCallback(() => {
 		if (activeContextLog || isReadOnly) return;
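The rewritten memo normalises the two timestamp shapes before formatting: string timestamps are parsed directly, while numeric ones are treated as epoch nanoseconds and divided by 1e6 because dayjs expects milliseconds. A standalone sketch of that conversion:

```typescript
import dayjs from 'dayjs';

// Logs may carry either an ISO string or an epoch value in nanoseconds.
function formatLogTimestamp(timestamp: string | number): string {
	const date =
		typeof timestamp === 'string'
			? dayjs(timestamp)
			: dayjs(timestamp / 1e6); // ns -> ms

	return date.format('YYYY-MM-DD HH:mm:ss.SSS');
}

// Both calls format the same instant:
console.log(formatLogTimestamp(1715600000000000000));
console.log(formatLogTimestamp('2024-05-13T10:13:20.000Z'));
```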
@@ -2,7 +2,9 @@
 import './DynamicColumnTable.syles.scss';

 import { Button, Dropdown, Flex, MenuProps, Switch } from 'antd';
+import { ColumnGroupType, ColumnType } from 'antd/es/table';
 import { ColumnsType } from 'antd/lib/table';
+import logEvent from 'api/common/logEvent';
 import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
 import { SlidersHorizontal } from 'lucide-react';
 import { memo, useEffect, useState } from 'react';
@@ -22,6 +24,7 @@ function DynamicColumnTable({
 	dynamicColumns,
 	onDragColumn,
 	facingIssueBtn,
+	shouldSendAlertsLogEvent,
 	...restProps
 }: DynamicColumnTableProps): JSX.Element {
 	const [columnsData, setColumnsData] = useState<ColumnsType | undefined>(
@@ -47,11 +50,18 @@ function DynamicColumnTable({
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [columns, dynamicColumns]);

-	const onToggleHandler = (index: number) => (
-		checked: boolean,
-		event: React.MouseEvent<HTMLButtonElement>,
-	): void => {
+	const onToggleHandler = (
+		index: number,
+		column: ColumnGroupType<any> | ColumnType<any>,
+	) => (checked: boolean, event: React.MouseEvent<HTMLButtonElement>): void => {
 		event.stopPropagation();

+		if (shouldSendAlertsLogEvent) {
+			logEvent('Alert: Column toggled', {
+				column: column?.title,
+				action: checked ? 'Enable' : 'Disable',
+			});
+		}
 		setVisibleColumns({
 			tablesource,
 			dynamicColumns,
@@ -75,7 +85,7 @@ function DynamicColumnTable({
 					<div>{column.title?.toString()}</div>
 					<Switch
 						checked={columnsData?.findIndex((c) => c.key === column.key) !== -1}
-						onChange={onToggleHandler(index)}
+						onChange={onToggleHandler(index, column)}
 					/>
 				</div>
 			),
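The toggle handler above is curried: the outer call binds the column index and definition at render time, and the returned function matches the antd Switch onChange signature. A stripped-down sketch of the same shape, with console.log standing in for the real logEvent call:

```typescript
// Curried factory: bind per-column data now, receive the Switch event later.
const onToggle = (index: number, title: string) => (
	checked: boolean,
	event: { stopPropagation: () => void },
): void => {
	event.stopPropagation();
	console.log(`column ${title} (#${index}): ${checked ? 'Enable' : 'Disable'}`);
};

// Each rendered row gets its own pre-bound handler:
const handler = onToggle(0, 'service_name');
handler(true, { stopPropagation: (): void => {} });
```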
@@ -20,6 +20,7 @@ import { ResizeTableProps } from './types';
 function ResizeTable({
 	columns,
 	onDragColumn,
+	pagination,
 	...restProps
 }: ResizeTableProps): JSX.Element {
 	const [columnsData, setColumns] = useState<ColumnsType>([]);
@@ -63,8 +64,9 @@ function ResizeTable({
 			...restProps,
 			components: { header: { cell: ResizableHeader } },
 			columns: mergedColumns,
+			pagination: { ...pagination, hideOnSinglePage: true },
 		}),
-		[mergedColumns, restProps],
+		[mergedColumns, pagination, restProps],
 	);

 	useEffect(() => {
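Besides adding hideOnSinglePage, this hunk avoids a stale-memo hazard: pagination is now read inside useMemo, so it must also appear in the dependency list or the merged config would keep the first render's value. A minimal illustration of the rule:

```typescript
import { useMemo } from 'react';

function useTableConfig(pagination: { pageSize: number }): unknown {
	// pagination is read inside the memo, so it belongs in the deps below;
	// omitting it would freeze the merged config at its first value.
	return useMemo(
		() => ({ pagination: { ...pagination, hideOnSinglePage: true } }),
		[pagination],
	);
}

export default useTableConfig;
```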
@@ -14,6 +14,7 @@ export interface DynamicColumnTableProps extends TableProps<any> {
 	dynamicColumns: TableProps<any>['columns'];
 	onDragColumn?: (fromIndex: number, toIndex: number) => void;
 	facingIssueBtn?: FacingIssueBtnProps;
+	shouldSendAlertsLogEvent?: boolean;
 }

 export type GetVisibleColumnsFunction = (
@@ -9,7 +9,6 @@ import { Tooltip } from 'antd';
 import { themeColors } from 'constants/theme';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useMemo } from 'react';
-import { popupContainer } from 'utils/selectPopupContainer';

 import { style } from './constant';

@@ -64,7 +63,7 @@ function TextToolTip({
 	);

 	return (
-		<Tooltip getTooltipContainer={popupContainer} overlay={overlay}>
+		<Tooltip overlay={overlay}>
 			{useFilledIcon ? (
 				<QuestionCircleFilled style={iconStyle} />
 			) : (
@@ -1,13 +1,15 @@
 import { PlusOutlined } from '@ant-design/icons';
 import { Tooltip, Typography } from 'antd';
 import getAll from 'api/channels/getAll';
+import logEvent from 'api/common/logEvent';
 import Spinner from 'components/Spinner';
 import TextToolTip from 'components/TextToolTip';
 import ROUTES from 'constants/routes';
 import useComponentPermission from 'hooks/useComponentPermission';
 import useFetch from 'hooks/useFetch';
 import history from 'lib/history';
-import { useCallback } from 'react';
+import { isUndefined } from 'lodash-es';
+import { useCallback, useEffect } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
@@ -31,6 +33,14 @@ function AlertChannels(): JSX.Element {

 	const { loading, payload, error, errorMessage } = useFetch(getAll);

+	useEffect(() => {
+		if (!isUndefined(payload)) {
+			logEvent('Alert Channel: Channel list page visited', {
+				number: payload?.length,
+			});
+		}
+	}, [payload]);
+
 	if (error) {
 		return <Typography>{errorMessage}</Typography>;
 	}
@@ -11,11 +11,12 @@ import testOpsGenie from 'api/channels/testOpsgenie';
 import testPagerApi from 'api/channels/testPager';
 import testSlackApi from 'api/channels/testSlack';
 import testWebhookApi from 'api/channels/testWebhook';
+import logEvent from 'api/common/logEvent';
 import ROUTES from 'constants/routes';
 import FormAlertChannels from 'container/FormAlertChannels';
 import { useNotifications } from 'hooks/useNotifications';
 import history from 'lib/history';
-import { useCallback, useState } from 'react';
+import { useCallback, useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';

 import {
@@ -43,6 +44,10 @@ function CreateAlertChannels({

 	const [formInstance] = Form.useForm();

+	useEffect(() => {
+		logEvent('Alert Channel: Create channel page visited', {});
+	}, []);
+
 	const [selectedConfig, setSelectedConfig] = useState<
 		Partial<
 			SlackChannel &
@@ -139,19 +144,25 @@ function CreateAlertChannels({
 					description: t('channel_creation_done'),
 				});
 				history.replace(ROUTES.ALL_CHANNELS);
-			} else {
-				notifications.error({
-					message: 'Error',
-					description: response.error || t('channel_creation_failed'),
-				});
+				return { status: 'success', statusMessage: t('channel_creation_done') };
 			}
+			notifications.error({
+				message: 'Error',
+				description: response.error || t('channel_creation_failed'),
+			});
+			return {
+				status: 'failed',
+				statusMessage: response.error || t('channel_creation_failed'),
+			};
 		} catch (error) {
 			notifications.error({
 				message: 'Error',
 				description: t('channel_creation_failed'),
 			});
+			return { status: 'failed', statusMessage: t('channel_creation_failed') };
+		} finally {
+			setSavingState(false);
 		}
-		setSavingState(false);
 	}, [prepareSlackRequest, t, notifications]);

 	const prepareWebhookRequest = useCallback(() => {
@@ -200,19 +211,25 @@ function CreateAlertChannels({
 					description: t('channel_creation_done'),
 				});
 				history.replace(ROUTES.ALL_CHANNELS);
-			} else {
-				notifications.error({
-					message: 'Error',
-					description: response.error || t('channel_creation_failed'),
-				});
+				return { status: 'success', statusMessage: t('channel_creation_done') };
 			}
+			notifications.error({
+				message: 'Error',
+				description: response.error || t('channel_creation_failed'),
+			});
+			return {
+				status: 'failed',
+				statusMessage: response.error || t('channel_creation_failed'),
+			};
 		} catch (error) {
 			notifications.error({
 				message: 'Error',
 				description: t('channel_creation_failed'),
 			});
+			return { status: 'failed', statusMessage: t('channel_creation_failed') };
+		} finally {
+			setSavingState(false);
 		}
-		setSavingState(false);
 	}, [prepareWebhookRequest, t, notifications]);

 	const preparePagerRequest = useCallback(() => {
@@ -245,8 +262,8 @@ function CreateAlertChannels({
 		setSavingState(true);
 		const request = preparePagerRequest();

-		if (request) {
-			try {
+		try {
+			if (request) {
 				const response = await createPagerApi(request);

 				if (response.statusCode === 200) {
@@ -255,20 +272,31 @@ function CreateAlertChannels({
 						description: t('channel_creation_done'),
 					});
 					history.replace(ROUTES.ALL_CHANNELS);
-				} else {
-					notifications.error({
-						message: 'Error',
-						description: response.error || t('channel_creation_failed'),
-					});
+					return { status: 'success', statusMessage: t('channel_creation_done') };
 				}
-			} catch (e) {
 				notifications.error({
 					message: 'Error',
-					description: t('channel_creation_failed'),
+					description: response.error || t('channel_creation_failed'),
 				});
+				return {
+					status: 'failed',
+					statusMessage: response.error || t('channel_creation_failed'),
+				};
 			}
+			notifications.error({
+				message: 'Error',
+				description: t('channel_creation_failed'),
+			});
+			return { status: 'failed', statusMessage: t('channel_creation_failed') };
+		} catch (error) {
+			notifications.error({
+				message: 'Error',
+				description: t('channel_creation_failed'),
+			});
+			return { status: 'failed', statusMessage: t('channel_creation_failed') };
+		} finally {
+			setSavingState(false);
 		}
-		setSavingState(false);
 	}, [t, notifications, preparePagerRequest]);

 	const prepareOpsgenieRequest = useCallback(
@@ -295,19 +323,25 @@ function CreateAlertChannels({
 					description: t('channel_creation_done'),
 				});
 				history.replace(ROUTES.ALL_CHANNELS);
-			} else {
-				notifications.error({
-					message: 'Error',
-					description: response.error || t('channel_creation_failed'),
-				});
+				return { status: 'success', statusMessage: t('channel_creation_done') };
 			}
+			notifications.error({
+				message: 'Error',
+				description: response.error || t('channel_creation_failed'),
+			});
+			return {
+				status: 'failed',
+				statusMessage: response.error || t('channel_creation_failed'),
+			};
 		} catch (error) {
 			notifications.error({
 				message: 'Error',
 				description: t('channel_creation_failed'),
 			});
+			return { status: 'failed', statusMessage: t('channel_creation_failed') };
+		} finally {
+			setSavingState(false);
 		}
-		setSavingState(false);
 	}, [prepareOpsgenieRequest, t, notifications]);

 	const prepareEmailRequest = useCallback(
@@ -332,19 +366,25 @@ function CreateAlertChannels({
 					description: t('channel_creation_done'),
 				});
 				history.replace(ROUTES.ALL_CHANNELS);
-			} else {
-				notifications.error({
-					message: 'Error',
-					description: response.error || t('channel_creation_failed'),
-				});
+				return { status: 'success', statusMessage: t('channel_creation_done') };
 			}
+			notifications.error({
+				message: 'Error',
+				description: response.error || t('channel_creation_failed'),
+			});
+			return {
+				status: 'failed',
+				statusMessage: response.error || t('channel_creation_failed'),
+			};
 		} catch (error) {
 			notifications.error({
 				message: 'Error',
 				description: t('channel_creation_failed'),
 			});
+			return { status: 'failed', statusMessage: t('channel_creation_failed') };
+		} finally {
+			setSavingState(false);
 		}
-		setSavingState(false);
 	}, [prepareEmailRequest, t, notifications]);

 	const prepareMsTeamsRequest = useCallback(
@@ -370,19 +410,25 @@ function CreateAlertChannels({
 					description: t('channel_creation_done'),
 				});
 				history.replace(ROUTES.ALL_CHANNELS);
-			} else {
-				notifications.error({
-					message: 'Error',
-					description: response.error || t('channel_creation_failed'),
-				});
+				return { status: 'success', statusMessage: t('channel_creation_done') };
 			}
+			notifications.error({
+				message: 'Error',
+				description: response.error || t('channel_creation_failed'),
+			});
+			return {
+				status: 'failed',
+				statusMessage: response.error || t('channel_creation_failed'),
+			};
 		} catch (error) {
 			notifications.error({
 				message: 'Error',
 				description: t('channel_creation_failed'),
 			});
+			return { status: 'failed', statusMessage: t('channel_creation_failed') };
+		} finally {
+			setSavingState(false);
 		}
-		setSavingState(false);
 	}, [prepareMsTeamsRequest, t, notifications]);

 	const onSaveHandler = useCallback(
@@ -400,7 +446,15 @@ function CreateAlertChannels({
 			const functionToCall = functionMapper[value as keyof typeof functionMapper];

 			if (functionToCall) {
-				functionToCall();
+				const result = await functionToCall();
+				logEvent('Alert Channel: Save channel', {
+					type: value,
+					sendResolvedAlert: selectedConfig.send_resolved,
+					name: selectedConfig.name,
+					new: 'true',
+					status: result?.status,
+					statusMessage: result?.statusMessage,
+				});
 			} else {
 				notifications.error({
 					message: 'Error',
@@ -409,6 +463,7 @@ function CreateAlertChannels({
 				}
 			}
 		},
+		// eslint-disable-next-line react-hooks/exhaustive-deps
 		[
 			onSlackHandler,
 			onWebhookHandler,
@@ -472,14 +527,25 @@ function CreateAlertChannels({
 					description: t('channel_test_failed'),
 				});
 			}

+			logEvent('Alert Channel: Test notification', {
+				type: channelType,
+				sendResolvedAlert: selectedConfig.send_resolved,
+				name: selectedConfig.name,
+				new: 'true',
+				status:
+					response && response.statusCode === 200 ? 'Test success' : 'Test failed',
+			});
 		} catch (error) {
 			notifications.error({
 				message: 'Error',
 				description: t('channel_test_unexpected'),
 			});
 		}

 		setTestingState(false);
 		},
+		// eslint-disable-next-line react-hooks/exhaustive-deps
 		[
 			prepareWebhookRequest,
 			t,
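All the channel handlers above follow one pattern: each async handler now returns a { status, statusMessage } object from every branch (success, API error, thrown error), and the caller awaits it so a single analytics event can record the outcome. A condensed sketch of the pattern, with hypothetical stand-ins (saveChannel, sendAnalytics) for the real channel API and logEvent helper:

```typescript
interface SaveResult {
	status: 'success' | 'failed';
	statusMessage: string;
}

// Hypothetical stand-ins for the real channel API and the logEvent helper.
declare function saveChannel(): Promise<{ statusCode: number; error?: string }>;
declare function sendAnalytics(event: string, payload: SaveResult): void;

async function onSaveHandler(): Promise<SaveResult> {
	try {
		const response = await saveChannel();
		if (response.statusCode === 200) {
			return { status: 'success', statusMessage: 'channel created' };
		}
		return { status: 'failed', statusMessage: response.error || 'creation failed' };
	} catch {
		return { status: 'failed', statusMessage: 'creation failed' };
	} finally {
		// Mirrors setSavingState(false): runs on every exit path, including returns.
	}
}

// The caller awaits the handler and logs exactly one event with the outcome:
onSaveHandler().then((result) => sendAnalytics('Alert Channel: Save channel', result));
```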
@@ -1,4 +1,6 @@
 import { Row, Typography } from 'antd';
+import logEvent from 'api/common/logEvent';
+import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
 import { useMemo } from 'react';
 import { useTranslation } from 'react-i18next';
 import { AlertTypes } from 'types/api/alerts/alertTypes';
@@ -34,6 +36,13 @@ function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
 			default:
 				break;
 		}

+		logEvent('Alert: Sample alert link clicked', {
+			dataSource: ALERTS_DATA_SOURCE_MAP[option],
+			link: url,
+			page: 'New alert data source selection page',
+		});
+
 		window.open(url, '_blank');
 	}
 	const renderOptions = useMemo(
@@ -1,4 +1,5 @@
 import { Form, Row } from 'antd';
+import logEvent from 'api/common/logEvent';
 import { ENTITY_VERSION_V4 } from 'constants/app';
 import { QueryParams } from 'constants/query';
 import FormAlertRules from 'container/FormAlertRules';
@@ -68,6 +69,8 @@ function CreateRules(): JSX.Element {
 	useEffect(() => {
 		if (alertType) {
 			onSelectType(alertType);
+		} else {
+			logEvent('Alert: New alert data source selection page visited', {});
 		}
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [alertType]);
@@ -11,6 +11,7 @@ import testOpsgenie from 'api/channels/testOpsgenie';
 import testPagerApi from 'api/channels/testPager';
 import testSlackApi from 'api/channels/testSlack';
 import testWebhookApi from 'api/channels/testWebhook';
+import logEvent from 'api/common/logEvent';
 import ROUTES from 'constants/routes';
 import {
 	ChannelType,
@@ -89,7 +90,7 @@ function EditAlertChannels({
 				description: t('webhook_url_required'),
 			});
 			setSavingState(false);
-			return;
+			return { status: 'failed', statusMessage: t('webhook_url_required') };
 		}

 		const response = await editSlackApi(prepareSlackRequest());
@@ -101,13 +102,17 @@ function EditAlertChannels({
 			});

 			history.replace(ROUTES.ALL_CHANNELS);
-		} else {
-			notifications.error({
-				message: 'Error',
-				description: response.error || t('channel_edit_failed'),
-			});
+			return { status: 'success', statusMessage: t('channel_edit_done') };
 		}
+		notifications.error({
+			message: 'Error',
+			description: response.error || t('channel_edit_failed'),
+		});
 		setSavingState(false);
+		return {
+			status: 'failed',
+			statusMessage: response.error || t('channel_edit_failed'),
+		};
 	}, [prepareSlackRequest, t, notifications, selectedConfig]);

 	const prepareWebhookRequest = useCallback(() => {
@@ -136,13 +141,13 @@ function EditAlertChannels({
 		if (selectedConfig?.api_url === '') {
 			showError(t('webhook_url_required'));
 			setSavingState(false);
-			return;
+			return { status: 'failed', statusMessage: t('webhook_url_required') };
 		}

 		if (username && (!password || password === '')) {
 			showError(t('username_no_password'));
 			setSavingState(false);
-			return;
+			return { status: 'failed', statusMessage: t('username_no_password') };
 		}

 		const response = await editWebhookApi(prepareWebhookRequest());
@@ -154,10 +159,15 @@ function EditAlertChannels({
 			});

 			history.replace(ROUTES.ALL_CHANNELS);
-		} else {
-			showError(response.error || t('channel_edit_failed'));
+			return { status: 'success', statusMessage: t('channel_edit_done') };
 		}
+		showError(response.error || t('channel_edit_failed'));

 		setSavingState(false);
+		return {
+			status: 'failed',
+			statusMessage: response.error || t('channel_edit_failed'),
+		};
 	}, [prepareWebhookRequest, t, notifications, selectedConfig]);

 	const prepareEmailRequest = useCallback(
@@ -181,13 +191,18 @@ function EditAlertChannels({
 				description: t('channel_edit_done'),
 			});
 			history.replace(ROUTES.ALL_CHANNELS);
-		} else {
-			notifications.error({
-				message: 'Error',
-				description: response.error || t('channel_edit_failed'),
-			});
+			return { status: 'success', statusMessage: t('channel_edit_done') };
 		}
+		notifications.error({
+			message: 'Error',
+			description: response.error || t('channel_edit_failed'),
+		});

 		setSavingState(false);
+		return {
+			status: 'failed',
+			statusMessage: response.error || t('channel_edit_failed'),
+		};
 	}, [prepareEmailRequest, t, notifications]);

 	const preparePagerRequest = useCallback(
@@ -218,7 +233,7 @@ function EditAlertChannels({
 				description: validationError,
 			});
 			setSavingState(false);
-			return;
+			return { status: 'failed', statusMessage: validationError };
 		}
 		const response = await editPagerApi(preparePagerRequest());

@@ -229,13 +244,18 @@ function EditAlertChannels({
 			});

 			history.replace(ROUTES.ALL_CHANNELS);
-		} else {
-			notifications.error({
-				message: 'Error',
-				description: response.error || t('channel_edit_failed'),
-			});
+			return { status: 'success', statusMessage: t('channel_edit_done') };
 		}
+		notifications.error({
+			message: 'Error',
+			description: response.error || t('channel_edit_failed'),
+		});

 		setSavingState(false);
+		return {
+			status: 'failed',
+			statusMessage: response.error || t('channel_edit_failed'),
+		};
 	}, [preparePagerRequest, notifications, selectedConfig, t]);

 	const prepareOpsgenieRequest = useCallback(
@@ -259,7 +279,7 @@ function EditAlertChannels({
 				description: t('api_key_required'),
 			});
 			setSavingState(false);
-			return;
+			return { status: 'failed', statusMessage: t('api_key_required') };
 		}

 		const response = await editOpsgenie(prepareOpsgenieRequest());
@@ -271,13 +291,18 @@ function EditAlertChannels({
 			});

 			history.replace(ROUTES.ALL_CHANNELS);
-		} else {
-			notifications.error({
-				message: 'Error',
-				description: response.error || t('channel_edit_failed'),
-			});
+			return { status: 'success', statusMessage: t('channel_edit_done') };
 		}
+		notifications.error({
+			message: 'Error',
+			description: response.error || t('channel_edit_failed'),
+		});

 		setSavingState(false);
+		return {
+			status: 'failed',
+			statusMessage: response.error || t('channel_edit_failed'),
+		};
 	}, [prepareOpsgenieRequest, t, notifications, selectedConfig]);

 	const prepareMsTeamsRequest = useCallback(
@@ -301,7 +326,7 @@ function EditAlertChannels({
 				description: t('webhook_url_required'),
 			});
 			setSavingState(false);
-			return;
+			return { status: 'failed', statusMessage: t('webhook_url_required') };
 		}

 		const response = await editMsTeamsApi(prepareMsTeamsRequest());
@@ -313,31 +338,46 @@ function EditAlertChannels({
 			});

 			history.replace(ROUTES.ALL_CHANNELS);
-		} else {
-			notifications.error({
-				message: 'Error',
-				description: response.error || t('channel_edit_failed'),
-			});
+			return { status: 'success', statusMessage: t('channel_edit_done') };
 		}
+		notifications.error({
+			message: 'Error',
+			description: response.error || t('channel_edit_failed'),
+		});

 		setSavingState(false);
+		return {
+			status: 'failed',
+			statusMessage: response.error || t('channel_edit_failed'),
+		};
 	}, [prepareMsTeamsRequest, t, notifications, selectedConfig]);

 	const onSaveHandler = useCallback(
-		(value: ChannelType) => {
+		async (value: ChannelType) => {
+			let result;
 			if (value === ChannelType.Slack) {
-				onSlackEditHandler();
+				result = await onSlackEditHandler();
 			} else if (value === ChannelType.Webhook) {
-				onWebhookEditHandler();
+				result = await onWebhookEditHandler();
 			} else if (value === ChannelType.Pagerduty) {
-				onPagerEditHandler();
+				result = await onPagerEditHandler();
 			} else if (value === ChannelType.MsTeams) {
-				onMsTeamsEditHandler();
+				result = await onMsTeamsEditHandler();
 			} else if (value === ChannelType.Opsgenie) {
-				onOpsgenieEditHandler();
+				result = await onOpsgenieEditHandler();
 			} else if (value === ChannelType.Email) {
-				onEmailEditHandler();
+				result = await onEmailEditHandler();
 			}
+			logEvent('Alert Channel: Save channel', {
+				type: value,
+				sendResolvedAlert: selectedConfig.send_resolved,
+				name: selectedConfig.name,
+				new: 'false',
+				status: result?.status,
+				statusMessage: result?.statusMessage,
+			});
 		},
+		// eslint-disable-next-line react-hooks/exhaustive-deps
 		[
 			onSlackEditHandler,
 			onWebhookEditHandler,
@@ -399,6 +439,14 @@ function EditAlertChannels({
 				description: t('channel_test_failed'),
 			});
 		}
+			logEvent('Alert Channel: Test notification', {
+				type: channelType,
+				sendResolvedAlert: selectedConfig.send_resolved,
+				name: selectedConfig.name,
+				new: 'false',
+				status:
+					response && response.statusCode === 200 ? 'Test success' : 'Test failed',
+			});
 		} catch (error) {
 			notifications.error({
 				message: 'Error',
@@ -407,6 +455,7 @@ function EditAlertChannels({
 			}
 			setTestingState(false);
 		},
+		// eslint-disable-next-line react-hooks/exhaustive-deps
 		[
 			t,
 			prepareWebhookRequest,
@@ -3,6 +3,8 @@ import './FormAlertRules.styles.scss';
 import { PlusOutlined } from '@ant-design/icons';
 import { Button, Form, Select, Switch, Tooltip } from 'antd';
 import getChannels from 'api/channels/getAll';
+import logEvent from 'api/common/logEvent';
+import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
 import ROUTES from 'constants/routes';
 import useComponentPermission from 'hooks/useComponentPermission';
 import useFetch from 'hooks/useFetch';
@@ -10,6 +12,7 @@ import { useCallback, useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';
+import { AlertTypes } from 'types/api/alerts/alertTypes';
 import { AlertDef, Labels } from 'types/api/alerts/def';
 import AppReducer from 'types/reducer/app';
 import { requireErrorMessage } from 'utils/form/requireErrorMessage';
@@ -73,9 +76,24 @@ function BasicInfo({

 	const noChannels = channels.payload?.length === 0;
 	const handleCreateNewChannels = useCallback(() => {
+		logEvent('Alert: Create notification channel button clicked', {
+			dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
+			ruleId: isNewRule ? 0 : alertDef?.id,
+		});
 		window.open(ROUTES.CHANNELS_NEW, '_blank');
+		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, []);

+	useEffect(() => {
+		if (!channels.loading && isNewRule) {
+			logEvent('Alert: New alert creation page visited', {
+				dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
+				numberOfChannels: channels.payload?.length,
+			});
+		}
+		// eslint-disable-next-line react-hooks/exhaustive-deps
+	}, [channels.payload, channels.loading]);
+
 	return (
 		<>
 			<StepHeading> {t('alert_form_step3')} </StepHeading>
@@ -2,6 +2,7 @@ import './QuerySection.styles.scss';

 import { Color } from '@signozhq/design-tokens';
 import { Button, Tabs, Tooltip } from 'antd';
+import logEvent from 'api/common/logEvent';
 import PromQLIcon from 'assets/Dashboard/PromQl';
 import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
 import { ENTITY_VERSION_V4 } from 'constants/app';
@@ -31,6 +32,7 @@ function QuerySection({
 	runQuery,
 	alertDef,
 	panelType,
+	ruleId,
 }: QuerySectionProps): JSX.Element {
 	// init namespace for translations
 	const { t } = useTranslation('alerts');
@@ -158,7 +160,15 @@ function QuerySection({
 				<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
 					<Button
 						type="primary"
-						onClick={runQuery}
+						onClick={(): void => {
+							runQuery();
+							logEvent('Alert: Stage and run query', {
+								dataSource: ALERTS_DATA_SOURCE_MAP[alertType],
+								isNewRule: !ruleId || ruleId === 0,
+								ruleId,
+								queryType: queryCategory,
+							});
+						}}
 						className="stage-run-query"
 						icon={<Play size={14} />}
 					>
@@ -228,6 +238,7 @@ interface QuerySectionProps {
 	runQuery: VoidFunction;
 	alertDef: AlertDef;
 	panelType: PANEL_TYPES;
+	ruleId: number;
 }

 export default QuerySection;
@@ -12,8 +12,10 @@ import {
 } from 'antd';
 import saveAlertApi from 'api/alerts/save';
 import testAlertApi from 'api/alerts/testAlert';
+import logEvent from 'api/common/logEvent';
 import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
 import { alertHelpMessage } from 'components/facingIssueBtn/util';
+import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
 import { FeatureKeys } from 'constants/features';
 import { QueryParams } from 'constants/query';
 import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -338,8 +340,13 @@ function FormAlertRules({
 			return;
 		}
 		const postableAlert = memoizedPreparePostData();

 		setLoading(true);
+
+		let logData = {
+			status: 'error',
+			statusMessage: t('unexpected_error'),
+		};
+
 		try {
 			const apiReq =
 				ruleId && ruleId > 0
@@ -349,10 +356,15 @@ function FormAlertRules({
 			const response = await saveAlertApi(apiReq);

 			if (response.statusCode === 200) {
+				logData = {
+					status: 'success',
+					statusMessage:
+						!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
+				};
+
 				notifications.success({
 					message: 'Success',
-					description:
-						!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
+					description: logData.statusMessage,
 				});

 				// invalidate rule in cache
@@ -367,18 +379,42 @@ function FormAlertRules({
 					history.replace(`${ROUTES.LIST_ALL_ALERT}?${urlQuery.toString()}`);
 				}, 2000);
 			} else {
+				logData = {
+					status: 'error',
+					statusMessage: response.error || t('unexpected_error'),
+				};
+
 				notifications.error({
 					message: 'Error',
-					description: response.error || t('unexpected_error'),
+					description: logData.statusMessage,
 				});
 			}
 		} catch (e) {
+			logData = {
+				status: 'error',
+				statusMessage: t('unexpected_error'),
+			};
+
 			notifications.error({
 				message: 'Error',
-				description: t('unexpected_error'),
+				description: logData.statusMessage,
 			});
 		}

 		setLoading(false);
+
+		logEvent('Alert: Save alert', {
+			...logData,
+			dataSource: ALERTS_DATA_SOURCE_MAP[postableAlert?.alertType as AlertTypes],
+			channelNames: postableAlert?.preferredChannels,
+			broadcastToAll: postableAlert?.broadcastToAll,
+			isNewRule: !ruleId || ruleId === 0,
+			ruleId,
+			queryType: currentQuery.queryType,
+			alertId: postableAlert?.id,
+			alertName: postableAlert?.alert,
+		});
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [
 		isFormValid,
 		memoizedPreparePostData,
@@ -414,6 +450,7 @@ function FormAlertRules({
 		}
 		const postableAlert = memoizedPreparePostData();

+		let statusResponse = { status: 'failed', message: '' };
 		setLoading(true);
 		try {
 			const response = await testAlertApi({ data: postableAlert });
@@ -425,25 +462,43 @@ function FormAlertRules({
 					message: 'Error',
 					description: t('no_alerts_found'),
 				});
+				statusResponse = { status: 'failed', message: t('no_alerts_found') };
 			} else {
 				notifications.success({
 					message: 'Success',
 					description: t('rule_test_fired'),
 				});
+				statusResponse = { status: 'success', message: t('rule_test_fired') };
 			}
 			} else {
 				notifications.error({
 					message: 'Error',
 					description: response.error || t('unexpected_error'),
 				});
+				statusResponse = {
+					status: 'failed',
+					message: response.error || t('unexpected_error'),
+				};
 			}
 		} catch (e) {
 			notifications.error({
 				message: 'Error',
 				description: t('unexpected_error'),
 			});
+			statusResponse = { status: 'failed', message: t('unexpected_error') };
 		}
 		setLoading(false);
+		logEvent('Alert: Test notification', {
+			dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
+			channelNames: postableAlert?.preferredChannels,
+			broadcastToAll: postableAlert?.broadcastToAll,
+			isNewRule: !ruleId || ruleId === 0,
+			ruleId,
+			queryType: currentQuery.queryType,
+			status: statusResponse.status,
+			statusMessage: statusResponse.message,
+		});
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, [t, isFormValid, memoizedPreparePostData, notifications]);

 	const renderBasicInfo = (): JSX.Element => (
@@ -513,6 +568,16 @@ function FormAlertRules({

 	const isRuleCreated = !ruleId || ruleId === 0;

+	useEffect(() => {
+		if (!isRuleCreated) {
+			logEvent('Alert: Edit page visited', {
+				ruleId,
+				dataSource: ALERTS_DATA_SOURCE_MAP[alertType as AlertTypes],
+			});
+		}
+		// eslint-disable-next-line react-hooks/exhaustive-deps
+	}, []);
+
 	function handleRedirection(option: AlertTypes): void {
 		let url = '';
 		switch (option) {
@@ -535,6 +600,13 @@ function FormAlertRules({
 			default:
 				break;
 		}
+		logEvent('Alert: Check example alert clicked', {
+			dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
+			isNewRule: !ruleId || ruleId === 0,
+			ruleId,
+			queryType: currentQuery.queryType,
+			link: url,
+		});
 		window.open(url, '_blank');
 	}

@@ -572,6 +644,7 @@ function FormAlertRules({
 					alertDef={alertDef}
 					panelType={panelType || PANEL_TYPES.TIME_SERIES}
 					key={currentQuery.queryType}
+					ruleId={ruleId}
 				/>

 				<RuleOptions
@@ -80,6 +80,8 @@ function FullView({
 				query: updatedQuery,
 				globalSelectedInterval: globalSelectedTime,
 				variables: getDashboardVariables(selectedDashboard?.data.variables),
+				fillGaps: widget.fillSpans,
+				formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
 			};
 		}
 		updatedQuery.builder.queryData[0].pageSize = 10;
@@ -109,6 +109,7 @@ function GridCardGraph({
 				globalSelectedInterval,
 				variables: getDashboardVariables(variables),
 				fillGaps: widget.fillSpans,
+				formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
 			};
 		}
 		updatedQuery.builder.queryData[0].pageSize = 10;
frontend/src/container/GridTableComponent/__tests__/response.ts (new file): 215 lines
@@ -0,0 +1,215 @@
export const tableDataMultipleQueriesSuccessResponse = {
	columns: [
		{
			name: 'service_name',
			queryName: '',
			isValueColumn: false,
		},
		{
			name: 'A',
			queryName: 'A',
			isValueColumn: true,
		},
		{
			name: 'B',
			queryName: 'B',
			isValueColumn: true,
		},
	],
	rows: [
		{
			data: {
				A: 4196.71,
				B: 'n/a',
				service_name: 'demo-app',
			},
		},
		{
			data: {
				A: 500.83,
				B: 'n/a',
				service_name: 'customer',
			},
		},
		{
			data: {
				A: 499.5,
				B: 'n/a',
				service_name: 'mysql',
			},
		},
		{
			data: {
				A: 293.22,
				B: 'n/a',
				service_name: 'frontend',
			},
		},
		{
			data: {
				A: 230.03,
				B: 'n/a',
				service_name: 'driver',
			},
		},
		{
			data: {
				A: 67.09,
				B: 'n/a',
				service_name: 'route',
			},
		},
		{
			data: {
				A: 30.96,
				B: 'n/a',
				service_name: 'redis',
			},
		},
		{
			data: {
				A: 'n/a',
				B: 112.27,
				service_name: 'n/a',
			},
		},
	],
};

export const widgetQueryWithLegend = {
	clickhouse_sql: [
		{
			name: 'A',
			legend: '',
			disabled: false,
			query: '',
		},
	],
	promql: [
		{
			name: 'A',
			query: '',
			legend: '',
			disabled: false,
		},
	],
	builder: {
		queryData: [
			{
				dataSource: 'metrics',
				queryName: 'A',
				aggregateOperator: 'count',
				aggregateAttribute: {
					dataType: 'float64',
					id: 'signoz_latency--float64--ExponentialHistogram--true',
					isColumn: true,
					isJSON: false,
					key: 'signoz_latency',
					type: 'ExponentialHistogram',
				},
				timeAggregation: '',
				spaceAggregation: 'p90',
				functions: [],
				filters: {
					items: [],
					op: 'AND',
				},
				expression: 'A',
				disabled: false,
				stepInterval: 60,
				having: [],
				limit: null,
				orderBy: [],
				groupBy: [
					{
						dataType: 'string',
						isColumn: false,
						isJSON: false,
						key: 'service_name',
						type: 'tag',
						id: 'service_name--string--tag--false',
					},
				],
				legend: 'p99',
				reduceTo: 'avg',
			},
			{
				dataSource: 'metrics',
				queryName: 'B',
				aggregateOperator: 'rate',
				aggregateAttribute: {
					dataType: 'float64',
					id: 'system_disk_operations--float64--Sum--true',
					isColumn: true,
					isJSON: false,
					key: 'system_disk_operations',
					type: 'Sum',
				},
				timeAggregation: 'rate',
				spaceAggregation: 'sum',
				functions: [],
				filters: {
					items: [],
					op: 'AND',
				},
				expression: 'B',
				disabled: false,
				stepInterval: 60,
				having: [],
				limit: null,
				orderBy: [],
				groupBy: [],
				legend: '',
				reduceTo: 'avg',
			},
		],
		queryFormulas: [],
	},
	id: '48ad5a67-9a3c-49d4-a886-d7a34f8b875d',
	queryType: 'builder',
};

export const expectedOutputWithLegends = {
	dataSource: [
		{
			A: 4196.71,
			B: 'n/a',
			service_name: 'demo-app',
		},
		{
			A: 500.83,
			B: 'n/a',
			service_name: 'customer',
		},
		{
			A: 499.5,
			B: 'n/a',
			service_name: 'mysql',
		},
		{
			A: 293.22,
			B: 'n/a',
			service_name: 'frontend',
		},
		{
			A: 230.03,
			B: 'n/a',
			service_name: 'driver',
		},
		{
			A: 67.09,
			B: 'n/a',
			service_name: 'route',
		},
		{
			A: 30.96,
			B: 'n/a',
			service_name: 'redis',
		},
		{
			A: 'n/a',
			B: 112.27,
			service_name: 'n/a',
		},
	],
};
@@ -0,0 +1,42 @@
import { Query } from 'types/api/queryBuilder/queryBuilderData';

import { createColumnsAndDataSource, getQueryLegend } from '../utils';
import {
	expectedOutputWithLegends,
	tableDataMultipleQueriesSuccessResponse,
	widgetQueryWithLegend,
} from './response';

describe('Table Panel utils', () => {
	it('createColumnsAndDataSource function', () => {
		const data = tableDataMultipleQueriesSuccessResponse;
		const query = widgetQueryWithLegend as Query;

		const { columns, dataSource } = createColumnsAndDataSource(data, query);

		expect(dataSource).toStrictEqual(expectedOutputWithLegends.dataSource);

		// this makes sure that the columns are rendered in the same order as the response
		expect(columns[0].title).toBe('service_name');
		// the next one specifically makes sure that legends are properly applied across multiple queries
		expect(columns[1].title).toBe('p99');
		// this makes sure that a query without a legend takes its title from the query response
		expect(columns[2].title).toBe('B');

		// this ensures that the rows map to the column data indexes: the dataIndex should equal the column name
		// returned in the response, as the rows are mapped against them
		expect((columns[0] as any).dataIndex).toBe('service_name');
		expect((columns[1] as any).dataIndex).toBe('A');
		expect((columns[2] as any).dataIndex).toBe('B');
	});

	it('getQueryLegend function', () => {
		const query = widgetQueryWithLegend as Query;

		// query A has a legend of p99
		expect(getQueryLegend(query, 'A')).toBe('p99');

		// should return undefined when the legend is not present
		expect(getQueryLegend(query, 'B')).toBe(undefined);
	});
});
@@ -3,10 +3,7 @@ import { Space, Tooltip } from 'antd';
 import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
 import { Events } from 'constants/events';
 import { QueryTable } from 'container/QueryTable';
-import {
-	createTableColumnsFromQuery,
-	RowData,
-} from 'lib/query/createTableColumnsFromQuery';
+import { RowData } from 'lib/query/createTableColumnsFromQuery';
 import { cloneDeep, get, isEmpty, set } from 'lodash-es';
 import { memo, ReactNode, useCallback, useEffect, useMemo } from 'react';
 import { useTranslation } from 'react-i18next';
@@ -14,7 +11,11 @@ import { eventEmitter } from 'utils/getEventEmitter';

 import { WrapperStyled } from './styles';
 import { GridTableComponentProps } from './types';
-import { findMatchingThreshold } from './utils';
+import {
+	createColumnsAndDataSource,
+	findMatchingThreshold,
+	TableData,
+} from './utils';

 function GridTableComponent({
 	data,
@@ -25,28 +26,26 @@ function GridTableComponent({
 	...props
 }: GridTableComponentProps): JSX.Element {
 	const { t } = useTranslation(['valueGraph']);

 	// create columns and dataSource in the ui friendly structure
+	// use the query from the widget here to extract the legend information
 	const { columns, dataSource: originalDataSource } = useMemo(
-		() =>
-			createTableColumnsFromQuery({
-				query,
-				queryTableData: data,
-			}),
-		[data, query],
+		() => createColumnsAndDataSource((data as unknown) as TableData, query),
+		[query, data],
 	);

 	const createDataInCorrectFormat = useCallback(
 		(dataSource: RowData[]): RowData[] =>
 			dataSource.map((d) => {
 				const finalObject = {};
-				const keys = Object.keys(d);
-				keys.forEach((k) => {
-					const label = get(
-						columns.find((c) => get(c, 'dataIndex', '') === k) || {},
-						'title',
-						'',
-					);
-					if (label) {
-						set(finalObject, label as string, d[k]);
-					}
+
+				// we use the order of the columns here to have similar download as the user view
+				columns.forEach((k) => {
+					set(
+						finalObject,
+						get(k, 'title', '') as string,
+						get(d, get(k, 'dataIndex', ''), 'n/a'),
+					);
 				});
 				return finalObject as RowData;
 			}),
@@ -65,7 +64,11 @@ function GridTableComponent({
 			const newValue = { ...val };
 			Object.keys(val).forEach((k) => {
 				if (columnUnits[k]) {
-					newValue[k] = getYAxisFormattedValue(String(val[k]), columnUnits[k]);
+					// the check below takes care of not adding units for rows that have n/a values
+					newValue[k] =
+						val[k] !== 'n/a'
+							? getYAxisFormattedValue(String(val[k]), columnUnits[k])
+							: val[k];
 					newValue[`${k}_without_unit`] = val[k];
 				}
 			});
@@ -1,4 +1,11 @@
import { ColumnsType, ColumnType } from 'antd/es/table';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { QUERY_TABLE_CONFIG } from 'container/QueryTable/config';
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { isEmpty, isNaN } from 'lodash-es';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';

// Helper function to evaluate the condition based on the operator
function evaluateCondition(
@@ -56,3 +63,85 @@ export function findMatchingThreshold(
		hasMultipleMatches,
	};
}

export interface TableData {
	columns: { name: string; queryName: string; isValueColumn: boolean }[];
	rows: { data: any }[];
}

export function getQueryLegend(
	currentQuery: Query,
	queryName: string,
): string | undefined {
	let legend: string | undefined;
	switch (currentQuery.queryType) {
		case EQueryType.QUERY_BUILDER:
			// check if the value is present in the queries
			legend = currentQuery.builder.queryData.find(
				(query) => query.queryName === queryName,
			)?.legend;

			if (!legend) {
				// check if the value is present in the formula
				legend = currentQuery.builder.queryFormulas.find(
					(query) => query.queryName === queryName,
				)?.legend;
			}
			break;
		case EQueryType.CLICKHOUSE:
			legend = currentQuery.clickhouse_sql.find(
				(query) => query.name === queryName,
			)?.legend;
			break;
		case EQueryType.PROM:
			legend = currentQuery.promql.find((query) => query.name === queryName)
				?.legend;
			break;
		default:
			legend = undefined;
			break;
	}

	return legend;
}

export function createColumnsAndDataSource(
	data: TableData,
	currentQuery: Query,
	renderColumnCell?: QueryTableProps['renderColumnCell'],
): { columns: ColumnsType<RowData>; dataSource: RowData[] } {
	const columns: ColumnsType<RowData> =
		data.columns?.reduce<ColumnsType<RowData>>((acc, item) => {
			// if the column is the value column then we need to check for the available legend
			const legend = item.isValueColumn
				? getQueryLegend(currentQuery, item.queryName)
				: undefined;

			const column: ColumnType<RowData> = {
				dataIndex: item.name,
				// if no legend is present then rely on the column name value
				title: !isEmpty(legend) ? legend : item.name,
				width: QUERY_TABLE_CONFIG.width,
				render: renderColumnCell && renderColumnCell[item.name],
				sorter: (a: RowData, b: RowData): number => {
					const valueA = Number(a[`${item.name}_without_unit`] ?? a[item.name]);
					const valueB = Number(b[`${item.name}_without_unit`] ?? b[item.name]);

					if (!isNaN(valueA) && !isNaN(valueB)) {
						return valueA - valueB;
					}

					return ((a[item.name] as string) || '').localeCompare(
						(b[item.name] as string) || '',
					);
				},
			};

			return [...acc, column];
		}, []) || [];

	// the rows returned have data encapsulation hence removing the same here
	const dataSource = data.rows?.map((d) => d.data) || [];

	return { columns, dataSource };
}
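A minimal usage sketch of the helper above, with a hypothetical `TableData` payload (the shape follows the `TableData` interface in the diff; `currentQuery` is whatever the widget's query builder holds):

```typescript
// Hypothetical payload in the TableData shape expected by createColumnsAndDataSource.
const tableData: TableData = {
	columns: [
		{ name: 'service_name', queryName: 'A', isValueColumn: false },
		{ name: 'A', queryName: 'A', isValueColumn: true },
	],
	rows: [
		{ data: { service_name: 'frontend', A: 120 } },
		{ data: { service_name: 'driver', A: 310 } },
	],
};

const { columns, dataSource } = createColumnsAndDataSource(tableData, currentQuery);
// `columns` picks up query A's legend as its title when one is set;
// `dataSource` is the unwrapped rows, ready for antd's <Table />.
```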
@@ -27,6 +27,7 @@ import {
import { useSelector } from 'react-redux';
import { NavLink } from 'react-router-dom';
import { AppState } from 'store/reducers';
import { License } from 'types/api/licenses/def';
import AppReducer from 'types/reducer/app';
import { getFormattedDate, getRemainingDays } from 'utils/timeUtils';

@@ -109,9 +110,13 @@ function HeaderContainer(): JSX.Element {

	const { data: licenseData, isFetching, status: licenseStatus } = useLicense();

	const licensesStatus: string =
		licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ||
		'';

	const isLicenseActive =
		licenseData?.payload?.licenses?.find((e) => e.isCurrent)?.status ===
		LICENSE_PLAN_STATUS.VALID;
		licensesStatus?.toLocaleLowerCase() ===
		LICENSE_PLAN_STATUS.VALID.toLocaleLowerCase();

	useEffect(() => {
		if (
@@ -7,17 +7,20 @@ interface AlertInfoCardProps {
	header: string;
	subheader: string;
	link: string;
	onClick: () => void;
}

function AlertInfoCard({
	header,
	subheader,
	link,
	onClick,
}: AlertInfoCardProps): JSX.Element {
	return (
		<div
			className="alert-info-card"
			onClick={(): void => {
				onClick();
				window.open(link, '_blank');
			}}
		>
@@ -2,6 +2,7 @@ import './AlertsEmptyState.styles.scss';

import { PlusOutlined } from '@ant-design/icons';
import { Button, Divider, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import ROUTES from 'constants/routes';
import useComponentPermission from 'hooks/useComponentPermission';
import { useNotifications } from 'hooks/useNotifications';
@@ -10,12 +11,26 @@ import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { DataSource } from 'types/common/queryBuilder';
import AppReducer from 'types/reducer/app';

import AlertInfoCard from './AlertInfoCard';
import { ALERT_CARDS, ALERT_INFO_LINKS } from './alertLinks';
import InfoLinkText from './InfoLinkText';

const alertLogEvents = (
	title: string,
	link: string,
	dataSource?: DataSource,
): void => {
	const attributes = {
		link,
		page: 'Alert empty state page',
	};

	logEvent(title, dataSource ? { ...attributes, dataSource } : attributes);
};

export function AlertsEmptyState(): JSX.Element {
	const { t } = useTranslation('common');
	const { role, featureResponse } = useSelector<AppState, AppReducer>(
@@ -91,18 +106,33 @@ export function AlertsEmptyState(): JSX.Element {
						link="https://youtu.be/xjxNIqiv4_M"
						leftIconVisible
						rightIconVisible
						onClick={(): void =>
							alertLogEvents(
								'Alert: Video tutorial link clicked',
								'https://youtu.be/xjxNIqiv4_M',
							)
						}
					/>
				</div>

				{ALERT_INFO_LINKS.map((info) => (
					<InfoLinkText
						key={info.link}
						infoText={info.infoText}
						link={info.link}
						leftIconVisible={info.leftIconVisible}
						rightIconVisible={info.rightIconVisible}
					/>
				))}
				{ALERT_INFO_LINKS.map((info) => {
					const logEventTriggered = (): void =>
						alertLogEvents(
							'Alert: Tutorial doc link clicked',
							info.link,
							info.dataSource,
						);
					return (
						<InfoLinkText
							key={info.link}
							infoText={info.infoText}
							link={info.link}
							leftIconVisible={info.leftIconVisible}
							rightIconVisible={info.rightIconVisible}
							onClick={logEventTriggered}
						/>
					);
				})}
			</div>
		</section>
		<div className="get-started-text">
@@ -113,14 +143,23 @@ export function AlertsEmptyState(): JSX.Element {
				</Divider>
			</div>

			{ALERT_CARDS.map((card) => (
				<AlertInfoCard
					key={card.link}
					header={card.header}
					subheader={card.subheader}
					link={card.link}
				/>
			))}
			{ALERT_CARDS.map((card) => {
				const logEventTriggered = (): void =>
					alertLogEvents(
						'Alert: Sample alert link clicked',
						card.link,
						card.dataSource,
					);
				return (
					<AlertInfoCard
						key={card.link}
						header={card.header}
						subheader={card.subheader}
						link={card.link}
						onClick={logEventTriggered}
					/>
				);
			})}
		</div>
	</div>
);
@@ -6,6 +6,7 @@ interface InfoLinkTextProps {
	link: string;
	leftIconVisible: boolean;
	rightIconVisible: boolean;
	onClick: () => void;
}

function InfoLinkText({
@@ -13,10 +14,12 @@ function InfoLinkText({
	link,
	leftIconVisible,
	rightIconVisible,
	onClick,
}: InfoLinkTextProps): JSX.Element {
	return (
		<Flex
			onClick={(): void => {
				onClick();
				window.open(link, '_blank');
			}}
			className="info-link-container"
@@ -1,3 +1,5 @@
import { DataSource } from 'types/common/queryBuilder';

export const ALERT_INFO_LINKS = [
	{
		infoText: 'How to create Metrics-based alerts',
@@ -5,6 +7,7 @@ export const ALERT_INFO_LINKS = [
			'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
		leftIconVisible: false,
		rightIconVisible: true,
		dataSource: DataSource.METRICS,
	},
	{
		infoText: 'How to create Log-based alerts',
@@ -12,6 +15,7 @@ export const ALERT_INFO_LINKS = [
			'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
		leftIconVisible: false,
		rightIconVisible: true,
		dataSource: DataSource.LOGS,
	},
	{
		infoText: 'How to create Trace-based alerts',
@@ -19,6 +23,7 @@ export const ALERT_INFO_LINKS = [
			'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
		leftIconVisible: false,
		rightIconVisible: true,
		dataSource: DataSource.TRACES,
	},
];

@@ -26,24 +31,28 @@ export const ALERT_CARDS = [
	{
		header: 'Alert on high memory usage',
		subheader: "Monitor your host's memory usage",
		dataSource: DataSource.METRICS,
		link:
			'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-memory-usage-for-host-goes-above-400-mb-or-any-fixed-memory',
	},
	{
		header: 'Alert on slow external API calls',
		subheader: 'Monitor your external API calls',
		dataSource: DataSource.TRACES,
		link:
			'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-external-api-latency-p90-is-over-1-second-for-last-5-mins',
	},
	{
		header: 'Alert on high percentage of timeout errors in logs',
		subheader: 'Monitor your logs for errors',
		dataSource: DataSource.LOGS,
		link:
			'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-percentage-of-redis-timeout-error-logs-greater-than-7-in-last-5-mins',
	},
	{
		header: 'Alert on high error percentage of an endpoint',
		subheader: 'Monitor your API endpoint',
		dataSource: DataSource.METRICS,
		link:
			'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#3-alert-when-the-error-percentage-for-an-endpoint-exceeds-5',
	},
@@ -3,6 +3,7 @@ import { PlusOutlined } from '@ant-design/icons';
import { Input, Typography } from 'antd';
import type { ColumnsType } from 'antd/es/table/interface';
import saveAlertApi from 'api/alerts/save';
import logEvent from 'api/common/logEvent';
import DropDown from 'components/DropDown/DropDown';
import { listAlertMessage } from 'components/facingIssueBtn/util';
import {
@@ -41,7 +42,7 @@ import {
} from './styles';
import Status from './TableComponents/Status';
import ToggleAlertState from './ToggleAlertState';
import { filterAlerts } from './utils';
import { alertActionLogEvent, filterAlerts } from './utils';

const { Search } = Input;

@@ -107,12 +108,16 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
	}, [notificationsApi, t]);

	const onClickNewAlertHandler = useCallback(() => {
		logEvent('Alert: New alert button clicked', {
			number: allAlertRules?.length,
		});
		featureResponse
			.refetch()
			.then(() => {
				history.push(ROUTES.ALERTS_NEW);
			})
			.catch(handleError);
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [featureResponse, handleError]);

	const onEditHandler = (record: GettableAlert) => (): void => {
@@ -321,6 +326,7 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
			width: 10,
			render: (id: GettableAlert['id'], record): JSX.Element => (
				<DropDown
					onDropDownItemClick={(item): void => alertActionLogEvent(item.key, record)}
					element={[
						<ToggleAlertState
							key="1"
@@ -356,6 +362,9 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
		});
	}

	const paginationConfig = {
		defaultCurrent: Number(paginationParam) || 1,
	};
	return (
		<>
			<SearchContainer>
@@ -385,11 +394,10 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
				columns={columns}
				rowKey="id"
				dataSource={data}
				shouldSendAlertsLogEvent
				dynamicColumns={dynamicColumns}
				onChange={handleChange}
				pagination={{
					defaultCurrent: Number(paginationParam) || 1,
				}}
				pagination={paginationConfig}
				facingIssueBtn={{
					attributes: {
						screen: 'Alert list page',
@@ -1,9 +1,11 @@
import { Space } from 'antd';
import getAll from 'api/alerts/getAll';
import logEvent from 'api/common/logEvent';
import ReleaseNote from 'components/ReleaseNote';
import Spinner from 'components/Spinner';
import { useNotifications } from 'hooks/useNotifications';
import { useEffect } from 'react';
import { isUndefined } from 'lodash-es';
import { useEffect, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import { useLocation } from 'react-router-dom';
@@ -19,8 +21,19 @@ function ListAlertRules(): JSX.Element {
		cacheTime: 0,
	});

	const logEventCalledRef = useRef(false);

	const { notifications } = useNotifications();

	useEffect(() => {
		if (!logEventCalledRef.current && !isUndefined(data?.payload)) {
			logEvent('Alert: List page visited', {
				number: data?.payload?.length,
			});
			logEventCalledRef.current = true;
		}
	}, [data?.payload]);

	useEffect(() => {
		if (status === 'error' || (status === 'success' && data.statusCode >= 400)) {
			notifications.error({
@@ -1,3 +1,6 @@
import logEvent from 'api/common/logEvent';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { GettableAlert } from 'types/api/alerts/get';

export const filterAlerts = (
@@ -23,3 +26,32 @@ export const filterAlerts = (
		);
	});
};

export const alertActionLogEvent = (
	action: string,
	record: GettableAlert,
): void => {
	let actionValue = '';
	switch (action) {
		case '0':
			actionValue = 'Enable/Disable';
			break;
		case '1':
			actionValue = 'Edit';
			break;
		case '2':
			actionValue = 'Clone';
			break;
		case '3':
			actionValue = 'Delete';
			break;
		default:
			break;
	}
	logEvent('Alert: Action', {
		ruleId: record.id,
		dataSource: ALERTS_DATA_SOURCE_MAP[record.alertType as AlertTypes],
		name: record.alert,
		action: actionValue,
	});
};
@@ -609,6 +609,16 @@ function DashboardsList(): JSX.Element {
		</>
	);

	const paginationConfig = data.length > 20 && {
		pageSize: 20,
		showTotal: showPaginationItem,
		showSizeChanger: false,
		onChange: (page: any): void => handlePageSizeUpdate(page),
		current: Number(sortOrder.pagination),
		defaultCurrent: Number(sortOrder.pagination) || 1,
		hideOnSinglePage: true,
	};

	return (
		<div className="dashboards-list-container">
			<div className="dashboards-list-view-content">
@@ -822,16 +832,7 @@ function DashboardsList(): JSX.Element {
					showSorterTooltip
					loading={isDashboardListLoading || isFilteringDashboards}
					showHeader={false}
					pagination={
						data.length > 20 && {
							pageSize: 20,
							showTotal: showPaginationItem,
							showSizeChanger: false,
							onChange: (page): void => handlePageSizeUpdate(page),
							current: Number(sortOrder.pagination),
							defaultCurrent: Number(sortOrder.pagination) || 1,
						}
					}
					pagination={paginationConfig}
				/>
			</>
		)}
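Several tables in this changeset hoist inline `pagination={{ ... }}` literals into a named `paginationConfig`, as above. A minimal sketch of the pattern using antd's own `TablePaginationConfig` type (the table component and row shape below are placeholders, not code from this repo):

```typescript
import { Table, TablePaginationConfig } from 'antd';

// Hoisting the config gives it one definition, a reusable type,
// and keeps the JSX free of nested object literals.
const paginationConfig: TablePaginationConfig = {
	pageSize: 20,
	showSizeChanger: false,
	hideOnSinglePage: true,
};

function DemoTable({ rows }: { rows: { id: string }[] }): JSX.Element {
	return <Table rowKey="id" dataSource={rows} pagination={paginationConfig} />;
}
```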
@@ -15,6 +15,7 @@ import {
} from 'hooks/useResourceAttribute/utils';
import { useMemo, useState } from 'react';
import { useParams } from 'react-router-dom';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { EQueryType } from 'types/common/dashboard';
import { v4 as uuid } from 'uuid';

@@ -93,6 +94,26 @@ function External(): JSX.Element {
		[servicename, tagFilterItems],
	);

	const errorApmToTraceQuery = useGetAPMToTracesQueries({
		servicename,
		isExternalCall: true,
		filters: [
			{
				id: uuid().slice(0, 8),
				key: {
					key: 'hasError',
					dataType: DataTypes.bool,
					type: 'tag',
					isColumn: true,
					isJSON: false,
					id: 'hasError--bool--tag--true',
				},
				op: 'in',
				value: ['true'],
			},
		],
	});

	const externalCallRPSWidget = useMemo(
		() =>
			getWidgetQueryBuilder({
@@ -156,7 +177,7 @@ function External(): JSX.Element {
						servicename,
						selectedTraceTags,
						timestamp: selectedTimeStamp,
						apmToTraceQuery,
						apmToTraceQuery: errorApmToTraceQuery,
					})}
				>
					View Traces
@@ -2,8 +2,6 @@ import { Card, Typography } from 'antd';
import Spinner from 'components/Spinner';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { WidgetGraphContainerProps } from 'container/NewWidget/types';
// import useUrlQuery from 'hooks/useUrlQuery';
// import { useDashboard } from 'providers/Dashboard/Dashboard';
import { getSortedSeriesData } from 'utils/getSortedSeriesData';

import { NotFoundContainer } from './styles';
@@ -14,6 +12,7 @@ function WidgetGraphContainer({
	queryResponse,
	setRequestData,
	selectedWidget,
	isLoadingPanelData,
}: WidgetGraphContainerProps): JSX.Element {
	if (queryResponse.data && selectedGraph === PANEL_TYPES.BAR) {
		const sortedSeriesData = getSortedSeriesData(
@@ -38,6 +37,10 @@ function WidgetGraphContainer({
		return <Spinner size="large" tip="Loading..." />;
	}

	if (isLoadingPanelData) {
		return <Spinner size="large" tip="Loading..." />;
	}

	if (
		selectedGraph !== PANEL_TYPES.LIST &&
		queryResponse.data?.payload.data?.result?.length === 0
@@ -59,6 +62,14 @@ function WidgetGraphContainer({
		);
	}

	if (queryResponse.isIdle) {
		return (
			<NotFoundContainer>
				<Typography>No Data</Typography>
			</NotFoundContainer>
		);
	}

	return (
		<WidgetGraph
			selectedWidget={selectedWidget}
@@ -17,6 +17,7 @@ function WidgetGraph({
	queryResponse,
	setRequestData,
	selectedWidget,
	isLoadingPanelData,
}: WidgetGraphContainerProps): JSX.Element {
	const { currentQuery } = useQueryBuilder();

@@ -43,6 +44,7 @@ function WidgetGraph({
			)}

			<WidgetGraphComponent
				isLoadingPanelData={isLoadingPanelData}
				selectedGraph={selectedGraph}
				queryResponse={queryResponse}
				setRequestData={setRequestData}
@@ -1,18 +1,15 @@
import './LeftContainer.styles.scss';

import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { memo, useEffect, useState } from 'react';
import { memo } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getGraphType } from 'utils/getGraphType';

import { WidgetGraphProps } from '../types';
import ExplorerColumnsRenderer from './ExplorerColumnsRenderer';
@@ -27,62 +24,17 @@ function LeftContainer({
	selectedTracesFields,
	setSelectedTracesFields,
	selectedWidget,
	selectedTime,
	requestData,
	setRequestData,
	isLoadingPanelData,
}: WidgetGraphProps): JSX.Element {
	const { stagedQuery, redirectWithQueryBuilderData } = useQueryBuilder();
	const { stagedQuery } = useQueryBuilder();
	const { selectedDashboard } = useDashboard();

	const { selectedTime: globalSelectedInterval } = useSelector<
		AppState,
		GlobalReducer
	>((state) => state.globalTime);

	const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
		if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
			return {
				selectedTime: selectedWidget?.timePreferance,
				graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
				query: stagedQuery || initialQueriesMap.metrics,
				globalSelectedInterval,
				variables: getDashboardVariables(selectedDashboard?.data.variables),
			};
		}
		const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
		updatedQuery.builder.queryData[0].pageSize = 10;
		redirectWithQueryBuilderData(updatedQuery);
		return {
			query: updatedQuery,
			graphType: PANEL_TYPES.LIST,
			selectedTime: selectedTime.enum || 'GLOBAL_TIME',
			globalSelectedInterval,
			tableParams: {
				pagination: {
					offset: 0,
					limit: updatedQuery.builder.queryData[0].limit || 0,
				},
			},
		};
	});

	useEffect(() => {
		if (stagedQuery) {
			setRequestData((prev) => ({
				...prev,
				selectedTime: selectedTime.enum || prev.selectedTime,
				globalSelectedInterval,
				graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
				query: stagedQuery,
				fillGaps: selectedWidget.fillSpans || false,
			}));
		}
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [
		stagedQuery,
		selectedTime,
		selectedWidget.fillSpans,
		globalSelectedInterval,
	]);

	const queryResponse = useGetQueryRange(
		requestData,
		selectedDashboard?.data?.version || DEFAULT_ENTITY_VERSION,
@@ -104,6 +56,7 @@ function LeftContainer({
				queryResponse={queryResponse}
				setRequestData={setRequestData}
				selectedWidget={selectedWidget}
				isLoadingPanelData={isLoadingPanelData}
			/>
			<QueryContainer className="query-section-left-container">
				<QuerySection selectedGraph={selectedGraph} queryResponse={queryResponse} />
@@ -7,7 +7,7 @@ import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { chartHelpMessage } from 'components/facingIssueBtn/util';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DashboardShortcuts } from 'constants/shortcuts/DashboardShortcuts';
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
@@ -18,6 +18,8 @@ import useAxiosError from 'hooks/useAxiosError';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { MESSAGE, useIsFeatureDisabled } from 'hooks/useFeatureFlag';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import history from 'lib/history';
import { defaultTo, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
@@ -38,6 +40,8 @@ import { IField } from 'types/api/logs/fields';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import AppReducer from 'types/reducer/app';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getGraphType, getGraphTypeForFormat } from 'utils/getGraphType';

import LeftContainer from './LeftContainer';
import QueryTypeTag from './LeftContainer/QueryTypeTag';
@@ -83,6 +87,10 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
	const { featureResponse } = useSelector<AppState, AppReducer>(
		(state) => state.app,
	);
	const { selectedTime: globalSelectedInterval } = useSelector<
		AppState,
		GlobalReducer
	>((state) => state.globalTime);

	const { widgets = [] } = selectedDashboard?.data || {};

@@ -278,6 +286,65 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {

	const handleError = useAxiosError();

	// this loading state takes care of the mismatch between responses for table and other panels:
	// while switching panels, the query response still holds the older value and the processing logic fails
	const [isLoadingPanelData, setIsLoadingPanelData] = useState<boolean>(false);

	// request data should be handled by the parent and the child components should consume the same;
	// this has been moved here from the left container
	const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
		if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
			return {
				selectedTime: selectedWidget?.timePreferance,
				graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
				query: stagedQuery || initialQueriesMap.metrics,
				globalSelectedInterval,
				formatForWeb:
					getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
					PANEL_TYPES.TABLE,
				variables: getDashboardVariables(selectedDashboard?.data.variables),
			};
		}
		const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
		updatedQuery.builder.queryData[0].pageSize = 10;
		redirectWithQueryBuilderData(updatedQuery);
		return {
			query: updatedQuery,
			graphType: PANEL_TYPES.LIST,
			selectedTime: selectedTime.enum || 'GLOBAL_TIME',
			globalSelectedInterval,
			tableParams: {
				pagination: {
					offset: 0,
					limit: updatedQuery.builder.queryData[0].limit || 0,
				},
			},
		};
	});

	useEffect(() => {
		if (stagedQuery) {
			setIsLoadingPanelData(false);
			setRequestData((prev) => ({
				...prev,
				selectedTime: selectedTime.enum || prev.selectedTime,
				globalSelectedInterval,
				graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
				query: stagedQuery,
				fillGaps: selectedWidget.fillSpans || false,
				formatForWeb:
					getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
					PANEL_TYPES.TABLE,
			}));
		}
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [
		stagedQuery,
		selectedTime,
		selectedWidget.fillSpans,
		globalSelectedInterval,
	]);

	const onClickSaveHandler = useCallback(() => {
		if (!selectedDashboard) {
			return;
@@ -402,6 +469,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
	}, [dashboardId]);

	const setGraphHandler = (type: PANEL_TYPES): void => {
		setIsLoadingPanelData(true);
		const updatedQuery = handleQueryChange(type as any, supersetQuery);
		setGraphType(type);
		redirectWithQueryBuilderData(
@@ -527,6 +595,9 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
					setSelectedTracesFields={setSelectedTracesFields}
					selectedWidget={selectedWidget}
					selectedTime={selectedTime}
					requestData={requestData}
					setRequestData={setRequestData}
					isLoadingPanelData={isLoadingPanelData}
				/>
			)}
		</LeftContainerWrapper>
@@ -24,6 +24,9 @@ export interface WidgetGraphProps {
	selectedWidget: Widgets;
	selectedGraph: PANEL_TYPES;
	selectedTime: timePreferance;
	requestData: GetQueryResultsProps;
	setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
	isLoadingPanelData: boolean;
}

export type WidgetGraphContainerProps = {
@@ -34,4 +37,5 @@ export type WidgetGraphContainerProps = {
	setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
	selectedGraph: PANEL_TYPES;
	selectedWidget: Widgets;
	isLoadingPanelData: boolean;
};
@@ -4,50 +4,45 @@

Prior to installation, you must ensure your Kubernetes cluster is ready and that you have the necessary permissions to deploy applications. Follow these steps to use Helm for setting up the Collector:

1. **Add the OpenTelemetry Helm repository:**

   ```bash
   helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts
   ```

2. **Prepare the `otel-collector-values.yaml` Configuration**

#### Azure Event Hub Receiver Configuration

If you haven't created the logs Event Hub, you can create one by following the steps in the [Azure Event Hubs documentation](../../bootstrapping/data-ingestion). The Event Hub setup has a step to create a SAS policy for the event hub and copy the connection string.

Replace the placeholder `<Primary Connection String>` with the primary connection string for your Event Hub; it should look something like this:

```yaml
connection: Endpoint=sb://namespace.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=superSecret1234=;EntityPath=hubName
```
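For orientation, this is roughly where that connection string lands inside `otel-collector-values.yaml`. A minimal sketch, assuming the upstream `azureeventhub` receiver from opentelemetry-collector-contrib; any key beyond `connection` follows that receiver's documented config rather than this changeset:

```yaml
receivers:
  azureeventhub:
    # Primary connection string copied from the Event Hub SAS policy
    connection: Endpoint=sb://namespace.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=superSecret1234=;EntityPath=hubName
    # Parse payloads as Azure resource logs rather than raw text (per the receiver docs)
    format: azure
```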
#### Azure Monitor Receiver Configuration

You will need to set up a [service principal](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal) with Read permissions to receive data from Azure Monitor.

1. Follow the steps in the [Create a service principal Azure Doc](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#register-an-application-with-microsoft-entra-id-and-create-a-service-principal) documentation to create a service principal.
   You can name it `signoz-central-collector-app`; the redirect URI can be empty.
2. To add read permissions to Azure Monitor, follow the [Assign Role](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#assign-a-role-to-the-application) documentation. The read access can be given to the full subscription.
3. There are multiple ways to authenticate the service principal; we will use the client secret option. Follow [Creating a client secret](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#option-3-create-a-new-client-secret) and don't forget to copy the client secret. The secret is used in the configuration file as `client_secret`.
4. To find `client_id` and `tenant_id`, go to the [Azure Portal](https://portal.azure.com/) and search for the `Application` you created. You will see the `Application (client) ID` and `Directory (tenant) ID` in the Overview section.

<figure data-zoomable align="center">
    <img
        src="/img/docs/azure-monitoring/service-principal-app-overview.webp"
        alt="Application Overview"
    />
    <figcaption>
        <i>
            Application Overview
        </i>
    </figcaption>
</figure>

5. To find `subscription_id`, follow the steps in [Find Your Subscription](https://learn.microsoft.com/en-us/azure/azure-portal/get-subscription-tenant-id#find-your-azure-subscription) and populate it in the configuration file (see the sketch after this list).

6. Ensure you replace the placeholders `<region>` and `<ingestion-key>` with the appropriate values for your SigNoz Cloud instance.
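A minimal sketch of how those four values map into the receiver block, assuming the upstream `azuremonitor` receiver from opentelemetry-collector-contrib; the placeholder values are yours to substitute:

```yaml
receivers:
  azuremonitor:
    subscription_id: "<subscription_id>"   # step 5
    tenant_id: "<tenant_id>"               # step 4, Directory (tenant) ID
    client_id: "<client_id>"               # step 4, Application (client) ID
    client_secret: "<client_secret>"       # step 3
```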
@@ -92,13 +87,15 @@ processors:

```yaml
processors:
  batch: {}
exporters:
  otlp:
    endpoint: "ingest.<region>.signoz.cloud:443"
    endpoint: "ingest.{{REGION}}.signoz.cloud:443"
    tls:
      insecure: false
    headers:
      "signoz-access-token": "<ingestion-key>"
      "signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
```

3. **Deploy the OpenTelemetry Collector to your Kubernetes cluster:**

   You'll need to prepare a custom configuration file, say `otel-collector-values.yaml`, that matches your environment's specific needs. Replace `<namespace>` with the Kubernetes namespace where you wish to install the Collector, as sketched below.
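A minimal sketch of the install command itself, assuming the `open-telemetry/opentelemetry-collector` chart added in step 1 and that `otel-collector-values.yaml` also sets the chart's required `mode` (e.g. `deployment`); the release name `signoz-otel-collector` is a placeholder:

```bash
helm install signoz-otel-collector open-telemetry/opentelemetry-collector \
  --namespace <namespace> --create-namespace \
  -f otel-collector-values.yaml
```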
@@ -9,7 +9,7 @@ function TablePanelWrapper({
	tableProcessedDataRef,
}: PanelWrapperProps): JSX.Element {
	const panelData =
		queryResponse.data?.payload?.data?.newResult?.data?.result || [];
		(queryResponse.data?.payload?.data?.result?.[0] as any)?.table || [];
	const { thresholds } = widget;
	return (
		<GridTableComponent
@@ -1,3 +1,4 @@
export const historyPagination = {
	defaultPageSize: 5,
	hideOnSinglePage: true,
};
@@ -334,6 +334,11 @@ export function PlannedDowntimeList({
		}
	}, [downtimeSchedules.error, downtimeSchedules.isError, notifications]);

	const paginationConfig = {
		pageSize: 5,
		showSizeChanger: false,
		hideOnSinglePage: true,
	};
	return (
		<Table<DowntimeSchedulesTableData>
			columns={columns}
@@ -342,7 +347,7 @@ export function PlannedDowntimeList({
			dataSource={tableData || []}
			loading={downtimeSchedules.isLoading || downtimeSchedules.isFetching}
			showHeader={false}
			pagination={{ pageSize: 5, showSizeChanger: false }}
			pagination={paginationConfig}
		/>
	);
}
@@ -33,10 +33,12 @@ export const getColumnSearchProps = (
	record: ServicesList,
): boolean => {
	if (record[dataIndex]) {
		record[dataIndex]
			?.toString()
			.toLowerCase()
			.includes(value.toString().toLowerCase());
		return (
			record[dataIndex]
				?.toString()
				.toLowerCase()
				.includes(value.toString().toLowerCase()) || false
		);
	}

	return false;
@@ -79,6 +79,11 @@ function ServiceMetricTable({
		}
	}, [services, licenseData, isFetching, isCloudUserVal]);

	const paginationConfig = {
		defaultPageSize: 10,
		showTotal: (total: number, range: number[]): string =>
			`${range[0]}-${range[1]} of ${total} items`,
	};
	return (
		<>
			{RPS > MAX_RPS_LIMIT && (
@@ -92,11 +97,7 @@ function ServiceMetricTable({
			<ResourceAttributesFilter />

			<ResizeTable
				pagination={{
					defaultPageSize: 10,
					showTotal: (total: number, range: number[]): string =>
						`${range[0]}-${range[1]} of ${total} items`,
				}}
				pagination={paginationConfig}
				columns={tableColumns}
				loading={isLoading}
				dataSource={services}
@@ -36,6 +36,11 @@ function ServiceTraceTable({
		}
	}, [services, licenseData, isFetching, isCloudUserVal]);

	const paginationConfig = {
		defaultPageSize: 10,
		showTotal: (total: number, range: number[]): string =>
			`${range[0]}-${range[1]} of ${total} items`,
	};
	return (
		<>
			{RPS > MAX_RPS_LIMIT && (
@@ -49,11 +54,7 @@ function ServiceTraceTable({
			<ResourceAttributesFilter />

			<ResizeTable
				pagination={{
					defaultPageSize: 10,
					showTotal: (total: number, range: number[]): string =>
						`${range[0]}-${range[1]} of ${total} items`,
				}}
				pagination={paginationConfig}
				columns={tableColumns}
				loading={loading}
				dataSource={services}
@@ -152,9 +152,13 @@ function SideNav({

	const { t } = useTranslation('');

	const licenseStatus: string =
		licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ||
		'';

	const isLicenseActive =
		licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ===
		LICENSE_PLAN_STATUS.VALID;
		licenseStatus?.toLocaleLowerCase() ===
		LICENSE_PLAN_STATUS.VALID.toLocaleLowerCase();

	const isEnterprise = licenseData?.payload?.licenses?.some(
		(license: License) =>
@@ -19,12 +19,17 @@ import { CardContainer, CustomSubText, styles } from './styles';
import Tags from './Tags';

function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element {
	const { tree, firstSpanStartTime } = props;

	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const {
		tree,
		firstSpanStartTime,
		traceStartTime = minTime,
		traceEndTime = maxTime,
	} = props;

	const { id: traceId } = useParams<Params>();

	const isDarkMode = useIsDarkMode();
@@ -74,7 +79,7 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element {
	];

	const onLogsHandler = (): void => {
		const query = getTraceToLogsQuery(traceId, minTime, maxTime);
		const query = getTraceToLogsQuery(traceId, traceStartTime, traceEndTime);

		history.push(
			`${ROUTES.LOGS_EXPLORER}?${createQueryParams({
@@ -140,10 +145,14 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element {
interface SelectedSpanDetailsProps {
	tree?: ITraceTree;
	firstSpanStartTime: number;
	traceStartTime?: number;
	traceEndTime?: number;
}

SelectedSpanDetails.defaultProps = {
	tree: undefined,
	traceStartTime: undefined,
	traceEndTime: undefined,
};

export interface ModalText {
@@ -48,6 +48,12 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
		[response],
	);

	const traceStartTime = useMemo(() => response[0].startTimestampMillis, [
		response,
	]);

	const traceEndTime = useMemo(() => response[0].endTimestampMillis, [response]);

	const urlQuery = useUrlQuery();
	const [spanId] = useState<string | null>(urlQuery.get('spanId'));

@@ -260,6 +266,8 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
		<StyledCol styledclass={[styles.selectedSpanDetailContainer]}>
			<SelectedSpanDetails
				firstSpanStartTime={firstSpanStartTime}
				traceStartTime={traceStartTime}
				traceEndTime={traceEndTime}
				tree={[
					...(getSelectedNode.spanTree ? getSelectedNode.spanTree : []),
					...(getSelectedNode.missingSpanTree
@@ -1,7 +1,10 @@
import getTriggeredApi from 'api/alerts/getTriggered';
import logEvent from 'api/common/logEvent';
import Spinner from 'components/Spinner';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useAxiosError from 'hooks/useAxiosError';
import { isUndefined } from 'lodash-es';
import { useEffect, useRef } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -13,6 +16,8 @@ function TriggeredAlerts(): JSX.Element {
		(state) => state.app.user?.userId,
	);

	const hasLoggedEvent = useRef(false); // Track if logEvent has been called

	const handleError = useAxiosError();

	const alertsResponse = useQuery(
@@ -29,6 +34,15 @@ function TriggeredAlerts(): JSX.Element {
		},
	);

	useEffect(() => {
		if (!hasLoggedEvent.current && !isUndefined(alertsResponse.data?.payload)) {
			logEvent('Alert: Triggered alert list page visited', {
				number: alertsResponse.data?.payload?.length,
			});
			hasLoggedEvent.current = true;
		}
	}, [alertsResponse.data?.payload]);

	if (alertsResponse.error) {
		return <TriggerComponent allAlerts={[]} />;
	}
@@ -6,6 +6,7 @@ import AppRoutes from 'AppRoutes';
import { AxiosError } from 'axios';
import { ThemeProvider } from 'hooks/useDarkMode';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
import { createRoot } from 'react-dom/client';
import { HelmetProvider } from 'react-helmet-async';
import { QueryClient, QueryClientProvider } from 'react-query';
@@ -33,6 +34,13 @@ const queryClient = new QueryClient({

const container = document.getElementById('root');

if (process.env.POSTHOG_KEY) {
	posthog.init(process.env.POSTHOG_KEY, {
		api_host: 'https://us.i.posthog.com',
		person_profiles: 'identified_only', // or 'always' to create profiles for anonymous users as well
	});
}

Sentry.init({
	dsn: process.env.SENTRY_DSN,
	tunnel: process.env.TUNNEL_URL,
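Once the client is initialized behind the env guard, events elsewhere in the app can go through the same singleton. A minimal sketch using posthog-js's public `capture` API; the event name and property below are hypothetical, not events this changeset defines:

```typescript
import posthog from 'posthog-js';

// Guarding on the same env flag keeps local builds without
// POSTHOG_KEY from attempting to send anything.
if (process.env.POSTHOG_KEY) {
	posthog.capture('dashboard_viewed', { panelType: 'table' });
}
```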
@@ -12,7 +12,7 @@ import {
} from 'container/TopNav/DateTimeSelectionV2/config';
import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { isEmpty, cloneDeep } from 'lodash-es';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -40,6 +40,10 @@ export async function GetMetricQueryRange(
		throw new Error(error);
	}

	if (props.formatForWeb) {
		return response;
	}

	if (response.payload?.data?.result) {
		const v2Range = convertNewDataToOld(response.payload);

@@ -76,6 +80,7 @@ export interface GetQueryResultsProps {
	variables?: Record<string, unknown>;
	params?: Record<string, unknown>;
	fillGaps?: boolean;
	formatForWeb?: boolean;
	tableParams?: {
		pagination?: Pagination;
		selectColumns?: any;
@@ -16,6 +16,7 @@ export const prepareQueryRangePayload = ({
	query,
	globalSelectedInterval,
	graphType,
	formatForWeb,
	selectedTime,
	tableParams,
	variables = {},
@@ -102,6 +103,7 @@ export const prepareQueryRangePayload = ({
			inputFormat: 'ns',
		}),
		variables,
		formatForWeb,
		compositeQuery,
		...restParams,
	};
@@ -583,11 +583,11 @@ export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({
	q.series?.sort((a, b) => {
		let labelA = '';
		let labelB = '';
		a.labelsArray.forEach((lab) => {
		a.labelsArray?.forEach((lab) => {
			labelA += Object.values(lab)[0];
		});

		b.labelsArray.forEach((lab) => {
		b.labelsArray?.forEach((lab) => {
			labelB += Object.values(lab)[0];
		});

@@ -64,6 +64,10 @@ export interface GetUPlotChartOptions {
function getStackedSeries(apiResponse: QueryData[]): QueryData[] {
	const series = cloneDeep(apiResponse);

	if (!series) {
		return series;
	}

	for (let i = series.length - 2; i >= 0; i--) {
		const { values } = series[i];
		for (let j = 0; j < values.length; j++) {
@@ -84,6 +88,9 @@ function getStackedSeries(apiResponse: QueryData[]): QueryData[] {
 */
function getStackedSeriesQueryFormat(apiResponse: QueryData[]): QueryData[] {
	const series = cloneDeep(apiResponse);
	if (!series) {
		return apiResponse;
	}

	for (let i = series.length - 2; i >= 0; i--) {
		const { values } = series[i];
@@ -102,9 +109,12 @@ function getStackedSeriesQueryFormat(apiResponse: QueryData[]): QueryData[] {

function getStackedSeriesYAxis(apiResponse: QueryDataV3[]): QueryDataV3[] {
	const series = cloneDeep(apiResponse);
	if (!series) {
		return apiResponse;
	}

	for (let i = 0; i < series.length; i++) {
		series[i].series = getStackedSeriesQueryFormat(series[i].series);
		series[i].series = getStackedSeriesQueryFormat(series[i].series || []);
	}

	return series;
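The stacking loops above walk the series bottom-up, folding each series' values into the one beneath it so the chart can draw cumulative bands. The loop bodies are elided in the hunks, so the following is an assumed sketch of that accumulation, not the exact implementation:

```typescript
// Assumed shape: each series carries aligned numeric values.
type SeriesValues = { values: number[] };

function stack(series: SeriesValues[]): SeriesValues[] {
	const out = series.map((s) => ({ values: [...s.values] }));
	// Walk from the second-to-last series down, adding the series above into it.
	for (let i = out.length - 2; i >= 0; i--) {
		for (let j = 0; j < out[i].values.length; j++) {
			out[i].values[j] += out[i + 1].values[j];
		}
	}
	return out;
}

// stack([{ values: [1, 2] }, { values: [3, 4] }])
// => [{ values: [4, 6] }, { values: [3, 4] }]
```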
@@ -46,6 +46,8 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
		},
	];

	const paginationConfig = { pageSize: 20, hideOnSinglePage: true };

	return (
		<div className="integration-data-collected">
			<div className="logs-section">
@@ -59,7 +61,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
						index % 2 === 0 ? 'table-row-dark' : ''
					}
					dataSource={logsData}
					pagination={{ pageSize: 20 }}
					pagination={paginationConfig}
					className="logs-section-table"
				/>
			</div>
@@ -74,7 +76,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
						index % 2 === 0 ? 'table-row-dark' : ''
					}
					dataSource={metricsData}
					pagination={{ pageSize: 20 }}
					pagination={paginationConfig}
					className="metrics-section-table"
				/>
			</div>
@@ -277,6 +277,8 @@ function SaveView(): JSX.Element {
		},
	];

	const paginationConfig = { pageSize: 5, hideOnSinglePage: true };

	return (
		<div className="save-view-container">
			<div className="save-view-content">
@@ -303,7 +305,7 @@ function SaveView(): JSX.Element {
					dataSource={dataSource}
					loading={isLoading || isRefetching}
					showHeader={false}
					pagination={{ pageSize: 5 }}
					pagination={paginationConfig}
				/>
			</div>
@@ -24,6 +24,7 @@ export type QueryRangePayload = {
|
||||
start: number;
|
||||
step: number;
|
||||
variables?: Record<string, unknown>;
|
||||
formatForWeb?: boolean;
|
||||
[param: string]: unknown;
|
||||
};
|
||||
export interface MetricRangePayloadProps {
|
||||
|
||||
@@ -15,6 +15,8 @@ export interface PayloadProps {
|
||||
segmentID: string;
|
||||
columns: string[];
|
||||
isSubTree: boolean;
|
||||
startTimestampMillis: number;
|
||||
endTimestampMillis: number;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -10,3 +10,6 @@ export const getGraphType = (panelType: PANEL_TYPES): PANEL_TYPES => {
	}
	return panelType;
};

export const getGraphTypeForFormat = (panelType: PANEL_TYPES): PANEL_TYPES =>
	panelType;
@@ -22,6 +22,7 @@ const plugins = [
		template: 'src/index.html.ejs',
		INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
		SEGMENT_ID: process.env.SEGMENT_ID,
		POSTHOG_KEY: process.env.POSTHOG_KEY,
		CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
		SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
		SENTRY_ORG: process.env.SENTRY_ORG,
@@ -39,6 +40,7 @@ const plugins = [
		FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
		INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
		SEGMENT_ID: process.env.SEGMENT_ID,
		POSTHOG_KEY: process.env.POSTHOG_KEY,
		CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
		SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
		SENTRY_ORG: process.env.SENTRY_ORG,

@@ -27,6 +27,7 @@ const plugins = [
		template: 'src/index.html.ejs',
		INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
		SEGMENT_ID: process.env.SEGMENT_ID,
		POSTHOG_KEY: process.env.POSTHOG_KEY,
		CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
		SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
		SENTRY_ORG: process.env.SENTRY_ORG,
@@ -49,6 +50,7 @@ const plugins = [
		FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
		INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
		SEGMENT_ID: process.env.SEGMENT_ID,
		POSTHOG_KEY: process.env.POSTHOG_KEY,
		CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
		SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
		SENTRY_ORG: process.env.SENTRY_ORG,
@@ -8776,6 +8776,11 @@ fb-watchman@^2.0.0:
  dependencies:
    bser "2.1.1"

fflate@^0.4.8:
  version "0.4.8"
  resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae"
  integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==

figures@^3.0.0:
  version "3.2.0"
  resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af"
@@ -13700,6 +13705,19 @@ postcss@8.4.38, postcss@^8.0.0, postcss@^8.1.1, postcss@^8.3.7, postcss@^8.4.21,
    picocolors "^1.0.0"
    source-map-js "^1.2.0"

posthog-js@1.140.1:
  version "1.140.1"
  resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.140.1.tgz#34efc0d326fa5fcf7950106f350fb4f0e73b2da6"
  integrity sha512-UeKuAtQSvbzmTCzNVaauku8F194EYwAP33WrRrWZlDlMNbMy7GKcZOgKbr7jZqnha7FlVlHrWk+Rpyr1zCFhPQ==
  dependencies:
    fflate "^0.4.8"
    preact "^10.19.3"

preact@^10.19.3:
  version "10.22.0"
  resolved "https://registry.yarnpkg.com/preact/-/preact-10.22.0.tgz#a50f38006ae438d255e2631cbdaf7488e6dd4e16"
  integrity sha512-RRurnSjJPj4rp5K6XoP45Ui33ncb7e4H7WiOHVpjbkvqvA3U+N8Z6Qbo0AE6leGYBV66n8EhEaFixvIu3SkxFw==

prelude-ls@^1.2.1:
  version "1.2.1"
  resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz"
1 go.mod
@@ -37,7 +37,6 @@ require (
	github.com/opentracing/opentracing-go v1.2.0
	github.com/patrickmn/go-cache v2.1.0+incompatible
	github.com/pkg/errors v0.9.1
	github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f
	github.com/prometheus/common v0.54.0
	github.com/prometheus/prometheus v2.5.0+incompatible
	github.com/rs/cors v1.11.0
6 go.sum
@@ -137,7 +137,6 @@ github.com/coreos/go-oidc/v3 v3.10.0 h1:tDnXHnLyiTVyT/2zLDGj09pFPkhND8Gl8lnTRhoE
github.com/coreos/go-oidc/v3 v3.10.0/go.mod h1:5j11xcw0D3+SGxn6Z/WFADsgcWVMyNAlSQupk0KK3ac=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -625,8 +624,6 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f h1:h0p1aZ9F5d6IXOygysob3g4B07b+HuVUQC0VJKD8wA4=
github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f/go.mod h1:oa2sAs9tGai3VldabTV0eWejt/O4/OOD7azP8GaikqU=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c h1:NRoLoZvkBTKvR5gQLgA3e0hqjkY9u1wm+iOL45VN/qI=
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
@@ -673,7 +670,6 @@ github.com/russellhaering/gosaml2 v0.9.0 h1:CNMnH42z/GirrKjdmNrSS6bAAs47F9bPdl4P
github.com/russellhaering/gosaml2 v0.9.0/go.mod h1:byViER/1YPUa0Puj9ROZblpoq2jsE7h/CJmitzX0geU=
github.com/russellhaering/goxmldsig v1.2.0 h1:Y6GTTc9Un5hCxSzVz4UIWQ/zuVwDvzJk80guqzwx6Vg=
github.com/russellhaering/goxmldsig v1.2.0/go.mod h1:gM4MDENBQf7M+V824SGfyIUVFWydB7n0KkEubVJl+Tw=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
@@ -697,7 +693,6 @@ github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo=
@@ -747,7 +742,6 @@ github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFA
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc=
github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4=
github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ=
@@ -1924,6 +1924,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
		telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_TRACE_DETAIL_API, data, userEmail, true, false)
	}

	var startTime, endTime, durationNano uint64
	var searchScanResponses []model.SearchSpanDBResponseItem

	query := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
@@ -1941,7 +1942,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
	end := time.Now()
	zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))
	searchSpansResult := []model.SearchSpansResult{{
		Columns:   []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
		Columns:   []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
		Events:    make([][]interface{}, len(searchScanResponses)),
		IsSubTree: false,
	},
@@ -1954,6 +1955,15 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
		easyjson.Unmarshal([]byte(item.Model), &jsonItem)
		jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano() / 1000000)
		searchSpanResponses = append(searchSpanResponses, jsonItem)
		if startTime == 0 || jsonItem.TimeUnixNano < startTime {
			startTime = jsonItem.TimeUnixNano
		}
		if endTime == 0 || jsonItem.TimeUnixNano > endTime {
			endTime = jsonItem.TimeUnixNano
		}
		if durationNano == 0 || uint64(jsonItem.DurationNano) > durationNano {
			durationNano = uint64(jsonItem.DurationNano)
		}
	}
	end = time.Now()
	zap.L().Debug("getTraceSQLQuery unmarshal took: ", zap.Duration("duration", end.Sub(start)))
@@ -1983,6 +1993,9 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
		}
	}

	searchSpansResult[0].StartTimestampMillis = startTime - (durationNano / 1000000)
	searchSpansResult[0].EndTimestampMillis = endTime + (durationNano / 1000000)

	return &searchSpansResult, nil
}

@@ -3219,7 +3232,7 @@ func (r *ClickHouseReader) GetSamplesInfoInLastHeartBeatInterval(ctx context.Con

	var totalSamples uint64

	queryStr := fmt.Sprintf("select count() from %s.%s where metric_name not like 'signoz_%%' and timestamp_ms > toUnixTimestamp(now()-toIntervalMinute(%d))*1000;", signozMetricDBName, signozSampleTableName, int(interval.Minutes()))
	queryStr := fmt.Sprintf("select count() from %s.%s where metric_name not like 'signoz_%%' and unix_milli > toUnixTimestamp(now()-toIntervalMinute(%d))*1000;", signozMetricDBName, signozSampleTableName, int(interval.Minutes()))

	r.db.QueryRow(ctx, queryStr).Scan(&totalSamples)

@@ -4421,8 +4434,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
	case *time.Time:
		point.Timestamp = v.UnixMilli()
	case *float64, *float32:
		isValidPoint = true
		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
			isValidPoint = true
			point.Value = float64(reflect.ValueOf(v).Elem().Float())
		} else {
			groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
@@ -4431,9 +4444,24 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
			}
			groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())
		}
	case **float64, **float32:
		val := reflect.ValueOf(v)
		if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
			value := reflect.ValueOf(v).Elem().Elem().Float()
			if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
				isValidPoint = true
				point.Value = value
			} else {
				groupBy = append(groupBy, fmt.Sprintf("%v", value))
				if _, ok := groupAttributes[colName]; !ok {
					groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
				}
				groupAttributes[colName] = fmt.Sprintf("%v", value)
			}
		}
	case *uint, *uint8, *uint64, *uint16, *uint32:
		isValidPoint = true
		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
			isValidPoint = true
			point.Value = float64(reflect.ValueOf(v).Elem().Uint())
		} else {
			groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
@@ -4442,9 +4470,24 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
			}
			groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
		}
	case **uint, **uint8, **uint64, **uint16, **uint32:
		val := reflect.ValueOf(v)
		if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
			value := reflect.ValueOf(v).Elem().Elem().Uint()
			if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
				isValidPoint = true
				point.Value = float64(value)
			} else {
				groupBy = append(groupBy, fmt.Sprintf("%v", value))
				if _, ok := groupAttributes[colName]; !ok {
					groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
				}
				groupAttributes[colName] = fmt.Sprintf("%v", value)
			}
		}
	case *int, *int8, *int16, *int32, *int64:
		isValidPoint = true
		if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
			isValidPoint = true
			point.Value = float64(reflect.ValueOf(v).Elem().Int())
		} else {
			groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
@@ -4453,6 +4496,21 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
			}
			groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
		}
	case **int, **int8, **int16, **int32, **int64:
		val := reflect.ValueOf(v)
		if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
			value := reflect.ValueOf(v).Elem().Elem().Int()
			if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
				isValidPoint = true
				point.Value = float64(value)
			} else {
				groupBy = append(groupBy, fmt.Sprintf("%v", value))
				if _, ok := groupAttributes[colName]; !ok {
					groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
				}
				groupAttributes[colName] = fmt.Sprintf("%v", value)
			}
		}
	case *bool:
		groupBy = append(groupBy, fmt.Sprintf("%v", *v))
		if _, ok := groupAttributes[colName]; !ok {

@@ -76,7 +76,6 @@ type APIHandler struct {
	querier           interfaces.Querier
	querierV2         interfaces.Querier
	queryBuilder      *queryBuilder.QueryBuilder
	preferDelta       bool
	preferSpanMetrics bool

	// temporalityMap is a map of metric name to temporality
@@ -106,7 +105,6 @@ type APIHandlerOpts struct {

	SkipConfig *model.SkipConfig

	PerferDelta       bool
	PreferSpanMetrics bool

	MaxIdleConns int
@@ -166,7 +164,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
		reader:            opts.Reader,
		appDao:            opts.AppDao,
		skipConfig:        opts.SkipConfig,
		preferDelta:       opts.PerferDelta,
		preferSpanMetrics: opts.PreferSpanMetrics,
		temporalityMap:    make(map[string]map[v3.Temporality]bool),
		maxIdleConns:      opts.MaxIdleConns,
@@ -3016,6 +3013,7 @@ func (aH *APIHandler) QueryRangeV3Format(w http.ResponseWriter, r *http.Request)
		RespondError(w, apiErrorObj, nil)
		return
	}
	queryRangeParams.Version = "v3"

	aH.Respond(w, queryRangeParams)
}
@@ -3070,6 +3068,14 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
		postprocess.FillGaps(result, queryRangeParams)
	}

	if queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable && queryRangeParams.FormatForWeb {
		if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
			result = postprocess.TransformToTableForClickHouseQueries(result)
		} else if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
			result = postprocess.TransformToTableForBuilderQueries(result, queryRangeParams)
		}
	}

	resp := v3.QueryRangeResponse{
		Result: result,
	}
@@ -3318,8 +3324,10 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
	}

	if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {

		result, err = postprocess.PostProcessResult(result, queryRangeParams)
	} else if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL &&
		queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable && queryRangeParams.FormatForWeb {
		result = postprocess.TransformToTableForClickHouseQueries(result)
	}

	if err != nil {
@@ -3343,6 +3351,7 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
		RespondError(w, apiErrorObj, nil)
		return
	}
	queryRangeParams.Version = "v4"

	// add temporality for each metric
	temporalityErr := aH.populateTemporality(r.Context(), queryRangeParams)

@@ -142,6 +142,11 @@ func checkDuplicateString(pipeline []string) bool {
	for _, processor := range pipeline {
		name := processor
		if _, ok := exists[name]; ok {
			zap.L().Error(
				"duplicate processor name detected in generated collector config for log pipelines",
				zap.String("processor", processor),
				zap.Any("pipeline", pipeline),
			)
			return true
		}

@@ -5,7 +5,10 @@ import (
	"testing"

	. "github.com/smartystreets/goconvey/convey"
	"github.com/stretchr/testify/require"
	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
	"gopkg.in/yaml.v3"
)

var buildProcessorTestData = []struct {
@@ -204,3 +207,89 @@ func TestBuildLogsPipeline(t *testing.T) {
		})
	}
}

func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *testing.T) {
	require := require.New(t)

	baseConf := []byte(`
receivers:
  memory:
    id: in-memory-receiver
exporters:
  memory:
    id: in-memory-exporter
service:
  pipelines:
    logs:
      receivers:
        - memory
      processors: []
      exporters:
        - memory
`)

	makeTestPipeline := func(name string, alias string) Pipeline {
		return Pipeline{
			OrderId: 1,
			Name:    name,
			Alias:   alias,
			Enabled: true,
			Filter: &v3.FilterSet{
				Operator: "AND",
				Items: []v3.FilterItem{
					{
						Key: v3.AttributeKey{
							Key:      "method",
							DataType: v3.AttributeKeyDataTypeString,
							Type:     v3.AttributeKeyTypeTag,
						},
						Operator: "=",
						Value:    "GET",
					},
				},
			},
			Config: []PipelineOperator{
				{
					ID:        "regex",
					Type:      "regex_parser",
					Enabled:   true,
					Name:      "regex parser",
					ParseFrom: "attributes.test_regex_target",
					ParseTo:   "attributes",
					Regex:     `^\s*(?P<json_data>{.*})\s*$`,
				},
			},
		}
	}

	testPipelines := []Pipeline{
		makeTestPipeline("test pipeline 1", "pipeline-alias"),
		makeTestPipeline("test pipeline 2", "pipeline-alias"),
	}

	recommendedConfYaml, apiErr := GenerateCollectorConfigWithPipelines(
		baseConf, testPipelines,
	)
	require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation: %v", apiErr))

	var recommendedConf map[string]interface{}
	err := yaml.Unmarshal(recommendedConfYaml, &recommendedConf)
	require.Nil(err, "couldn't unmarshal recommended config")

	logsProcessors := recommendedConf["service"].(map[string]any)["pipelines"].(map[string]any)["logs"].(map[string]any)["processors"].([]any)

	require.Equal(
		len(logsProcessors), len(testPipelines),
		"test pipelines not included in recommended config as expected",
	)

	recommendedConfYaml2, apiErr := GenerateCollectorConfigWithPipelines(
		baseConf, testPipelines,
	)
	require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation again: %v", apiErr))
	require.Equal(
		string(recommendedConfYaml), string(recommendedConfYaml2),
		"collector config should not change across recommendations for same set of pipelines",
	)

}

@@ -24,7 +24,7 @@ func CollectorConfProcessorName(p Pipeline) string {
func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []string, error) {
	processors := map[string]interface{}{}
	names := []string{}
	for _, v := range pipelines {
	for pipelineIdx, v := range pipelines {
		if !v.Enabled {
			continue
		}
@@ -70,6 +70,12 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
			Operators: v.Config,
		}
		name := CollectorConfProcessorName(v)

		// Ensure name is unique
		if _, nameExists := processors[name]; nameExists {
			name = fmt.Sprintf("%s-%d", name, pipelineIdx)
		}

		processors[name] = processor
		names = append(names, name)
	}

@@ -55,7 +55,6 @@ type ServerOptions struct {
	// alert specific params
	DisableRules      bool
	RuleRepoURL       string
	PreferDelta       bool
	PreferSpanMetrics bool
	MaxIdleConns      int
	MaxOpenConns      int
@@ -172,7 +171,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
	apiHandler, err := NewAPIHandler(APIHandlerOpts{
		Reader:            reader,
		SkipConfig:        skipConfig,
		PerferDelta:       serverOptions.PreferDelta,
		PreferSpanMetrics: serverOptions.PreferSpanMetrics,
		MaxIdleConns:      serverOptions.MaxIdleConns,
		MaxOpenConns:      serverOptions.MaxOpenConns,

@@ -37,7 +37,6 @@ func main() {
	var ruleRepoURL, cacheConfigPath, fluxInterval string
	var cluster string

	var preferDelta bool
	var preferSpanMetrics bool

	var maxIdleConns int
@@ -47,11 +46,10 @@
	flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
	flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
	flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
	flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
	flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
	flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)")
	flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
	flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
	// Allow using the consistent naming with the signoz collector
	flag.StringVar(&cluster, "cluster-name", "cluster", "(cluster name - defaults to 'cluster')")
@@ -71,7 +69,6 @@ func main() {
		HTTPHostPort:      constants.HTTPHostPort,
		PromConfigPath:    promConfigPath,
		SkipTopLvlOpsPath: skipTopLvlOpsPath,
		PreferDelta:       preferDelta,
		PreferSpanMetrics: preferSpanMetrics,
		PrivateHostPort:   constants.PrivateHostPort,
		DisableRules:      disableRules,

@@ -212,9 +212,11 @@ type ServiceOverviewItem struct {
}

type SearchSpansResult struct {
	Columns   []string        `json:"columns"`
	Events    [][]interface{} `json:"events"`
	IsSubTree bool            `json:"isSubTree"`
	StartTimestampMillis uint64          `json:"startTimestampMillis"`
	EndTimestampMillis   uint64          `json:"endTimestampMillis"`
	Columns              []string        `json:"columns"`
	Events               [][]interface{} `json:"events"`
	IsSubTree            bool            `json:"isSubTree"`
}

type GetFilterSpansResponseItem struct {
@@ -250,19 +252,22 @@ type Event struct {

//easyjson:json
type SearchSpanResponseItem struct {
	TimeUnixNano uint64            `json:"timestamp"`
	DurationNano int64             `json:"durationNano"`
	SpanID       string            `json:"spanId"`
	RootSpanID   string            `json:"rootSpanId"`
	TraceID      string            `json:"traceId"`
	HasError     bool              `json:"hasError"`
	Kind         int32             `json:"kind"`
	ServiceName  string            `json:"serviceName"`
	Name         string            `json:"name"`
	References   []OtelSpanRef     `json:"references,omitempty"`
	TagMap       map[string]string `json:"tagMap"`
	Events       []string          `json:"event"`
	RootName     string            `json:"rootName"`
	TimeUnixNano     uint64            `json:"timestamp"`
	DurationNano     int64             `json:"durationNano"`
	SpanID           string            `json:"spanId"`
	RootSpanID       string            `json:"rootSpanId"`
	TraceID          string            `json:"traceId"`
	HasError         bool              `json:"hasError"`
	Kind             int32             `json:"kind"`
	ServiceName      string            `json:"serviceName"`
	Name             string            `json:"name"`
	References       []OtelSpanRef     `json:"references,omitempty"`
	TagMap           map[string]string `json:"tagMap"`
	Events           []string          `json:"event"`
	RootName         string            `json:"rootName"`
	StatusMessage    string            `json:"statusMessage"`
	StatusCodeString string            `json:"statusCodeString"`
	SpanKind         string            `json:"spanKind"`
}

type OtelSpanRef struct {
@@ -299,7 +304,7 @@ func (item *SearchSpanResponseItem) GetValues() []interface{} {
		keys = append(keys, k)
		values = append(values, v)
	}
	returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError}
	returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError, item.StatusMessage, item.StatusCodeString, item.SpanKind}

	return returnArray
}

@@ -118,6 +118,12 @@ func easyjson6ff3ac1dDecodeGoSignozIoSignozPkgQueryServiceModel(in *jlexer.Lexer
		}
	case "rootName":
		out.RootName = string(in.String())
	case "statusMessage":
		out.StatusMessage = string(in.String())
	case "statusCodeString":
		out.StatusCodeString = string(in.String())
	case "spanKind":
		out.SpanKind = string(in.String())
	default:
		in.SkipRecursive()
	}
@@ -233,6 +239,21 @@ func easyjson6ff3ac1dEncodeGoSignozIoSignozPkgQueryServiceModel(out *jwriter.Wri
		out.RawString(prefix)
		out.String(string(in.RootName))
	}
	{
		const prefix string = ",\"statusMessage\":"
		out.RawString(prefix)
		out.String(string(in.StatusMessage))
	}
	{
		const prefix string = ",\"statusCodeString\":"
		out.RawString(prefix)
		out.String(string(in.StatusCodeString))
	}
	{
		const prefix string = ",\"spanKind\":"
		out.RawString(prefix)
		out.String(string(in.SpanKind))
	}
	out.RawByte('}')
}

@@ -354,6 +354,8 @@ type QueryRangeParamsV3 struct {
	CompositeQuery *CompositeQuery        `json:"compositeQuery"`
	Variables      map[string]interface{} `json:"variables,omitempty"`
	NoCache        bool                   `json:"noCache"`
	Version        string                 `json:"-"`
	FormatForWeb   bool                   `json:"formatForWeb,omitempty"`
}

type PromQuery struct {
@@ -986,10 +988,30 @@ type QueryRangeResponse struct {
	Result []*Result `json:"result"`
}

type TableColumn struct {
	Name string `json:"name"`
	// QueryName is the name of the query that this column belongs to
	QueryName string `json:"queryName"`
	// IsValueColumn is true if this column is a value column
	// i.e it is the column that contains the actual value that is being plotted
	IsValueColumn bool `json:"isValueColumn"`
}

type TableRow struct {
	Data      map[string]interface{} `json:"data"`
	QueryName string                 `json:"-"`
}

type Table struct {
	Columns []*TableColumn `json:"columns"`
	Rows    []*TableRow    `json:"rows"`
}

type Result struct {
	QueryName string    `json:"queryName"`
	Series    []*Series `json:"series"`
	List      []*Row    `json:"list"`
	QueryName string    `json:"queryName,omitempty"`
	Series    []*Series `json:"series,omitempty"`
	List      []*Row    `json:"list,omitempty"`
	Table     *Table    `json:"table,omitempty"`
}

type LogsLiveTailClient struct {

@@ -86,6 +86,13 @@ func PostProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParam
	if queryRangeParams.CompositeQuery.FillGaps {
		FillGaps(result, queryRangeParams)
	}

	if queryRangeParams.FormatForWeb &&
		queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder &&
		queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable {
		result = TransformToTableForBuilderQueries(result, queryRangeParams)
	}

	return result, nil
}

301
pkg/query-service/postprocess/table.go
Normal file
@@ -0,0 +1,301 @@
package postprocess

import (
	"fmt"
	"math"
	"sort"
	"strings"

	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func roundToTwoDecimal(number float64) float64 {
	// Handle very small numbers
	if math.Abs(number) < 0.000001 {
		return 0
	}

	// Determine the number of decimal places to round to
	decimalPlaces := 2
	if math.Abs(number) < 0.01 {
		decimalPlaces = int(math.Ceil(-math.Log10(math.Abs(number)))) + 1
	}

	// Round to the determined number of decimal places
	scale := math.Pow(10, float64(decimalPlaces))
	return math.Round(number*scale) / scale
}

func TransformToTableForBuilderQueries(results []*v3.Result, params *v3.QueryRangeParamsV3) []*v3.Result {
	if len(results) == 0 {
		return []*v3.Result{}
	}

	// Sort results by QueryName
	sort.Slice(results, func(i, j int) bool {
		return results[i].QueryName < results[j].QueryName
	})

	// Create a map to store all unique labels
	seen := make(map[string]struct{})
	labelKeys := []string{}
	for _, result := range results {
		for _, series := range result.Series {
			for _, labels := range series.LabelsArray {
				for key := range labels {
					if _, ok := seen[key]; !ok {
						seen[key] = struct{}{}
						labelKeys = append(labelKeys, key)
					}
				}
			}
		}
	}

	// Create columns
	// There will be one column for each label key and one column for each query name
	columns := make([]*v3.TableColumn, 0, len(labelKeys)+len(results))
	for _, key := range labelKeys {
		columns = append(columns, &v3.TableColumn{Name: key, IsValueColumn: false})
	}
	for _, result := range results {
		columns = append(columns, &v3.TableColumn{Name: result.QueryName, QueryName: result.QueryName, IsValueColumn: true})
	}

	// Create a map to store unique rows
	rowMap := make(map[string]*v3.TableRow)

	for _, result := range results {
		for _, series := range result.Series {
			if len(series.Points) == 0 {
				continue
			}

			// Create a key for the row based on labels
			var keyParts []string
			rowData := make(map[string]interface{}, len(columns))
			for _, key := range labelKeys {
				value := "n/a"
				for _, labels := range series.LabelsArray {
					if v, ok := labels[key]; ok {
						value = v
						break
					}
				}
				keyParts = append(keyParts, fmt.Sprintf("%s=%s", key, value))
				rowData[key] = value
			}
			rowKey := strings.Join(keyParts, ",")

			// Get or create the row
			row, ok := rowMap[rowKey]
			if !ok {
				row = &v3.TableRow{Data: rowData, QueryName: result.QueryName}
				rowMap[rowKey] = row
			}

			// Add the value for this query
			for _, col := range columns {
				if col.Name == result.QueryName {
					row.Data[col.Name] = roundToTwoDecimal(series.Points[0].Value)
					break
				}
			}
		}
	}

	// Convert rowMap to a slice of TableRows
	rows := make([]*v3.TableRow, 0, len(rowMap))
	for _, row := range rowMap {
		rows = append(rows, row)
	}

	// Get sorted query names
	queryNames := make([]string, 0, len(params.CompositeQuery.BuilderQueries))
	for queryName := range params.CompositeQuery.BuilderQueries {
		queryNames = append(queryNames, queryName)
	}
	sort.Strings(queryNames)

	// Sort rows based on OrderBy from BuilderQueries
	sortRows(rows, params.CompositeQuery.BuilderQueries, queryNames)

	for _, row := range rows {
		for _, col := range columns {
			if col.IsValueColumn {
				if row.Data[col.Name] == nil {
					row.Data[col.Name] = "n/a"
				}
			}
		}
	}

	// Create the final result
	tableResult := v3.Result{
		Table: &v3.Table{
			Columns: columns,
			Rows:    rows,
		},
	}

	return []*v3.Result{&tableResult}
}

func sortRows(rows []*v3.TableRow, builderQueries map[string]*v3.BuilderQuery, queryNames []string) {
	// use reverse order of queryNames
	for i := len(queryNames) - 1; i >= 0; i-- {
		queryName := queryNames[i]
		sort.SliceStable(rows, func(i, j int) bool {
			query := builderQueries[queryName]
			orderByList := query.OrderBy
			if len(orderByList) == 0 {
				// If no orderBy is specified, sort by value in descending order
				orderByList = []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}
			}
			for _, orderBy := range orderByList {
				name := orderBy.ColumnName
				if name == constants.SigNozOrderByValue {
					name = queryName
				}

				valI := rows[i].Data[name]
				valJ := rows[j].Data[name]

				if valI == nil || valJ == nil {
					return rows[i].QueryName < rows[j].QueryName
				}

				// Compare based on the data type
				switch v := valI.(type) {
				case float64:
					switch w := valJ.(type) {
					case float64:
						if v != w {
							return (v < w) == (orderBy.Order == "asc")
						}
					default:
						// For any other type, sort float64 first
						return orderBy.Order == "asc"
					}
				case string:
					switch w := valJ.(type) {
					case float64:
						// If types are different, sort numbers before strings
						return orderBy.Order != "asc"
					case string:
						if v != w {
							return (v < w) == (orderBy.Order == "asc")
						}
					default:
						// For any other type, sort strings before bools
						return orderBy.Order == "asc"
					}
				case bool:
					switch w := valJ.(type) {
					case float64, string:
						// If types are different, sort bools after numbers and strings
						return orderBy.Order != "asc"
					case bool:
						if v != w {
							return (!v && w) == (orderBy.Order == "asc")
						}
					}
				}
			}
			return false
		})
	}
}

func TransformToTableForClickHouseQueries(results []*v3.Result) []*v3.Result {
	if len(results) == 0 {
		return []*v3.Result{}
	}

	// Sort results by QueryName
	sort.Slice(results, func(i, j int) bool {
		return results[i].QueryName < results[j].QueryName
	})

	// Create a map to store all unique labels
	seen := make(map[string]struct{})
	labelKeys := []string{}
	for _, result := range results {
		for _, series := range result.Series {
			for _, labels := range series.LabelsArray {
				for key := range labels {
					if _, ok := seen[key]; !ok {
						seen[key] = struct{}{}
						labelKeys = append(labelKeys, key)
					}
				}
			}
		}
	}

	// Create columns
	// Why don't we have a column for each query name?
	// Because we don't know if the query is an aggregation query or a non-aggregation query
	// So we create a column for each query name that has at least one point
	columns := make([]*v3.TableColumn, 0)
	for _, key := range labelKeys {
		columns = append(columns, &v3.TableColumn{Name: key, IsValueColumn: false})
	}
	for _, result := range results {
		if len(result.Series) > 0 && len(result.Series[0].Points) > 0 {
			columns = append(columns, &v3.TableColumn{Name: result.QueryName, QueryName: result.QueryName, IsValueColumn: true})
		}
	}

	rows := make([]*v3.TableRow, 0)
	for _, result := range results {
		for _, series := range result.Series {

			// Create a key for the row based on labels
			rowData := make(map[string]interface{}, len(columns))
			for _, key := range labelKeys {
				value := "n/a"
				for _, labels := range series.LabelsArray {
					if v, ok := labels[key]; ok {
						value = v
						break
					}
				}
				rowData[key] = value
			}

			// Get or create the row
			row := &v3.TableRow{Data: rowData, QueryName: result.QueryName}

			// Add the value for this query
			for _, col := range columns {
				if col.Name == result.QueryName && len(series.Points) > 0 {
					row.Data[col.Name] = roundToTwoDecimal(series.Points[0].Value)
					break
				}
			}
			rows = append(rows, row)
		}
	}

	for _, row := range rows {
		for _, col := range columns {
			if col.IsValueColumn {
				if row.Data[col.Name] == nil {
					row.Data[col.Name] = "n/a"
				}
			}
		}
	}

	// Create the final result
	tableResult := v3.Result{
		Table: &v3.Table{
			Columns: columns,
			Rows:    rows,
		},
	}

	return []*v3.Result{&tableResult}
}
612
pkg/query-service/postprocess/table_test.go
Normal file
@@ -0,0 +1,612 @@
package postprocess

import (
	"bytes"
	"encoding/json"
	"reflect"
	"testing"

	"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)

func TestSortRows(t *testing.T) {
	tests := []struct {
		name           string
		rows           []*v3.TableRow
		columns        []*v3.TableColumn
		builderQueries map[string]*v3.BuilderQuery
		queryNames     []string
		expected       []*v3.TableRow
	}{
		{
			name: "Sort by single numeric query, ascending order",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
				{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
				{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
			},
			queryNames: []string{"A"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
				{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
				{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
			},
		},
		{
			name: "Sort by single numeric query, descending order",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
				{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
				{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}},
			},
			queryNames: []string{"A"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
				{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
				{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
			},
		},
		{
			name: "Sort by single string query, ascending order",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service2", "A": "b"}},
				{Data: map[string]interface{}{"service": "service1", "A": "c"}},
				{Data: map[string]interface{}{"service": "service3", "A": "a"}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
			},
			queryNames: []string{"A"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service3", "A": "a"}},
				{Data: map[string]interface{}{"service": "service2", "A": "b"}},
				{Data: map[string]interface{}{"service": "service1", "A": "c"}},
			},
		},
		{
			name: "Sort with n/a values",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
				{Data: map[string]interface{}{"service": "service2", "B": 15.0}},
				{Data: map[string]interface{}{"service": "service3", "A": 30.0, "B": 25.0}},
				{Data: map[string]interface{}{"service": "service4"}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
				{Name: "B"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "asc"}}},
				"B": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}},
			},
			queryNames: []string{"A", "B"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": 10.0}},
				{Data: map[string]interface{}{"service": "service3", "A": 30.0, "B": 25.0}},
				{Data: map[string]interface{}{"service": "service2", "B": 15.0}},
				{Data: map[string]interface{}{"service": "service4"}},
			},
		},
		{
			name: "Sort with SigNozOrderByValue",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
				{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
				{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: constants.SigNozOrderByValue, Order: "desc"}}},
			},
			queryNames: []string{"A"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
				{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
				{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
			},
		},
		{
			name: "Sort with all n/a values",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": "n/a", "B": "n/a"}},
				{Data: map[string]interface{}{"service": "service2", "A": "n/a", "B": "n/a"}},
				{Data: map[string]interface{}{"service": "service3", "A": "n/a", "B": "n/a"}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
				{Name: "B"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
				"B": {OrderBy: []v3.OrderBy{{ColumnName: "B", Order: "desc"}}},
			},
			queryNames: []string{"A", "B"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": "n/a", "B": "n/a"}},
				{Data: map[string]interface{}{"service": "service2", "A": "n/a", "B": "n/a"}},
				{Data: map[string]interface{}{"service": "service3", "A": "n/a", "B": "n/a"}},
			},
		},
		{
			name: "Sort with negative numbers",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": -10.0}},
				{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
				{Data: map[string]interface{}{"service": "service3", "A": -30.0}},
				{Data: map[string]interface{}{"service": "service4", "A": 0.0}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
			},
			queryNames: []string{"A"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service3", "A": -30.0}},
				{Data: map[string]interface{}{"service": "service1", "A": -10.0}},
				{Data: map[string]interface{}{"service": "service4", "A": 0.0}},
				{Data: map[string]interface{}{"service": "service2", "A": 20.0}},
			},
		},
		{
			name: "Sort with mixed case strings",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": "Apple"}},
				{Data: map[string]interface{}{"service": "service2", "A": "banana"}},
				{Data: map[string]interface{}{"service": "service3", "A": "Cherry"}},
				{Data: map[string]interface{}{"service": "service4", "A": "date"}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
			},
			queryNames: []string{"A"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": "Apple"}},
				{Data: map[string]interface{}{"service": "service3", "A": "Cherry"}},
				{Data: map[string]interface{}{"service": "service2", "A": "banana"}},
				{Data: map[string]interface{}{"service": "service4", "A": "date"}},
			},
		},
		{
			name: "Sort with empty strings",
			rows: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": ""}},
				{Data: map[string]interface{}{"service": "service2", "A": "b"}},
				{Data: map[string]interface{}{"service": "service3", "A": ""}},
				{Data: map[string]interface{}{"service": "service4", "A": "a"}},
			},
			columns: []*v3.TableColumn{
				{Name: "service_name"},
				{Name: "A"},
			},
			builderQueries: map[string]*v3.BuilderQuery{
				"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
			},
			queryNames: []string{"A"},
			expected: []*v3.TableRow{
				{Data: map[string]interface{}{"service": "service1", "A": ""}},
				{Data: map[string]interface{}{"service": "service3", "A": ""}},
				{Data: map[string]interface{}{"service": "service4", "A": "a"}},
				{Data: map[string]interface{}{"service": "service2", "A": "b"}},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			sortRows(tt.rows, tt.builderQueries, tt.queryNames)
			if !reflect.DeepEqual(tt.rows, tt.expected) {
				exp, _ := json.Marshal(tt.expected)
				got, _ := json.Marshal(tt.rows)
				t.Errorf("sortRows() = %v, want %v", string(got), string(exp))
			}
		})
	}
}

func TestSortRowsWithEmptyQueries(t *testing.T) {
	rows := []*v3.TableRow{
		{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
		{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
		{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
	}
	builderQueries := map[string]*v3.BuilderQuery{}
	queryNames := []string{}

	sortRows(rows, builderQueries, queryNames)

	// Expect the original order to be maintained
	expected := []*v3.TableRow{
		{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
		{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
		{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
	}

	if !reflect.DeepEqual(rows, expected) {
		t.Errorf("sortRows() with empty queries = %v, want %v", rows, expected)
	}
}

func TestSortRowsWithInvalidColumnName(t *testing.T) {
	rows := []*v3.TableRow{
		{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
		{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
		{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
	}
	builderQueries := map[string]*v3.BuilderQuery{
		"A": {OrderBy: []v3.OrderBy{{ColumnName: "InvalidColumn", Order: "asc"}}},
	}
	queryNames := []string{"A"}

	sortRows(rows, builderQueries, queryNames)

	// Expect the original order to be maintained
	expected := []*v3.TableRow{
		{Data: map[string]interface{}{"service": "service1", "A": 20.0}},
		{Data: map[string]interface{}{"service": "service2", "A": 10.0}},
		{Data: map[string]interface{}{"service": "service3", "A": 30.0}},
	}

	if !reflect.DeepEqual(rows, expected) {
		t.Errorf("sortRows() with invalid column name = %v, want %v", rows, expected)
	}
}

func TestSortRowsStability(t *testing.T) {
	rows := []*v3.TableRow{
		{Data: map[string]interface{}{"service": "service1", "A": 10.0, "B": "a"}},
		{Data: map[string]interface{}{"service": "service2", "A": 10.0, "B": "b"}},
		{Data: map[string]interface{}{"service": "service3", "A": 10.0, "B": "c"}},
	}
	builderQueries := map[string]*v3.BuilderQuery{
		"A": {OrderBy: []v3.OrderBy{{ColumnName: "A", Order: "asc"}}},
	}
	queryNames := []string{"A"}

	sortRows(rows, builderQueries, queryNames)

	// Expect the original order to be maintained for equal values
	expected := []*v3.TableRow{
		{Data: map[string]interface{}{"service": "service1", "A": 10.0, "B": "a"}},
		{Data: map[string]interface{}{"service": "service2", "A": 10.0, "B": "b"}},
		{Data: map[string]interface{}{"service": "service3", "A": 10.0, "B": "c"}},
	}

	if !reflect.DeepEqual(rows, expected) {
		t.Errorf("sortRows() stability test failed = %v, want %v", rows, expected)
	}
}

func TestTransformToTableForClickHouseQueries(t *testing.T) {
	tests := []struct {
		name     string
		input    []*v3.Result
		expected []*v3.Result
	}{
		{
			name:     "Empty input",
			input:    []*v3.Result{},
			expected: []*v3.Result{},
		},
		{
			name: "Single result with one series",
			input: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"service": "frontend"},
							},
							Points: []v3.Point{
								{Value: 10.0},
							},
						},
					},
				},
			},
			expected: []*v3.Result{
				{
					Table: &v3.Table{
						Columns: []*v3.TableColumn{
							{Name: "service"},
							{Name: "A", QueryName: "A", IsValueColumn: true},
						},
						Rows: []*v3.TableRow{
							{Data: map[string]interface{}{"service": "frontend", "A": 10.0}},
						},
					},
				},
			},
		},
		{
			name: "Multiple results with multiple series",
			input: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"service": "frontend"},
								{"env": "prod"},
							},
							Points: []v3.Point{
								{Value: 10.0},
							},
						},
						{
							LabelsArray: []map[string]string{
								{"service": "backend"},
								{"env": "prod"},
							},
							Points: []v3.Point{
								{Value: 20.0},
							},
						},
					},
				},
				{
					QueryName: "B",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"service": "frontend"},
								{"env": "prod"},
							},
							Points: []v3.Point{
								{Value: 15.0},
							},
						},
						{
							LabelsArray: []map[string]string{
								{"service": "backend"},
								{"env": "prod"},
							},
							Points: []v3.Point{
								{Value: 25.0},
							},
						},
					},
				},
			},
			expected: []*v3.Result{
				{
					Table: &v3.Table{
						Columns: []*v3.TableColumn{
							{Name: "service"},
							{Name: "env"},
							{Name: "A", QueryName: "A", IsValueColumn: true},
							{Name: "B", QueryName: "B", IsValueColumn: true},
						},
						Rows: []*v3.TableRow{
							{Data: map[string]interface{}{"service": "frontend", "env": "prod", "A": 10.0, "B": "n/a"}},
							{Data: map[string]interface{}{"service": "backend", "env": "prod", "A": 20.0, "B": "n/a"}},
							{Data: map[string]interface{}{"service": "frontend", "env": "prod", "A": "n/a", "B": 15.0}},
							{Data: map[string]interface{}{"service": "backend", "env": "prod", "A": "n/a", "B": 25.0}},
						},
					},
				},
			},
		},
		{
			name: "Results with missing labels",
			input: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"service": "frontend"},
							},
							Points: []v3.Point{
								{Value: 10.0},
							},
						},
					},
				},
				{
					QueryName: "B",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"env": "prod"},
							},
							Points: []v3.Point{
								{Value: 20.0},
							},
						},
					},
				},
			},
			expected: []*v3.Result{
				{
					Table: &v3.Table{
						Columns: []*v3.TableColumn{
							{Name: "service"},
							{Name: "env"},
							{Name: "A", QueryName: "A", IsValueColumn: true},
							{Name: "B", QueryName: "B", IsValueColumn: true},
						},
						Rows: []*v3.TableRow{
							{Data: map[string]interface{}{"service": "frontend", "env": "n/a", "A": 10.0, "B": "n/a"}},
							{Data: map[string]interface{}{"service": "n/a", "env": "prod", "A": "n/a", "B": 20.0}},
						},
					},
				},
			},
		},
		{
			name: "Results with empty series",
			input: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"service": "frontend"},
							},
							Points: []v3.Point{
								{Value: 10.0},
							},
						},
					},
				},
				{
					QueryName: "B",
					Series:    []*v3.Series{},
				},
			},
			expected: []*v3.Result{
				{
					Table: &v3.Table{
						Columns: []*v3.TableColumn{
							{Name: "service"},
							{Name: "A", QueryName: "A", IsValueColumn: true},
						},
						Rows: []*v3.TableRow{
							{Data: map[string]interface{}{"service": "frontend", "A": 10.0}},
						},
					},
				},
			},
		},
		{
			name: "Results with empty points",
			input: []*v3.Result{
				{
					QueryName: "A",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"service": "frontend"},
							},
							Points: []v3.Point{},
						},
					},
				},
				{
					QueryName: "B",
					Series: []*v3.Series{
						{
							LabelsArray: []map[string]string{
								{"service": "backend"},
							},
							Points: []v3.Point{
								{Value: 20.0},
							},
						},
					},
				},
			},
			expected: []*v3.Result{
				{
					Table: &v3.Table{
						Columns: []*v3.TableColumn{
							{Name: "service"},
							{Name: "B", QueryName: "B", IsValueColumn: true},
						},
						Rows: []*v3.TableRow{
							{Data: map[string]interface{}{"service": "frontend", "B": "n/a"}},
							{Data: map[string]interface{}{"service": "backend", "B": 20.0}},
						},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := TransformToTableForClickHouseQueries(tt.input)
			exp, _ := json.Marshal(tt.expected)
			got, _ := json.Marshal(result)
			if !bytes.Equal(got, exp) {
				t.Errorf("TransformToTableForClickHouseQueries() = %v, want %v", string(got), string(exp))
			}
		})
	}
}

func TestTransformToTableForClickHouseQueriesSorting(t *testing.T) {
	input := []*v3.Result{
		{
			QueryName: "B",
			Series: []*v3.Series{
				{
					LabelsArray: []map[string]string{
						{"service": "frontend"},
					},
					Points: []v3.Point{
						{Value: 10.0},
					},
				},
			},
		},
		{
			QueryName: "A",
			Series: []*v3.Series{
				{
					LabelsArray: []map[string]string{
						{"service": "backend"},
					},
					Points: []v3.Point{
						{Value: 20.0},
					},
				},
			},
		},
	}

	expected := []*v3.Result{
		{
			Table: &v3.Table{
				Columns: []*v3.TableColumn{
					{Name: "service"},
					{Name: "A", QueryName: "A", IsValueColumn: true},
					{Name: "B", QueryName: "B", IsValueColumn: true},
				},
				Rows: []*v3.TableRow{
					{Data: map[string]interface{}{"service": "backend", "A": 20.0, "B": "n/a"}},
					{Data: map[string]interface{}{"service": "frontend", "A": "n/a", "B": 10.0}},
				},
			},
		},
	}

	result := TransformToTableForClickHouseQueries(input)
	exp, _ := json.Marshal(expected)
	got, _ := json.Marshal(result)
	if !bytes.Equal(got, exp) {
		t.Errorf("TransformToTableForClickHouseQueries() sorting test failed. Got %v, want %v", string(got), string(exp))
	}
}
@@ -10,7 +10,7 @@ func EnabledPaths() map[string]struct{} {

func ignoreEvents(event string, attributes map[string]interface{}) bool {

	if event == TELEMETRY_EVENT_ACTIVE_USER || event == TELEMETRY_EVENT_ACTIVE_USER_PH {
	if event == TELEMETRY_EVENT_ACTIVE_USER {
		for attr_key, attr_val := range attributes {

			if attr_key == "any" && attr_val.(int8) == 0 {

@@ -11,7 +11,6 @@ import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
ph "github.com/posthog/posthog-go"
|
||||
"gopkg.in/segmentio/analytics-go.v3"
|
||||
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
@@ -26,7 +25,6 @@ const (
|
||||
TELEMETRY_EVENT_USER = "User"
|
||||
TELEMETRY_EVENT_INPRODUCT_FEEDBACK = "InProduct Feedback Submitted"
|
||||
TELEMETRY_EVENT_NUMBER_OF_SERVICES = "Number of Services"
|
||||
TELEMETRY_EVENT_NUMBER_OF_SERVICES_PH = "Number of Services V2"
|
||||
TELEMETRY_EVENT_HEART_BEAT = "Heart Beat"
|
||||
TELEMETRY_EVENT_ORG_SETTINGS = "Org Settings"
|
||||
DEFAULT_SAMPLING = 0.1
|
||||
@@ -44,7 +42,6 @@ const (
|
||||
TELEMETRY_EVENT_QUERY_RANGE_API = "Query Range API"
|
||||
TELEMETRY_EVENT_DASHBOARDS_ALERTS = "Dashboards/Alerts Info"
|
||||
TELEMETRY_EVENT_ACTIVE_USER = "Active User"
|
||||
TELEMETRY_EVENT_ACTIVE_USER_PH = "Active User V2"
|
||||
TELEMETRY_EVENT_USER_INVITATION_SENT = "User Invitation Sent"
|
||||
TELEMETRY_EVENT_USER_INVITATION_ACCEPTED = "User Invitation Accepted"
|
||||
TELEMETRY_EVENT_SUCCESSFUL_DASHBOARD_PANEL_QUERY = "Successful Dashboard Panel Query"
|
||||
@@ -69,8 +66,21 @@ var SAAS_EVENTS_LIST = map[string]struct{}{
|
||||
TELEMETRY_EVENT_TRACE_DETAIL_API: {},
|
||||
}
|
||||
|
||||
const api_key = "4Gmoa4ixJAUHx2BpJxsjwA1bEfnwEeRz"
|
||||
const ph_api_key = "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w"
|
||||
var OSS_EVENTS_LIST = map[string]struct{}{
|
||||
TELEMETRY_EVENT_NUMBER_OF_SERVICES: {},
|
||||
TELEMETRY_EVENT_HEART_BEAT: {},
|
||||
TELEMETRY_EVENT_LANGUAGE: {},
|
||||
TELEMETRY_EVENT_ENVIRONMENT: {},
|
||||
TELEMETRY_EVENT_DASHBOARDS_ALERTS: {},
|
||||
TELEMETRY_EVENT_ACTIVE_USER: {},
|
||||
TELEMETRY_EVENT_PATH: {},
|
||||
TELEMETRY_EVENT_ORG_SETTINGS: {},
|
||||
TELEMETRY_LICENSE_CHECK_FAILED: {},
|
||||
TELEMETRY_LICENSE_UPDATED: {},
|
||||
TELEMETRY_LICENSE_ACT_FAILED: {},
|
||||
}
|
||||
|
||||
const api_key = "9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr"
|
||||
|
||||
const IP_NOT_FOUND_PLACEHOLDER = "NA"
|
||||
const DEFAULT_NUMBER_OF_SERVICES = 6
@@ -110,13 +120,13 @@ func (telemetry *Telemetry) CheckSigNozSignals(postData *v3.QueryRangeParamsV3)

	if postData.CompositeQuery.QueryType == v3.QueryTypeBuilder {
		for _, query := range postData.CompositeQuery.BuilderQueries {
			if query.DataSource == v3.DataSourceLogs && len(query.Filters.Items) > 0 {
			if query.DataSource == v3.DataSourceLogs && query.Filters != nil && len(query.Filters.Items) > 0 {
				signozLogsUsed = true
			} else if query.DataSource == v3.DataSourceMetrics &&
				!strings.Contains(query.AggregateAttribute.Key, "signoz_") &&
				len(query.AggregateAttribute.Key) > 0 {
				signozMetricsUsed = true
			} else if query.DataSource == v3.DataSourceTraces && len(query.Filters.Items) > 0 {
			} else if query.DataSource == v3.DataSourceTraces && query.Filters != nil && len(query.Filters.Items) > 0 {
				signozTracesUsed = true
			}
		}
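The added `query.Filters != nil` guards fix a nil-pointer panic: `len()` is safe on a nil slice, but `Filters` is a pointer, so `query.Filters.Items` dereferences nil whenever a builder query has no filter set. A self-contained sketch with trimmed stand-in types:

```go
package main

import "fmt"

// Trimmed stand-ins for the v3 types; only the pointer shape matters here.
type FilterSet struct{ Items []string }
type BuilderQuery struct{ Filters *FilterSet }

func hasFilters(q BuilderQuery) bool {
	// Short-circuit evaluation: len() is only reached when Filters is non-nil.
	return q.Filters != nil && len(q.Filters.Items) > 0
}

func main() {
	fmt.Println(hasFilters(BuilderQuery{}))                                          // false, no panic
	fmt.Println(hasFilters(BuilderQuery{Filters: &FilterSet{Items: []string{"x"}}})) // true
}
```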
@@ -159,9 +169,8 @@ func (telemetry *Telemetry) AddActiveLogsUser() {
}

type Telemetry struct {
	operator     analytics.Client
	ossOperator  analytics.Client
	saasOperator analytics.Client
	phOperator   ph.Client
	ipAddress    string
	userEmail    string
	isEnabled    bool
@@ -188,11 +197,10 @@ func createTelemetry() {
	}

	telemetry = &Telemetry{
		operator:   analytics.New(api_key),
		phOperator: ph.New(ph_api_key),
		ipAddress:  getOutboundIP(),
		rateLimits: make(map[string]int8),
		activeUser: make(map[string]int8),
		ossOperator: analytics.New(api_key),
		ipAddress:   getOutboundIP(),
		rateLimits:  make(map[string]int8),
		activeUser:  make(map[string]int8),
	}
	telemetry.minRandInt = 0
	telemetry.maxRandInt = int(1 / DEFAULT_SAMPLING)
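With `DEFAULT_SAMPLING = 0.1`, `maxRandInt` becomes `int(1/0.1) = 10`, so drawing an integer in `[0, 10)` and keeping a single value samples roughly one event in ten. The exact comparison the telemetry code performs is outside this diff; the arithmetic is sketched below.

```go
package main

import (
	"fmt"
	"math/rand"
)

func main() {
	minRandInt, maxRandInt := 0, int(1/0.1) // mirrors createTelemetry: 0 and 10
	hits := 0
	for i := 0; i < 100000; i++ {
		// Keep an event only when the draw lands on one bucket: ~10% of draws.
		if rand.Intn(maxRandInt-minRandInt)+minRandInt == 0 {
			hits++
		}
	}
	fmt.Printf("sampled %.1f%% of events\n", float64(hits)/1000)
}
```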
@@ -392,18 +400,16 @@ func (a *Telemetry) IdentifyUser(user *model.User) {
		})
	}

	a.operator.Enqueue(analytics.Identify{
	a.ossOperator.Enqueue(analytics.Identify{
		UserId: a.ipAddress,
		Traits: analytics.NewTraits().SetName(user.Name).SetEmail(user.Email).Set("ip", a.ipAddress),
	})
	// Updating a groups properties
	a.phOperator.Enqueue(ph.GroupIdentify{
		Type: "companyDomain",
		Key:  a.getCompanyDomain(),
		Properties: ph.NewProperties().
			Set("companyDomain", a.getCompanyDomain()),
	a.ossOperator.Enqueue(analytics.Group{
		UserId:  a.ipAddress,
		GroupId: a.getCompanyDomain(),
		Traits:  analytics.NewTraits().Set("company_domain", a.getCompanyDomain()),
	})

}

func (a *Telemetry) SetCountUsers(countUsers int8) {
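The PostHog `GroupIdentify` is replaced by Segment's `Group` message, which links the identified user to a company-domain group. A self-contained usage sketch, with a placeholder write key and example values:

```go
package main

import analytics "gopkg.in/segmentio/analytics-go.v3"

func main() {
	client := analytics.New("WRITE_KEY") // placeholder write key
	defer client.Close()

	// Associate the user (keyed by IP, as the telemetry code does) with a
	// group keyed by company domain; the values here are examples.
	client.Enqueue(analytics.Group{
		UserId:  "203.0.113.7",
		GroupId: "example.com",
		Traits:  analytics.NewTraits().Set("company_domain", "example.com"),
	})
}
```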
@@ -520,33 +526,19 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEma
		})
	}

	a.operator.Enqueue(analytics.Track{
		Event:      event,
		UserId:     userId,
		Properties: properties,
	})
	_, isOSSEvent := OSS_EVENTS_LIST[event]

	if event == TELEMETRY_EVENT_NUMBER_OF_SERVICES {

		a.phOperator.Enqueue(ph.Capture{
			DistinctId: userId,
			Event:      TELEMETRY_EVENT_NUMBER_OF_SERVICES_PH,
			Properties: ph.Properties(properties),
			Groups: ph.NewGroups().
				Set("companyDomain", a.getCompanyDomain()),
	if a.ossOperator != nil && isOSSEvent {
		a.ossOperator.Enqueue(analytics.Track{
			Event:      event,
			UserId:     userId,
			Properties: properties,
			Context: &analytics.Context{
				Extra: map[string]interface{}{
					"groupId": a.getCompanyDomain(),
				},
			},
		})

	}
	if event == TELEMETRY_EVENT_ACTIVE_USER {

		a.phOperator.Enqueue(ph.Capture{
			DistinctId: userId,
			Event:      TELEMETRY_EVENT_ACTIVE_USER_PH,
			Properties: ph.Properties(properties),
			Groups: ph.NewGroups().
				Set("companyDomain", a.getCompanyDomain()),
		})

	}
}
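Net effect of the `SendEvent` change: the unconditional Segment track and the two PostHog capture branches collapse into one gated path, where only allow-listed events reach the OSS client and the company domain travels as `groupId` in the message context. A condensed sketch; the `sendOSSEvent` helper name is illustrative, not from the source.

```go
// Condensed form of the new gating; assumes the Telemetry fields shown in
// the diff above. Only events present in OSS_EVENTS_LIST are forwarded.
func (a *Telemetry) sendOSSEvent(event, userId string, properties analytics.Properties) {
	_, isOSSEvent := OSS_EVENTS_LIST[event]
	if a.ossOperator == nil || !isOSSEvent {
		return
	}
	a.ossOperator.Enqueue(analytics.Track{
		Event:      event,
		UserId:     userId,
		Properties: properties,
		Context: &analytics.Context{
			Extra: map[string]interface{}{"groupId": a.getCompanyDomain()},
		},
	})
}
```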