Compare commits: v0.49.1 ... new-table-
2 commits

| Author | SHA1 | Date |
|---|---|---|
| | 3476f3032d | |
| | 4b757b382f | |
1  .github/workflows/push.yaml  (vendored)
@@ -158,7 +158,6 @@ jobs:
           echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
           echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
           echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
-          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
       - name: Install dependencies
         working-directory: frontend
         run: yarn install
@@ -347,7 +347,7 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
 ```bash
 kubectl -n sample-application run strzal --image=djbingham/curl \
  --restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
- 'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm
+ 'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
 ```

 **5.1.3 To stop the load generation:**
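The stop command itself falls outside this hunk, so it is not shown in the compare view. As a rough sketch (assuming the same `sample-application` namespace and `locust-master` service used in the swarm command above), stopping the load generation is typically a plain request to the master's `/stop` endpoint:

```bash
# Hypothetical stop request; mirrors the swarm command above but hits /stop
kubectl -n sample-application run strzal --image=djbingham/curl \
  --restart='OnFailure' -i --tty --rm --command -- curl \
  http://locust-master:8089/stop
```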
1  Makefile

@@ -188,4 +188,3 @@ test:
 	go test ./pkg/query-service/tests/integration/...
 	go test ./pkg/query-service/rules/...
 	go test ./pkg/query-service/collectorsimulator/...
 	go test ./pkg/query-service/postprocess/...
@@ -146,7 +146,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.49.1
+    image: signoz/query-service:0.46.0
     command:
       [
         "-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:0.48.0
+    image: signoz/frontend:0.46.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -199,7 +199,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.102.2
+    image: signoz/signoz-otel-collector:0.88.24
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
       - query-service

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:0.102.2
+    image: signoz/signoz-schema-migrator:0.88.24
     deploy:
       restart_policy:
         condition: on-failure
@@ -66,7 +66,7 @@ services:
       - --storage.path=/data

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.102.2
+    image: signoz/signoz-otel-collector:0.88.24
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -164,7 +164,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.49.1}
+    image: signoz/query-service:${DOCKER_TAG:-0.46.0}
     container_name: signoz-query-service
     command:
       [
@@ -204,7 +204,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.49.1}
+    image: signoz/frontend:${DOCKER_TAG:-0.46.0}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -216,7 +216,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -230,7 +230,7 @@ services:


   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
     container_name: signoz-otel-collector
     command:
       [
@@ -164,7 +164,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.49.1}
+    image: signoz/query-service:${DOCKER_TAG:-0.46.0}
     container_name: signoz-query-service
     command:
       [
@@ -203,7 +203,7 @@ services:
     <<: *db-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.49.1}
+    image: signoz/frontend:${DOCKER_TAG:-0.46.0}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -215,7 +215,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:


   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
     container_name: signoz-otel-collector
     command:
       [
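Aside on the `${VAR:-default}` image tags above: Compose substitutes the environment variable when it is set and otherwise falls back to the listed default, so the versions pinned in these files only apply when nothing is exported at deploy time. A small illustrative sketch (the tag values here are placeholders taken from the diff, not a recommendation):

```bash
# Use the defaults baked into the compose file
docker compose up -d

# Override the image tags for a one-off deployment
DOCKER_TAG=0.49.1 OTELCOL_TAG=0.102.2 docker compose up -d
```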
@@ -389,7 +389,7 @@ trap bye EXIT

 URL="https://api.segment.io/v1/track"
 HEADER_1="Content-Type: application/json"
-HEADER_2="Authorization: Basic OWtScko3b1BDR1BFSkxGNlFqTVBMdDVibGpGaFJRQnI="
+HEADER_2="Authorization: Basic NEdtb2E0aXhKQVVIeDJCcEp4c2p3QTFiRWZud0VlUno6"

 send_event() {
 	error=""
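Only the opening lines of `send_event` appear in this hunk. For orientation, a call against Segment's track endpoint using the `URL`, `HEADER_1`, and `HEADER_2` variables defined above generally looks like the sketch below; the payload fields are illustrative and not taken from the actual install script:

```bash
# Hypothetical helper; the real send_event builds its own payload and error handling
send_event() {
  local event_name="$1"
  local payload="{\"anonymousId\":\"$(hostname)\",\"event\":\"${event_name}\",\"properties\":{\"os\":\"$(uname -s)\"}}"
  curl -sS -X POST "$URL" -H "$HEADER_1" -H "$HEADER_2" --data "$payload" > /dev/null
}
```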
@@ -24,6 +24,7 @@ import (
 type APIHandlerOptions struct {
 	DataConnector     interfaces.DataConnector
 	SkipConfig        *basemodel.SkipConfig
+	PreferDelta       bool
 	PreferSpanMetrics bool
 	MaxIdleConns      int
 	MaxOpenConns      int
@@ -52,6 +53,7 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
 	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
 		Reader:            opts.DataConnector,
 		SkipConfig:        opts.SkipConfig,
+		PerferDelta:       opts.PreferDelta,
 		PreferSpanMetrics: opts.PreferSpanMetrics,
 		MaxIdleConns:      opts.MaxIdleConns,
 		MaxOpenConns:      opts.MaxOpenConns,
@@ -64,6 +64,7 @@ type ServerOptions struct {
 	// alert specific params
 	DisableRules      bool
 	RuleRepoURL       string
+	PreferDelta       bool
 	PreferSpanMetrics bool
 	MaxIdleConns      int
 	MaxOpenConns      int
@@ -255,6 +256,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	apiOpts := api.APIHandlerOptions{
 		DataConnector:     reader,
 		SkipConfig:        skipConfig,
+		PreferDelta:       serverOptions.PreferDelta,
 		PreferSpanMetrics: serverOptions.PreferSpanMetrics,
 		MaxIdleConns:      serverOptions.MaxIdleConns,
 		MaxOpenConns:      serverOptions.MaxOpenConns,
@@ -89,6 +89,7 @@ func main() {

 	var cacheConfigPath, fluxInterval string
 	var enableQueryServiceLogOTLPExport bool
+	var preferDelta bool
 	var preferSpanMetrics bool

 	var maxIdleConns int
@@ -99,13 +100,14 @@ func main() {
 	flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
 	flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
 	flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
+	flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
 	flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
 	flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
 	flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
 	flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
 	flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
 	flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
-	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
+	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
 	flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
 	flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
 	flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
@@ -123,6 +125,7 @@ func main() {
 		HTTPHostPort:      baseconst.HTTPHostPort,
 		PromConfigPath:    promConfigPath,
 		SkipTopLvlOpsPath: skipTopLvlOpsPath,
+		PreferDelta:       preferDelta,
 		PreferSpanMetrics: preferSpanMetrics,
 		PrivateHostPort:   baseconst.PrivateHostPort,
 		DisableRules:      disableRules,
@@ -34,6 +34,8 @@
 		"@dnd-kit/core": "6.1.0",
 		"@dnd-kit/modifiers": "7.0.0",
 		"@dnd-kit/sortable": "8.0.0",
 		"@dnd-kit/utilities": "3.2.2",
 		"@faker-js/faker": "8.4.1",
 		"@grafana/data": "^9.5.2",
 		"@mdx-js/loader": "2.3.0",
 		"@mdx-js/react": "2.3.0",
@@ -43,6 +45,7 @@
 		"@sentry/react": "7.102.1",
 		"@sentry/webpack-plugin": "2.16.0",
 		"@signozhq/design-tokens": "0.0.8",
 		"@tanstack/react-table": "8.17.3",
 		"@uiw/react-md-editor": "3.23.5",
 		"@visx/group": "3.3.0",
 		"@visx/shape": "3.5.0",
@@ -88,7 +91,6 @@
 		"lucide-react": "0.379.0",
 		"mini-css-extract-plugin": "2.4.5",
 		"papaparse": "5.4.1",
-		"posthog-js": "1.142.1",
 		"rc-tween-one": "3.0.6",
 		"react": "18.2.0",
 		"react-addons-update": "15.6.3",
@@ -17,7 +17,6 @@ import { NotificationProvider } from 'hooks/useNotifications';
|
||||
import { ResourceProvider } from 'hooks/useResourceAttribute';
|
||||
import history from 'lib/history';
|
||||
import { identity, pick, pickBy } from 'lodash-es';
|
||||
import posthog from 'posthog-js';
|
||||
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
|
||||
import { QueryBuilderProvider } from 'providers/QueryBuilder';
|
||||
import { Suspense, useEffect, useState } from 'react';
|
||||
@@ -39,7 +38,7 @@ import defaultRoutes, {
|
||||
|
||||
function App(): JSX.Element {
|
||||
const themeConfig = useThemeConfig();
|
||||
const { data: licenseData } = useLicense();
|
||||
const { data } = useLicense();
|
||||
const [routes, setRoutes] = useState<AppRoutes[]>(defaultRoutes);
|
||||
const { role, isLoggedIn: isLoggedInState, user, org } = useSelector<
|
||||
AppState,
|
||||
@@ -93,10 +92,10 @@ function App(): JSX.Element {
|
||||
});
|
||||
|
||||
const isOnBasicPlan =
|
||||
licenseData?.payload?.licenses?.some(
|
||||
data?.payload?.licenses?.some(
|
||||
(license) =>
|
||||
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
|
||||
) || licenseData?.payload?.licenses === null;
|
||||
) || data?.payload?.licenses === null;
|
||||
|
||||
const enableAnalytics = (user: User): void => {
|
||||
const orgName =
|
||||
@@ -113,7 +112,9 @@ function App(): JSX.Element {
|
||||
};
|
||||
|
||||
const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
|
||||
|
||||
const domain = extractDomain(email);
|
||||
|
||||
const hostNameParts = hostname.split('.');
|
||||
|
||||
const groupTraits = {
|
||||
@@ -126,30 +127,10 @@ function App(): JSX.Element {
|
||||
};
|
||||
|
||||
window.analytics.identify(email, sanitizedIdentifyPayload);
|
||||
|
||||
window.analytics.group(domain, groupTraits);
|
||||
|
||||
window.clarity('identify', email, name);
|
||||
|
||||
posthog?.identify(email, {
|
||||
email,
|
||||
name,
|
||||
orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
|
||||
});
|
||||
|
||||
posthog?.group('company', domain, {
|
||||
name: orgName,
|
||||
tenant_id: hostNameParts[0],
|
||||
data_region: hostNameParts[1],
|
||||
tenant_url: hostname,
|
||||
company_domain: domain,
|
||||
source: 'signoz-ui',
|
||||
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
|
||||
});
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
@@ -163,6 +144,10 @@ function App(): JSX.Element {
|
||||
!isIdentifiedUser
|
||||
) {
|
||||
setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true');
|
||||
|
||||
if (isCloudUserVal) {
|
||||
enableAnalytics(user);
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
@@ -210,11 +195,6 @@ function App(): JSX.Element {
|
||||
console.error('Failed to parse local storage theme analytics event');
|
||||
}
|
||||
}
|
||||
|
||||
if (isCloudUserVal && user && user.email) {
|
||||
enableAnalytics(user);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [user]);
|
||||
|
||||
|
||||
@@ -5,13 +5,7 @@ import { Button, Dropdown, MenuProps } from 'antd';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useState } from 'react';
|
||||
|
||||
function DropDown({
|
||||
element,
|
||||
onDropDownItemClick,
|
||||
}: {
|
||||
element: JSX.Element[];
|
||||
onDropDownItemClick?: MenuProps['onClick'];
|
||||
}): JSX.Element {
|
||||
function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const items: MenuProps['items'] = element.map(
|
||||
@@ -29,7 +23,6 @@ function DropDown({
|
||||
items,
|
||||
onMouseEnter: (): void => setDdOpen(true),
|
||||
onMouseLeave: (): void => setDdOpen(false),
|
||||
onClick: (item): void => onDropDownItemClick?.(item),
|
||||
}}
|
||||
open={isDdOpen}
|
||||
>
|
||||
@@ -47,8 +40,4 @@ function DropDown({
|
||||
);
|
||||
}
|
||||
|
||||
DropDown.defaultProps = {
|
||||
onDropDownItemClick: (): void => {},
|
||||
};
|
||||
|
||||
export default DropDown;
|
||||
|
||||
@@ -62,6 +62,8 @@ function RawLogView({
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;
|
||||
|
||||
const severityText = data.severity_text ? `${data.severity_text} |` : '';
|
||||
|
||||
const logType = getLogIndicatorType(data);
|
||||
|
||||
const updatedSelecedFields = useMemo(
|
||||
@@ -86,16 +88,17 @@ function RawLogView({
|
||||
attributesText += ' | ';
|
||||
}
|
||||
|
||||
const text = useMemo(() => {
|
||||
const date =
|
||||
const text = useMemo(
|
||||
() =>
|
||||
typeof data.timestamp === 'string'
|
||||
? dayjs(data.timestamp)
|
||||
: dayjs(data.timestamp / 1e6);
|
||||
|
||||
return `${date.format('YYYY-MM-DD HH:mm:ss.SSS')} | ${attributesText} ${
|
||||
data.body
|
||||
}`;
|
||||
}, [data.timestamp, data.body, attributesText]);
|
||||
? `${dayjs(data.timestamp).format(
|
||||
'YYYY-MM-DD HH:mm:ss.SSS',
|
||||
)} | ${attributesText} ${severityText} ${data.body}`
|
||||
: `${dayjs(data.timestamp / 1e6).format(
|
||||
'YYYY-MM-DD HH:mm:ss.SSS',
|
||||
)} | ${attributesText} ${severityText} ${data.body}`,
|
||||
[data.timestamp, data.body, severityText, attributesText],
|
||||
);
|
||||
|
||||
const handleClickExpand = useCallback(() => {
|
||||
if (activeContextLog || isReadOnly) return;
|
||||
|
||||
@@ -2,9 +2,7 @@
|
||||
import './DynamicColumnTable.syles.scss';
|
||||
|
||||
import { Button, Dropdown, Flex, MenuProps, Switch } from 'antd';
|
||||
import { ColumnGroupType, ColumnType } from 'antd/es/table';
|
||||
import { ColumnsType } from 'antd/lib/table';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
||||
import { SlidersHorizontal } from 'lucide-react';
|
||||
import { memo, useEffect, useState } from 'react';
|
||||
@@ -24,7 +22,6 @@ function DynamicColumnTable({
|
||||
dynamicColumns,
|
||||
onDragColumn,
|
||||
facingIssueBtn,
|
||||
shouldSendAlertsLogEvent,
|
||||
...restProps
|
||||
}: DynamicColumnTableProps): JSX.Element {
|
||||
const [columnsData, setColumnsData] = useState<ColumnsType | undefined>(
|
||||
@@ -50,18 +47,11 @@ function DynamicColumnTable({
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [columns, dynamicColumns]);
|
||||
|
||||
const onToggleHandler = (
|
||||
index: number,
|
||||
column: ColumnGroupType<any> | ColumnType<any>,
|
||||
) => (checked: boolean, event: React.MouseEvent<HTMLButtonElement>): void => {
|
||||
const onToggleHandler = (index: number) => (
|
||||
checked: boolean,
|
||||
event: React.MouseEvent<HTMLButtonElement>,
|
||||
): void => {
|
||||
event.stopPropagation();
|
||||
|
||||
if (shouldSendAlertsLogEvent) {
|
||||
logEvent('Alert: Column toggled', {
|
||||
column: column?.title,
|
||||
action: checked ? 'Enable' : 'Disable',
|
||||
});
|
||||
}
|
||||
setVisibleColumns({
|
||||
tablesource,
|
||||
dynamicColumns,
|
||||
@@ -85,7 +75,7 @@ function DynamicColumnTable({
|
||||
<div>{column.title?.toString()}</div>
|
||||
<Switch
|
||||
checked={columnsData?.findIndex((c) => c.key === column.key) !== -1}
|
||||
onChange={onToggleHandler(index, column)}
|
||||
onChange={onToggleHandler(index)}
|
||||
/>
|
||||
</div>
|
||||
),
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import { Table } from 'antd';
|
||||
import { ColumnsType } from 'antd/lib/table';
|
||||
import { dragColumnParams } from 'hooks/useDragColumns/configs';
|
||||
import { set } from 'lodash-es';
|
||||
import {
|
||||
SyntheticEvent,
|
||||
useCallback,
|
||||
@@ -21,7 +20,6 @@ import { ResizeTableProps } from './types';
|
||||
function ResizeTable({
|
||||
columns,
|
||||
onDragColumn,
|
||||
pagination,
|
||||
...restProps
|
||||
}: ResizeTableProps): JSX.Element {
|
||||
const [columnsData, setColumns] = useState<ColumnsType>([]);
|
||||
@@ -60,21 +58,14 @@ function ResizeTable({
|
||||
[columnsData, onDragColumn, handleResize],
|
||||
);
|
||||
|
||||
const tableParams = useMemo(() => {
|
||||
const props = {
|
||||
const tableParams = useMemo(
|
||||
() => ({
|
||||
...restProps,
|
||||
components: { header: { cell: ResizableHeader } },
|
||||
columns: mergedColumns,
|
||||
};
|
||||
|
||||
set(
|
||||
props,
|
||||
'pagination',
|
||||
pagination ? { ...pagination, hideOnSinglePage: true } : false,
|
||||
);
|
||||
|
||||
return props;
|
||||
}, [mergedColumns, pagination, restProps]);
|
||||
}),
|
||||
[mergedColumns, restProps],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (columns) {
|
||||
|
||||
@@ -14,7 +14,6 @@ export interface DynamicColumnTableProps extends TableProps<any> {
|
||||
dynamicColumns: TableProps<any>['columns'];
|
||||
onDragColumn?: (fromIndex: number, toIndex: number) => void;
|
||||
facingIssueBtn?: FacingIssueBtnProps;
|
||||
shouldSendAlertsLogEvent?: boolean;
|
||||
}
|
||||
|
||||
export type GetVisibleColumnsFunction = (
|
||||
|
||||
@@ -9,6 +9,7 @@ import { Tooltip } from 'antd';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useMemo } from 'react';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { style } from './constant';
|
||||
|
||||
@@ -63,7 +64,7 @@ function TextToolTip({
|
||||
);
|
||||
|
||||
return (
|
||||
<Tooltip overlay={overlay}>
|
||||
<Tooltip getTooltipContainer={popupContainer} overlay={overlay}>
|
||||
{useFilledIcon ? (
|
||||
<QuestionCircleFilled style={iconStyle} />
|
||||
) : (
|
||||
|
||||
@@ -1,15 +1,13 @@
|
||||
import { PlusOutlined } from '@ant-design/icons';
|
||||
import { Tooltip, Typography } from 'antd';
|
||||
import getAll from 'api/channels/getAll';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import Spinner from 'components/Spinner';
|
||||
import TextToolTip from 'components/TextToolTip';
|
||||
import ROUTES from 'constants/routes';
|
||||
import useComponentPermission from 'hooks/useComponentPermission';
|
||||
import useFetch from 'hooks/useFetch';
|
||||
import history from 'lib/history';
|
||||
import { isUndefined } from 'lodash-es';
|
||||
import { useCallback, useEffect } from 'react';
|
||||
import { useCallback } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
@@ -33,14 +31,6 @@ function AlertChannels(): JSX.Element {
|
||||
|
||||
const { loading, payload, error, errorMessage } = useFetch(getAll);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isUndefined(payload)) {
|
||||
logEvent('Alert Channel: Channel list page visited', {
|
||||
number: payload?.length,
|
||||
});
|
||||
}
|
||||
}, [payload]);
|
||||
|
||||
if (error) {
|
||||
return <Typography>{errorMessage}</Typography>;
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ import './AppLayout.styles.scss';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Flex } from 'antd';
|
||||
import getLocalStorageKey from 'api/browser/localstorage/get';
|
||||
import getDynamicConfigs from 'api/dynamicConfigs/getDynamicConfigs';
|
||||
import getUserLatestVersion from 'api/user/getLatestVersion';
|
||||
import getUserVersion from 'api/user/getVersion';
|
||||
import cx from 'classnames';
|
||||
@@ -37,6 +38,7 @@ import { sideBarCollapse } from 'store/actions';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
import {
|
||||
UPDATE_CONFIGS,
|
||||
UPDATE_CURRENT_ERROR,
|
||||
UPDATE_CURRENT_VERSION,
|
||||
UPDATE_LATEST_VERSION,
|
||||
@@ -64,7 +66,11 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
const { t } = useTranslation(['titles']);
|
||||
|
||||
const [getUserVersionResponse, getUserLatestVersionResponse] = useQueries([
|
||||
const [
|
||||
getUserVersionResponse,
|
||||
getUserLatestVersionResponse,
|
||||
getDynamicConfigsResponse,
|
||||
] = useQueries([
|
||||
{
|
||||
queryFn: getUserVersion,
|
||||
queryKey: ['getUserVersion', user?.accessJwt],
|
||||
@@ -75,6 +81,10 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
queryKey: ['getUserLatestVersion', user?.accessJwt],
|
||||
enabled: isLoggedIn,
|
||||
},
|
||||
{
|
||||
queryFn: getDynamicConfigs,
|
||||
queryKey: ['getDynamicConfigs', user?.accessJwt],
|
||||
},
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -85,7 +95,15 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
if (getUserVersionResponse.status === 'idle' && isLoggedIn) {
|
||||
getUserVersionResponse.refetch();
|
||||
}
|
||||
}, [getUserLatestVersionResponse, getUserVersionResponse, isLoggedIn]);
|
||||
if (getDynamicConfigsResponse.status === 'idle') {
|
||||
getDynamicConfigsResponse.refetch();
|
||||
}
|
||||
}, [
|
||||
getUserLatestVersionResponse,
|
||||
getUserVersionResponse,
|
||||
isLoggedIn,
|
||||
getDynamicConfigsResponse,
|
||||
]);
|
||||
|
||||
const { children } = props;
|
||||
|
||||
@@ -93,6 +111,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
|
||||
const latestCurrentCounter = useRef(0);
|
||||
const latestVersionCounter = useRef(0);
|
||||
const latestConfigCounter = useRef(0);
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
@@ -170,6 +189,23 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
getDynamicConfigsResponse.isFetched &&
|
||||
getDynamicConfigsResponse.isSuccess &&
|
||||
getDynamicConfigsResponse.data &&
|
||||
getDynamicConfigsResponse.data.payload &&
|
||||
latestConfigCounter.current === 0
|
||||
) {
|
||||
latestConfigCounter.current = 1;
|
||||
|
||||
dispatch({
|
||||
type: UPDATE_CONFIGS,
|
||||
payload: {
|
||||
configs: getDynamicConfigsResponse.data.payload,
|
||||
},
|
||||
});
|
||||
}
|
||||
}, [
|
||||
dispatch,
|
||||
isLoggedIn,
|
||||
@@ -184,6 +220,9 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
|
||||
getUserLatestVersionResponse.isFetched,
|
||||
getUserVersionResponse.isFetched,
|
||||
getUserLatestVersionResponse.isSuccess,
|
||||
getDynamicConfigsResponse.data,
|
||||
getDynamicConfigsResponse.isFetched,
|
||||
getDynamicConfigsResponse.isSuccess,
|
||||
notifications,
|
||||
]);
|
||||
|
||||
|
||||
@@ -11,12 +11,11 @@ import testOpsGenie from 'api/channels/testOpsgenie';
|
||||
import testPagerApi from 'api/channels/testPager';
|
||||
import testSlackApi from 'api/channels/testSlack';
|
||||
import testWebhookApi from 'api/channels/testWebhook';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import ROUTES from 'constants/routes';
|
||||
import FormAlertChannels from 'container/FormAlertChannels';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useCallback, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import {
|
||||
@@ -44,10 +43,6 @@ function CreateAlertChannels({
|
||||
|
||||
const [formInstance] = Form.useForm();
|
||||
|
||||
useEffect(() => {
|
||||
logEvent('Alert Channel: Create channel page visited', {});
|
||||
}, []);
|
||||
|
||||
const [selectedConfig, setSelectedConfig] = useState<
|
||||
Partial<
|
||||
SlackChannel &
|
||||
@@ -144,25 +139,19 @@ function CreateAlertChannels({
|
||||
description: t('channel_creation_done'),
|
||||
});
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_creation_failed'),
|
||||
};
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||
} finally {
|
||||
setSavingState(false);
|
||||
}
|
||||
setSavingState(false);
|
||||
}, [prepareSlackRequest, t, notifications]);
|
||||
|
||||
const prepareWebhookRequest = useCallback(() => {
|
||||
@@ -211,25 +200,19 @@ function CreateAlertChannels({
|
||||
description: t('channel_creation_done'),
|
||||
});
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_creation_failed'),
|
||||
};
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||
} finally {
|
||||
setSavingState(false);
|
||||
}
|
||||
setSavingState(false);
|
||||
}, [prepareWebhookRequest, t, notifications]);
|
||||
|
||||
const preparePagerRequest = useCallback(() => {
|
||||
@@ -262,8 +245,8 @@ function CreateAlertChannels({
|
||||
setSavingState(true);
|
||||
const request = preparePagerRequest();
|
||||
|
||||
try {
|
||||
if (request) {
|
||||
if (request) {
|
||||
try {
|
||||
const response = await createPagerApi(request);
|
||||
|
||||
if (response.statusCode === 200) {
|
||||
@@ -272,31 +255,20 @@ function CreateAlertChannels({
|
||||
description: t('channel_creation_done'),
|
||||
});
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_creation_failed'),
|
||||
};
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||
} finally {
|
||||
setSavingState(false);
|
||||
}
|
||||
setSavingState(false);
|
||||
}, [t, notifications, preparePagerRequest]);
|
||||
|
||||
const prepareOpsgenieRequest = useCallback(
|
||||
@@ -323,25 +295,19 @@ function CreateAlertChannels({
|
||||
description: t('channel_creation_done'),
|
||||
});
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_creation_failed'),
|
||||
};
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||
} finally {
|
||||
setSavingState(false);
|
||||
}
|
||||
setSavingState(false);
|
||||
}, [prepareOpsgenieRequest, t, notifications]);
|
||||
|
||||
const prepareEmailRequest = useCallback(
|
||||
@@ -366,25 +332,19 @@ function CreateAlertChannels({
|
||||
description: t('channel_creation_done'),
|
||||
});
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_creation_failed'),
|
||||
};
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||
} finally {
|
||||
setSavingState(false);
|
||||
}
|
||||
setSavingState(false);
|
||||
}, [prepareEmailRequest, t, notifications]);
|
||||
|
||||
const prepareMsTeamsRequest = useCallback(
|
||||
@@ -410,25 +370,19 @@ function CreateAlertChannels({
|
||||
description: t('channel_creation_done'),
|
||||
});
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_creation_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_creation_failed'),
|
||||
});
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_creation_failed'),
|
||||
};
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_creation_failed'),
|
||||
});
|
||||
return { status: 'failed', statusMessage: t('channel_creation_failed') };
|
||||
} finally {
|
||||
setSavingState(false);
|
||||
}
|
||||
setSavingState(false);
|
||||
}, [prepareMsTeamsRequest, t, notifications]);
|
||||
|
||||
const onSaveHandler = useCallback(
|
||||
@@ -446,15 +400,7 @@ function CreateAlertChannels({
|
||||
const functionToCall = functionMapper[value as keyof typeof functionMapper];
|
||||
|
||||
if (functionToCall) {
|
||||
const result = await functionToCall();
|
||||
logEvent('Alert Channel: Save channel', {
|
||||
type: value,
|
||||
sendResolvedAlert: selectedConfig.send_resolved,
|
||||
name: selectedConfig.name,
|
||||
new: 'true',
|
||||
status: result?.status,
|
||||
statusMessage: result?.statusMessage,
|
||||
});
|
||||
functionToCall();
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
@@ -463,7 +409,6 @@ function CreateAlertChannels({
|
||||
}
|
||||
}
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[
|
||||
onSlackHandler,
|
||||
onWebhookHandler,
|
||||
@@ -527,25 +472,14 @@ function CreateAlertChannels({
|
||||
description: t('channel_test_failed'),
|
||||
});
|
||||
}
|
||||
|
||||
logEvent('Alert Channel: Test notification', {
|
||||
type: channelType,
|
||||
sendResolvedAlert: selectedConfig.send_resolved,
|
||||
name: selectedConfig.name,
|
||||
new: 'true',
|
||||
status:
|
||||
response && response.statusCode === 200 ? 'Test success' : 'Test failed',
|
||||
});
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('channel_test_unexpected'),
|
||||
});
|
||||
}
|
||||
|
||||
setTestingState(false);
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[
|
||||
prepareWebhookRequest,
|
||||
t,
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
import { Row, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||
import { useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
@@ -36,13 +34,6 @@ function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element {
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
logEvent('Alert: Sample alert link clicked', {
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[option],
|
||||
link: url,
|
||||
page: 'New alert data source selection page',
|
||||
});
|
||||
|
||||
window.open(url, '_blank');
|
||||
}
|
||||
const renderOptions = useMemo(
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Form, Row } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import FormAlertRules from 'container/FormAlertRules';
|
||||
@@ -69,8 +68,6 @@ function CreateRules(): JSX.Element {
|
||||
useEffect(() => {
|
||||
if (alertType) {
|
||||
onSelectType(alertType);
|
||||
} else {
|
||||
logEvent('Alert: New alert data source selection page visited', {});
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [alertType]);
|
||||
|
||||
@@ -11,7 +11,6 @@ import testOpsgenie from 'api/channels/testOpsgenie';
|
||||
import testPagerApi from 'api/channels/testPager';
|
||||
import testSlackApi from 'api/channels/testSlack';
|
||||
import testWebhookApi from 'api/channels/testWebhook';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import ROUTES from 'constants/routes';
|
||||
import {
|
||||
ChannelType,
|
||||
@@ -90,7 +89,7 @@ function EditAlertChannels({
|
||||
description: t('webhook_url_required'),
|
||||
});
|
||||
setSavingState(false);
|
||||
return { status: 'failed', statusMessage: t('webhook_url_required') };
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await editSlackApi(prepareSlackRequest());
|
||||
@@ -102,17 +101,13 @@ function EditAlertChannels({
|
||||
});
|
||||
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
setSavingState(false);
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_edit_failed'),
|
||||
};
|
||||
}, [prepareSlackRequest, t, notifications, selectedConfig]);
|
||||
|
||||
const prepareWebhookRequest = useCallback(() => {
|
||||
@@ -141,13 +136,13 @@ function EditAlertChannels({
|
||||
if (selectedConfig?.api_url === '') {
|
||||
showError(t('webhook_url_required'));
|
||||
setSavingState(false);
|
||||
return { status: 'failed', statusMessage: t('webhook_url_required') };
|
||||
return;
|
||||
}
|
||||
|
||||
if (username && (!password || password === '')) {
|
||||
showError(t('username_no_password'));
|
||||
setSavingState(false);
|
||||
return { status: 'failed', statusMessage: t('username_no_password') };
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await editWebhookApi(prepareWebhookRequest());
|
||||
@@ -159,15 +154,10 @@ function EditAlertChannels({
|
||||
});
|
||||
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||
} else {
|
||||
showError(response.error || t('channel_edit_failed'));
|
||||
}
|
||||
showError(response.error || t('channel_edit_failed'));
|
||||
|
||||
setSavingState(false);
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_edit_failed'),
|
||||
};
|
||||
}, [prepareWebhookRequest, t, notifications, selectedConfig]);
|
||||
|
||||
const prepareEmailRequest = useCallback(
|
||||
@@ -191,18 +181,13 @@ function EditAlertChannels({
|
||||
description: t('channel_edit_done'),
|
||||
});
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
|
||||
setSavingState(false);
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_edit_failed'),
|
||||
};
|
||||
}, [prepareEmailRequest, t, notifications]);
|
||||
|
||||
const preparePagerRequest = useCallback(
|
||||
@@ -233,7 +218,7 @@ function EditAlertChannels({
|
||||
description: validationError,
|
||||
});
|
||||
setSavingState(false);
|
||||
return { status: 'failed', statusMessage: validationError };
|
||||
return;
|
||||
}
|
||||
const response = await editPagerApi(preparePagerRequest());
|
||||
|
||||
@@ -244,18 +229,13 @@ function EditAlertChannels({
|
||||
});
|
||||
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
|
||||
setSavingState(false);
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_edit_failed'),
|
||||
};
|
||||
}, [preparePagerRequest, notifications, selectedConfig, t]);
|
||||
|
||||
const prepareOpsgenieRequest = useCallback(
|
||||
@@ -279,7 +259,7 @@ function EditAlertChannels({
|
||||
description: t('api_key_required'),
|
||||
});
|
||||
setSavingState(false);
|
||||
return { status: 'failed', statusMessage: t('api_key_required') };
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await editOpsgenie(prepareOpsgenieRequest());
|
||||
@@ -291,18 +271,13 @@ function EditAlertChannels({
|
||||
});
|
||||
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
|
||||
setSavingState(false);
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_edit_failed'),
|
||||
};
|
||||
}, [prepareOpsgenieRequest, t, notifications, selectedConfig]);
|
||||
|
||||
const prepareMsTeamsRequest = useCallback(
|
||||
@@ -326,7 +301,7 @@ function EditAlertChannels({
|
||||
description: t('webhook_url_required'),
|
||||
});
|
||||
setSavingState(false);
|
||||
return { status: 'failed', statusMessage: t('webhook_url_required') };
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await editMsTeamsApi(prepareMsTeamsRequest());
|
||||
@@ -338,46 +313,31 @@ function EditAlertChannels({
|
||||
});
|
||||
|
||||
history.replace(ROUTES.ALL_CHANNELS);
|
||||
return { status: 'success', statusMessage: t('channel_edit_done') };
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
}
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('channel_edit_failed'),
|
||||
});
|
||||
|
||||
setSavingState(false);
|
||||
return {
|
||||
status: 'failed',
|
||||
statusMessage: response.error || t('channel_edit_failed'),
|
||||
};
|
||||
}, [prepareMsTeamsRequest, t, notifications, selectedConfig]);
|
||||
|
||||
const onSaveHandler = useCallback(
|
||||
async (value: ChannelType) => {
|
||||
let result;
|
||||
(value: ChannelType) => {
|
||||
if (value === ChannelType.Slack) {
|
||||
result = await onSlackEditHandler();
|
||||
onSlackEditHandler();
|
||||
} else if (value === ChannelType.Webhook) {
|
||||
result = await onWebhookEditHandler();
|
||||
onWebhookEditHandler();
|
||||
} else if (value === ChannelType.Pagerduty) {
|
||||
result = await onPagerEditHandler();
|
||||
onPagerEditHandler();
|
||||
} else if (value === ChannelType.MsTeams) {
|
||||
result = await onMsTeamsEditHandler();
|
||||
onMsTeamsEditHandler();
|
||||
} else if (value === ChannelType.Opsgenie) {
|
||||
result = await onOpsgenieEditHandler();
|
||||
onOpsgenieEditHandler();
|
||||
} else if (value === ChannelType.Email) {
|
||||
result = await onEmailEditHandler();
|
||||
onEmailEditHandler();
|
||||
}
|
||||
logEvent('Alert Channel: Save channel', {
|
||||
type: value,
|
||||
sendResolvedAlert: selectedConfig.send_resolved,
|
||||
name: selectedConfig.name,
|
||||
new: 'false',
|
||||
status: result?.status,
|
||||
statusMessage: result?.statusMessage,
|
||||
});
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[
|
||||
onSlackEditHandler,
|
||||
onWebhookEditHandler,
|
||||
@@ -439,14 +399,6 @@ function EditAlertChannels({
|
||||
description: t('channel_test_failed'),
|
||||
});
|
||||
}
|
||||
logEvent('Alert Channel: Test notification', {
|
||||
type: channelType,
|
||||
sendResolvedAlert: selectedConfig.send_resolved,
|
||||
name: selectedConfig.name,
|
||||
new: 'false',
|
||||
status:
|
||||
response && response.statusCode === 200 ? 'Test success' : 'Test failed',
|
||||
});
|
||||
} catch (error) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
@@ -455,7 +407,6 @@ function EditAlertChannels({
|
||||
}
|
||||
setTestingState(false);
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[
|
||||
t,
|
||||
prepareWebhookRequest,
|
||||
|
||||
@@ -3,8 +3,6 @@ import './FormAlertRules.styles.scss';
|
||||
import { PlusOutlined } from '@ant-design/icons';
|
||||
import { Button, Form, Select, Switch, Tooltip } from 'antd';
|
||||
import getChannels from 'api/channels/getAll';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||
import ROUTES from 'constants/routes';
|
||||
import useComponentPermission from 'hooks/useComponentPermission';
|
||||
import useFetch from 'hooks/useFetch';
|
||||
@@ -12,7 +10,6 @@ import { useCallback, useEffect, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
import { AlertDef, Labels } from 'types/api/alerts/def';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
import { requireErrorMessage } from 'utils/form/requireErrorMessage';
|
||||
@@ -76,24 +73,9 @@ function BasicInfo({
|
||||
|
||||
const noChannels = channels.payload?.length === 0;
|
||||
const handleCreateNewChannels = useCallback(() => {
|
||||
logEvent('Alert: Create notification channel button clicked', {
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||
ruleId: isNewRule ? 0 : alertDef?.id,
|
||||
});
|
||||
window.open(ROUTES.CHANNELS_NEW, '_blank');
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!channels.loading && isNewRule) {
|
||||
logEvent('Alert: New alert creation page visited', {
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||
numberOfChannels: channels.payload?.length,
|
||||
});
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [channels.payload, channels.loading]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<StepHeading> {t('alert_form_step3')} </StepHeading>
|
||||
|
||||
@@ -2,7 +2,6 @@ import './QuerySection.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Button, Tabs, Tooltip } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import PromQLIcon from 'assets/Dashboard/PromQl';
|
||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
@@ -32,7 +31,6 @@ function QuerySection({
|
||||
runQuery,
|
||||
alertDef,
|
||||
panelType,
|
||||
ruleId,
|
||||
}: QuerySectionProps): JSX.Element {
|
||||
// init namespace for translations
|
||||
const { t } = useTranslation('alerts');
|
||||
@@ -160,15 +158,7 @@ function QuerySection({
|
||||
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={(): void => {
|
||||
runQuery();
|
||||
logEvent('Alert: Stage and run query', {
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[alertType],
|
||||
isNewRule: !ruleId || ruleId === 0,
|
||||
ruleId,
|
||||
queryType: queryCategory,
|
||||
});
|
||||
}}
|
||||
onClick={runQuery}
|
||||
className="stage-run-query"
|
||||
icon={<Play size={14} />}
|
||||
>
|
||||
@@ -238,7 +228,6 @@ interface QuerySectionProps {
|
||||
runQuery: VoidFunction;
|
||||
alertDef: AlertDef;
|
||||
panelType: PANEL_TYPES;
|
||||
ruleId: number;
|
||||
}
|
||||
|
||||
export default QuerySection;
|
||||
|
||||
@@ -12,10 +12,8 @@ import {
|
||||
} from 'antd';
|
||||
import saveAlertApi from 'api/alerts/save';
|
||||
import testAlertApi from 'api/alerts/testAlert';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
||||
import { alertHelpMessage } from 'components/facingIssueBtn/util';
|
||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
@@ -340,13 +338,8 @@ function FormAlertRules({
|
||||
return;
|
||||
}
|
||||
const postableAlert = memoizedPreparePostData();
|
||||
|
||||
setLoading(true);
|
||||
|
||||
let logData = {
|
||||
status: 'error',
|
||||
statusMessage: t('unexpected_error'),
|
||||
};
|
||||
|
||||
try {
|
||||
const apiReq =
|
||||
ruleId && ruleId > 0
|
||||
@@ -356,15 +349,10 @@ function FormAlertRules({
|
||||
const response = await saveAlertApi(apiReq);
|
||||
|
||||
if (response.statusCode === 200) {
|
||||
logData = {
|
||||
status: 'success',
|
||||
statusMessage:
|
||||
!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
|
||||
};
|
||||
|
||||
notifications.success({
|
||||
message: 'Success',
|
||||
description: logData.statusMessage,
|
||||
description:
|
||||
!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
|
||||
});
|
||||
|
||||
// invalidate rule in cache
|
||||
@@ -379,42 +367,18 @@ function FormAlertRules({
|
||||
history.replace(`${ROUTES.LIST_ALL_ALERT}?${urlQuery.toString()}`);
|
||||
}, 2000);
|
||||
} else {
|
||||
logData = {
|
||||
status: 'error',
|
||||
statusMessage: response.error || t('unexpected_error'),
|
||||
};
|
||||
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: logData.statusMessage,
|
||||
description: response.error || t('unexpected_error'),
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
logData = {
|
||||
status: 'error',
|
||||
statusMessage: t('unexpected_error'),
|
||||
};
|
||||
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: logData.statusMessage,
|
||||
description: t('unexpected_error'),
|
||||
});
|
||||
}
|
||||
|
||||
setLoading(false);
|
||||
|
||||
logEvent('Alert: Save alert', {
|
||||
...logData,
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[postableAlert?.alertType as AlertTypes],
|
||||
channelNames: postableAlert?.preferredChannels,
|
||||
broadcastToAll: postableAlert?.broadcastToAll,
|
||||
isNewRule: !ruleId || ruleId === 0,
|
||||
ruleId,
|
||||
queryType: currentQuery.queryType,
|
||||
alertId: postableAlert?.id,
|
||||
alertName: postableAlert?.alert,
|
||||
});
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [
|
||||
isFormValid,
|
||||
memoizedPreparePostData,
|
||||
@@ -450,7 +414,6 @@ function FormAlertRules({
|
||||
}
|
||||
const postableAlert = memoizedPreparePostData();
|
||||
|
||||
let statusResponse = { status: 'failed', message: '' };
|
||||
setLoading(true);
|
||||
try {
|
||||
const response = await testAlertApi({ data: postableAlert });
|
||||
@@ -462,43 +425,25 @@ function FormAlertRules({
|
||||
message: 'Error',
|
||||
description: t('no_alerts_found'),
|
||||
});
|
||||
statusResponse = { status: 'failed', message: t('no_alerts_found') };
|
||||
} else {
|
||||
notifications.success({
|
||||
message: 'Success',
|
||||
description: t('rule_test_fired'),
|
||||
});
|
||||
statusResponse = { status: 'success', message: t('rule_test_fired') };
|
||||
}
|
||||
} else {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: response.error || t('unexpected_error'),
|
||||
});
|
||||
statusResponse = {
|
||||
status: 'failed',
|
||||
message: response.error || t('unexpected_error'),
|
||||
};
|
||||
}
|
||||
} catch (e) {
|
||||
notifications.error({
|
||||
message: 'Error',
|
||||
description: t('unexpected_error'),
|
||||
});
|
||||
statusResponse = { status: 'failed', message: t('unexpected_error') };
|
||||
}
|
||||
setLoading(false);
|
||||
logEvent('Alert: Test notification', {
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||
channelNames: postableAlert?.preferredChannels,
|
||||
broadcastToAll: postableAlert?.broadcastToAll,
|
||||
isNewRule: !ruleId || ruleId === 0,
|
||||
ruleId,
|
||||
queryType: currentQuery.queryType,
|
||||
status: statusResponse.status,
|
||||
statusMessage: statusResponse.message,
|
||||
});
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [t, isFormValid, memoizedPreparePostData, notifications]);
|
||||
|
||||
const renderBasicInfo = (): JSX.Element => (
|
||||
@@ -568,16 +513,6 @@ function FormAlertRules({
|
||||
|
||||
const isRuleCreated = !ruleId || ruleId === 0;
|
||||
|
||||
useEffect(() => {
|
||||
if (!isRuleCreated) {
|
||||
logEvent('Alert: Edit page visited', {
|
||||
ruleId,
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[alertType as AlertTypes],
|
||||
});
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
function handleRedirection(option: AlertTypes): void {
|
||||
let url = '';
|
||||
switch (option) {
|
||||
@@ -600,13 +535,6 @@ function FormAlertRules({
|
||||
default:
|
||||
break;
|
||||
}
|
||||
logEvent('Alert: Check example alert clicked', {
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
|
||||
isNewRule: !ruleId || ruleId === 0,
|
||||
ruleId,
|
||||
queryType: currentQuery.queryType,
|
||||
link: url,
|
||||
});
|
||||
window.open(url, '_blank');
|
||||
}
|
||||
|
||||
@@ -644,7 +572,6 @@ function FormAlertRules({
|
||||
alertDef={alertDef}
|
||||
panelType={panelType || PANEL_TYPES.TIME_SERIES}
|
||||
key={currentQuery.queryType}
|
||||
ruleId={ruleId}
|
||||
/>
|
||||
|
||||
<RuleOptions
|
||||
|
||||
@@ -80,8 +80,6 @@ function FullView({
|
||||
query: updatedQuery,
|
||||
globalSelectedInterval: globalSelectedTime,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
fillGaps: widget.fillSpans,
|
||||
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
|
||||
};
|
||||
}
|
||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||
|
||||
@@ -109,7 +109,6 @@ function GridCardGraph({
|
||||
globalSelectedInterval,
|
||||
variables: getDashboardVariables(variables),
|
||||
fillGaps: widget.fillSpans,
|
||||
formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE,
|
||||
};
|
||||
}
|
||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||
|
||||
@@ -1,215 +0,0 @@
|
||||
export const tableDataMultipleQueriesSuccessResponse = {
|
||||
columns: [
|
||||
{
|
||||
name: 'service_name',
|
||||
queryName: '',
|
||||
isValueColumn: false,
|
||||
},
|
||||
{
|
||||
name: 'A',
|
||||
queryName: 'A',
|
||||
isValueColumn: true,
|
||||
},
|
||||
{
|
||||
name: 'B',
|
||||
queryName: 'B',
|
||||
isValueColumn: true,
|
||||
},
|
||||
],
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
A: 4196.71,
|
||||
B: 'n/a',
|
||||
service_name: 'demo-app',
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
A: 500.83,
|
||||
B: 'n/a',
|
||||
service_name: 'customer',
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
A: 499.5,
|
||||
B: 'n/a',
|
||||
service_name: 'mysql',
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
A: 293.22,
|
||||
B: 'n/a',
|
||||
service_name: 'frontend',
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
A: 230.03,
|
||||
B: 'n/a',
|
||||
service_name: 'driver',
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
A: 67.09,
|
||||
B: 'n/a',
|
||||
service_name: 'route',
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
A: 30.96,
|
||||
B: 'n/a',
|
||||
service_name: 'redis',
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
A: 'n/a',
|
||||
B: 112.27,
|
||||
service_name: 'n/a',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export const widgetQueryWithLegend = {
|
||||
clickhouse_sql: [
|
||||
{
|
||||
name: 'A',
|
||||
legend: '',
|
||||
disabled: false,
|
||||
query: '',
|
||||
},
|
||||
],
|
||||
promql: [
|
||||
{
|
||||
name: 'A',
|
||||
query: '',
|
||||
legend: '',
|
||||
disabled: false,
|
||||
},
|
||||
],
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
dataSource: 'metrics',
|
||||
queryName: 'A',
|
||||
aggregateOperator: 'count',
|
||||
aggregateAttribute: {
|
||||
dataType: 'float64',
|
||||
id: 'signoz_latency--float64--ExponentialHistogram--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
key: 'signoz_latency',
|
||||
type: 'ExponentialHistogram',
|
||||
},
|
||||
timeAggregation: '',
|
||||
spaceAggregation: 'p90',
|
||||
functions: [],
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
expression: 'A',
|
||||
disabled: false,
|
||||
stepInterval: 60,
|
||||
having: [],
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
groupBy: [
|
||||
{
|
||||
dataType: 'string',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'service_name',
|
||||
type: 'tag',
|
||||
id: 'service_name--string--tag--false',
|
||||
},
|
||||
],
|
||||
legend: 'p99',
|
||||
reduceTo: 'avg',
|
||||
},
|
||||
{
|
||||
dataSource: 'metrics',
|
||||
queryName: 'B',
|
||||
aggregateOperator: 'rate',
|
||||
aggregateAttribute: {
|
||||
dataType: 'float64',
|
||||
id: 'system_disk_operations--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
key: 'system_disk_operations',
|
||||
type: 'Sum',
|
||||
},
|
||||
timeAggregation: 'rate',
|
||||
spaceAggregation: 'sum',
|
||||
functions: [],
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
expression: 'B',
|
||||
disabled: false,
|
||||
stepInterval: 60,
|
||||
having: [],
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
groupBy: [],
|
||||
legend: '',
|
||||
reduceTo: 'avg',
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
},
|
||||
id: '48ad5a67-9a3c-49d4-a886-d7a34f8b875d',
|
||||
queryType: 'builder',
|
||||
};
|
||||
|
||||
export const expectedOutputWithLegends = {
|
||||
dataSource: [
|
||||
{
|
||||
A: 4196.71,
|
||||
B: 'n/a',
|
||||
service_name: 'demo-app',
|
||||
},
|
||||
{
|
||||
A: 500.83,
|
||||
B: 'n/a',
|
||||
service_name: 'customer',
|
||||
},
|
||||
{
|
||||
A: 499.5,
|
||||
B: 'n/a',
|
||||
service_name: 'mysql',
|
||||
},
|
||||
{
|
||||
A: 293.22,
|
||||
B: 'n/a',
|
||||
service_name: 'frontend',
|
||||
},
|
||||
{
|
||||
A: 230.03,
|
||||
B: 'n/a',
|
||||
service_name: 'driver',
|
||||
},
|
||||
{
|
||||
A: 67.09,
|
||||
B: 'n/a',
|
||||
service_name: 'route',
|
||||
},
|
||||
{
|
||||
A: 30.96,
|
||||
B: 'n/a',
|
||||
service_name: 'redis',
|
||||
},
|
||||
{
|
||||
A: 'n/a',
|
||||
B: 112.27,
|
||||
service_name: 'n/a',
|
||||
},
|
||||
],
|
||||
};
|
||||
@@ -1,42 +0,0 @@
import { Query } from 'types/api/queryBuilder/queryBuilderData';

import { createColumnsAndDataSource, getQueryLegend } from '../utils';
import {
	expectedOutputWithLegends,
	tableDataMultipleQueriesSuccessResponse,
	widgetQueryWithLegend,
} from './response';

describe('Table Panel utils', () => {
	it('createColumnsAndDataSource function', () => {
		const data = tableDataMultipleQueriesSuccessResponse;
		const query = widgetQueryWithLegend as Query;

		const { columns, dataSource } = createColumnsAndDataSource(data, query);

		expect(dataSource).toStrictEqual(expectedOutputWithLegends.dataSource);

		// this makes sure that the columns are rendered in the same order as response
		expect(columns[0].title).toBe('service_name');
		// the next specifically makes sure that the legends are properly applied in multiple queries
		expect(columns[1].title).toBe('p99');
		// this makes sure that the query without a legend takes the title from the query response
		expect(columns[2].title).toBe('B');

		// this is to ensure that the rows properly map to the column data indexes as the dataIndex should be equal to name of the columns
		// returned in the response as the rows will be mapped with them
		expect((columns[0] as any).dataIndex).toBe('service_name');
		expect((columns[1] as any).dataIndex).toBe('A');
		expect((columns[2] as any).dataIndex).toBe('B');
	});

	it('getQueryLegend function', () => {
		const query = widgetQueryWithLegend as Query;

		// query A has a legend of p99
		expect(getQueryLegend(query, 'A')).toBe('p99');

		// should return undefined when legend not present
		expect(getQueryLegend(query, 'B')).toBe(undefined);
	});
});
|
||||
@@ -3,7 +3,10 @@ import { Space, Tooltip } from 'antd';
|
||||
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
||||
import { Events } from 'constants/events';
|
||||
import { QueryTable } from 'container/QueryTable';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import {
|
||||
createTableColumnsFromQuery,
|
||||
RowData,
|
||||
} from 'lib/query/createTableColumnsFromQuery';
|
||||
import { cloneDeep, get, isEmpty, set } from 'lodash-es';
|
||||
import { memo, ReactNode, useCallback, useEffect, useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
@@ -11,11 +14,7 @@ import { eventEmitter } from 'utils/getEventEmitter';
|
||||
|
||||
import { WrapperStyled } from './styles';
|
||||
import { GridTableComponentProps } from './types';
|
||||
import {
|
||||
createColumnsAndDataSource,
|
||||
findMatchingThreshold,
|
||||
TableData,
|
||||
} from './utils';
|
||||
import { findMatchingThreshold } from './utils';
|
||||
|
||||
function GridTableComponent({
|
||||
data,
|
||||
@@ -26,26 +25,28 @@ function GridTableComponent({
|
||||
...props
|
||||
}: GridTableComponentProps): JSX.Element {
|
||||
const { t } = useTranslation(['valueGraph']);
|
||||
|
||||
// create columns and dataSource in the ui friendly structure
|
||||
// use the query from the widget here to extract the legend information
|
||||
const { columns, dataSource: originalDataSource } = useMemo(
|
||||
() => createColumnsAndDataSource((data as unknown) as TableData, query),
|
||||
[query, data],
|
||||
() =>
|
||||
createTableColumnsFromQuery({
|
||||
query,
|
||||
queryTableData: data,
|
||||
}),
|
||||
[data, query],
|
||||
);
|
||||
|
||||
const createDataInCorrectFormat = useCallback(
|
||||
(dataSource: RowData[]): RowData[] =>
|
||||
dataSource.map((d) => {
|
||||
const finalObject = {};
|
||||
|
||||
// we use the order of the columns here to have similar download as the user view
|
||||
columns.forEach((k) => {
|
||||
set(
|
||||
finalObject,
|
||||
get(k, 'title', '') as string,
|
||||
get(d, get(k, 'dataIndex', ''), 'n/a'),
|
||||
const keys = Object.keys(d);
|
||||
keys.forEach((k) => {
|
||||
const label = get(
|
||||
columns.find((c) => get(c, 'dataIndex', '') === k) || {},
|
||||
'title',
|
||||
'',
|
||||
);
|
||||
if (label) {
|
||||
set(finalObject, label as string, d[k]);
|
||||
}
|
||||
});
|
||||
return finalObject as RowData;
|
||||
}),
|
||||
@@ -64,11 +65,7 @@ function GridTableComponent({
|
||||
const newValue = { ...val };
|
||||
Object.keys(val).forEach((k) => {
|
||||
if (columnUnits[k]) {
|
||||
// the check below takes care of not adding units for rows that have n/a values
|
||||
newValue[k] =
|
||||
val[k] !== 'n/a'
|
||||
? getYAxisFormattedValue(String(val[k]), columnUnits[k])
|
||||
: val[k];
|
||||
newValue[k] = getYAxisFormattedValue(String(val[k]), columnUnits[k]);
|
||||
newValue[`${k}_without_unit`] = val[k];
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,11 +1,4 @@
|
||||
import { ColumnsType, ColumnType } from 'antd/es/table';
|
||||
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
|
||||
import { QUERY_TABLE_CONFIG } from 'container/QueryTable/config';
|
||||
import { QueryTableProps } from 'container/QueryTable/QueryTable.intefaces';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { isEmpty, isNaN } from 'lodash-es';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
|
||||
// Helper function to evaluate the condition based on the operator
|
||||
function evaluateCondition(
|
||||
@@ -63,85 +56,3 @@ export function findMatchingThreshold(
|
||||
hasMultipleMatches,
|
||||
};
|
||||
}
|
||||
|
||||
export interface TableData {
|
||||
columns: { name: string; queryName: string; isValueColumn: boolean }[];
|
||||
rows: { data: any }[];
|
||||
}
|
||||
|
||||
export function getQueryLegend(
|
||||
currentQuery: Query,
|
||||
queryName: string,
|
||||
): string | undefined {
|
||||
let legend: string | undefined;
|
||||
switch (currentQuery.queryType) {
|
||||
case EQueryType.QUERY_BUILDER:
|
||||
// check if the value is present in the queries
|
||||
legend = currentQuery.builder.queryData.find(
|
||||
(query) => query.queryName === queryName,
|
||||
)?.legend;
|
||||
|
||||
if (!legend) {
|
||||
// check if the value is present in the formula
|
||||
legend = currentQuery.builder.queryFormulas.find(
|
||||
(query) => query.queryName === queryName,
|
||||
)?.legend;
|
||||
}
|
||||
break;
|
||||
case EQueryType.CLICKHOUSE:
|
||||
legend = currentQuery.clickhouse_sql.find(
|
||||
(query) => query.name === queryName,
|
||||
)?.legend;
|
||||
break;
|
||||
case EQueryType.PROM:
|
||||
legend = currentQuery.promql.find((query) => query.name === queryName)
|
||||
?.legend;
|
||||
break;
|
||||
default:
|
||||
legend = undefined;
|
||||
break;
|
||||
}
|
||||
|
||||
return legend;
|
||||
}
|
||||
|
||||
export function createColumnsAndDataSource(
|
||||
data: TableData,
|
||||
currentQuery: Query,
|
||||
renderColumnCell?: QueryTableProps['renderColumnCell'],
|
||||
): { columns: ColumnsType<RowData>; dataSource: RowData[] } {
|
||||
const columns: ColumnsType<RowData> =
|
||||
data.columns?.reduce<ColumnsType<RowData>>((acc, item) => {
|
||||
// if the column is the value column then we need to check for the available legend
|
||||
const legend = item.isValueColumn
|
||||
? getQueryLegend(currentQuery, item.queryName)
|
||||
: undefined;
|
||||
|
||||
const column: ColumnType<RowData> = {
|
||||
dataIndex: item.name,
|
||||
// if no legend present then rely on the column name value
|
||||
title: !isEmpty(legend) ? legend : item.name,
|
||||
width: QUERY_TABLE_CONFIG.width,
|
||||
render: renderColumnCell && renderColumnCell[item.name],
|
||||
sorter: (a: RowData, b: RowData): number => {
|
||||
const valueA = Number(a[`${item.name}_without_unit`] ?? a[item.name]);
|
||||
const valueB = Number(b[`${item.name}_without_unit`] ?? b[item.name]);
|
||||
|
||||
if (!isNaN(valueA) && !isNaN(valueB)) {
|
||||
return valueA - valueB;
|
||||
}
|
||||
|
||||
return ((a[item.name] as string) || '').localeCompare(
|
||||
(b[item.name] as string) || '',
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
return [...acc, column];
|
||||
}, []) || [];
|
||||
|
||||
// the rows returned have data encapsulation hence removing the same here
|
||||
const dataSource = data.rows?.map((d) => d.data) || [];
|
||||
|
||||
return { columns, dataSource };
|
||||
}
|
||||
|
||||
@@ -27,7 +27,6 @@ import {
|
||||
import { useSelector } from 'react-redux';
|
||||
import { NavLink } from 'react-router-dom';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { License } from 'types/api/licenses/def';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
import { getFormattedDate, getRemainingDays } from 'utils/timeUtils';
|
||||
|
||||
@@ -110,13 +109,9 @@ function HeaderContainer(): JSX.Element {
|
||||
|
||||
const { data: licenseData, isFetching, status: licenseStatus } = useLicense();
|
||||
|
||||
const licensesStatus: string =
|
||||
licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ||
|
||||
'';
|
||||
|
||||
const isLicenseActive =
|
||||
licensesStatus?.toLocaleLowerCase() ===
|
||||
LICENSE_PLAN_STATUS.VALID.toLocaleLowerCase();
|
||||
licenseData?.payload?.licenses?.find((e) => e.isCurrent)?.status ===
|
||||
LICENSE_PLAN_STATUS.VALID;
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
|
||||
@@ -7,20 +7,17 @@ interface AlertInfoCardProps {
|
||||
header: string;
|
||||
subheader: string;
|
||||
link: string;
|
||||
onClick: () => void;
|
||||
}
|
||||
|
||||
function AlertInfoCard({
|
||||
header,
|
||||
subheader,
|
||||
link,
|
||||
onClick,
|
||||
}: AlertInfoCardProps): JSX.Element {
|
||||
return (
|
||||
<div
|
||||
className="alert-info-card"
|
||||
onClick={(): void => {
|
||||
onClick();
|
||||
window.open(link, '_blank');
|
||||
}}
|
||||
>
|
||||
|
||||
@@ -2,7 +2,6 @@ import './AlertsEmptyState.styles.scss';
|
||||
|
||||
import { PlusOutlined } from '@ant-design/icons';
|
||||
import { Button, Divider, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import ROUTES from 'constants/routes';
|
||||
import useComponentPermission from 'hooks/useComponentPermission';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
@@ -11,26 +10,12 @@ import { useCallback, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
|
||||
import AlertInfoCard from './AlertInfoCard';
|
||||
import { ALERT_CARDS, ALERT_INFO_LINKS } from './alertLinks';
|
||||
import InfoLinkText from './InfoLinkText';
|
||||
|
||||
const alertLogEvents = (
|
||||
title: string,
|
||||
link: string,
|
||||
dataSource?: DataSource,
|
||||
): void => {
|
||||
const attributes = {
|
||||
link,
|
||||
page: 'Alert empty state page',
|
||||
};
|
||||
|
||||
logEvent(title, dataSource ? { ...attributes, dataSource } : attributes);
|
||||
};
|
||||
|
||||
export function AlertsEmptyState(): JSX.Element {
|
||||
const { t } = useTranslation('common');
|
||||
const { role, featureResponse } = useSelector<AppState, AppReducer>(
|
||||
@@ -106,33 +91,18 @@ export function AlertsEmptyState(): JSX.Element {
|
||||
link="https://youtu.be/xjxNIqiv4_M"
|
||||
leftIconVisible
|
||||
rightIconVisible
|
||||
onClick={(): void =>
|
||||
alertLogEvents(
|
||||
'Alert: Video tutorial link clicked',
|
||||
'https://youtu.be/xjxNIqiv4_M',
|
||||
)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{ALERT_INFO_LINKS.map((info) => {
|
||||
const logEventTriggered = (): void =>
|
||||
alertLogEvents(
|
||||
'Alert: Tutorial doc link clicked',
|
||||
info.link,
|
||||
info.dataSource,
|
||||
);
|
||||
return (
|
||||
<InfoLinkText
|
||||
key={info.link}
|
||||
infoText={info.infoText}
|
||||
link={info.link}
|
||||
leftIconVisible={info.leftIconVisible}
|
||||
rightIconVisible={info.rightIconVisible}
|
||||
onClick={logEventTriggered}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
{ALERT_INFO_LINKS.map((info) => (
|
||||
<InfoLinkText
|
||||
key={info.link}
|
||||
infoText={info.infoText}
|
||||
link={info.link}
|
||||
leftIconVisible={info.leftIconVisible}
|
||||
rightIconVisible={info.rightIconVisible}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
<div className="get-started-text">
|
||||
@@ -143,23 +113,14 @@ export function AlertsEmptyState(): JSX.Element {
|
||||
</Divider>
|
||||
</div>
|
||||
|
||||
{ALERT_CARDS.map((card) => {
|
||||
const logEventTriggered = (): void =>
|
||||
alertLogEvents(
|
||||
'Alert: Sample alert link clicked',
|
||||
card.link,
|
||||
card.dataSource,
|
||||
);
|
||||
return (
|
||||
<AlertInfoCard
|
||||
key={card.link}
|
||||
header={card.header}
|
||||
subheader={card.subheader}
|
||||
link={card.link}
|
||||
onClick={logEventTriggered}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
{ALERT_CARDS.map((card) => (
|
||||
<AlertInfoCard
|
||||
key={card.link}
|
||||
header={card.header}
|
||||
subheader={card.subheader}
|
||||
link={card.link}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -6,7 +6,6 @@ interface InfoLinkTextProps {
|
||||
link: string;
|
||||
leftIconVisible: boolean;
|
||||
rightIconVisible: boolean;
|
||||
onClick: () => void;
|
||||
}
|
||||
|
||||
function InfoLinkText({
|
||||
@@ -14,12 +13,10 @@ function InfoLinkText({
|
||||
link,
|
||||
leftIconVisible,
|
||||
rightIconVisible,
|
||||
onClick,
|
||||
}: InfoLinkTextProps): JSX.Element {
|
||||
return (
|
||||
<Flex
|
||||
onClick={(): void => {
|
||||
onClick();
|
||||
window.open(link, '_blank');
|
||||
}}
|
||||
className="info-link-container"
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
export const ALERT_INFO_LINKS = [
|
||||
{
|
||||
infoText: 'How to create Metrics-based alerts',
|
||||
@@ -7,7 +5,6 @@ export const ALERT_INFO_LINKS = [
|
||||
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
||||
leftIconVisible: false,
|
||||
rightIconVisible: true,
|
||||
dataSource: DataSource.METRICS,
|
||||
},
|
||||
{
|
||||
infoText: 'How to create Log-based alerts',
|
||||
@@ -15,7 +12,6 @@ export const ALERT_INFO_LINKS = [
|
||||
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
||||
leftIconVisible: false,
|
||||
rightIconVisible: true,
|
||||
dataSource: DataSource.LOGS,
|
||||
},
|
||||
{
|
||||
infoText: 'How to create Trace-based alerts',
|
||||
@@ -23,7 +19,6 @@ export const ALERT_INFO_LINKS = [
|
||||
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page',
|
||||
leftIconVisible: false,
|
||||
rightIconVisible: true,
|
||||
dataSource: DataSource.TRACES,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -31,28 +26,24 @@ export const ALERT_CARDS = [
|
||||
{
|
||||
header: 'Alert on high memory usage',
|
||||
subheader: "Monitor your host's memory usage",
|
||||
dataSource: DataSource.METRICS,
|
||||
link:
|
||||
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-memory-usage-for-host-goes-above-400-mb-or-any-fixed-memory',
|
||||
},
|
||||
{
|
||||
header: 'Alert on slow external API calls',
|
||||
subheader: 'Monitor your external API calls',
|
||||
dataSource: DataSource.TRACES,
|
||||
link:
|
||||
'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-external-api-latency-p90-is-over-1-second-for-last-5-mins',
|
||||
},
|
||||
{
|
||||
header: 'Alert on high percentage of timeout errors in logs',
|
||||
subheader: 'Monitor your logs for errors',
|
||||
dataSource: DataSource.LOGS,
|
||||
link:
|
||||
'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-empty-page#1-alert-when-percentage-of-redis-timeout-error-logs-greater-than-7-in-last-5-mins',
|
||||
},
|
||||
{
|
||||
header: 'Alert on high error percentage of an endpoint',
|
||||
subheader: 'Monitor your API endpoint',
|
||||
dataSource: DataSource.METRICS,
|
||||
link:
|
||||
'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-empty-page#3-alert-when-the-error-percentage-for-an-endpoint-exceeds-5',
|
||||
},
|
||||
|
||||
@@ -3,7 +3,6 @@ import { PlusOutlined } from '@ant-design/icons';
|
||||
import { Input, Typography } from 'antd';
|
||||
import type { ColumnsType } from 'antd/es/table/interface';
|
||||
import saveAlertApi from 'api/alerts/save';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import DropDown from 'components/DropDown/DropDown';
|
||||
import { listAlertMessage } from 'components/facingIssueBtn/util';
|
||||
import {
|
||||
@@ -42,7 +41,7 @@ import {
|
||||
} from './styles';
|
||||
import Status from './TableComponents/Status';
|
||||
import ToggleAlertState from './ToggleAlertState';
|
||||
import { alertActionLogEvent, filterAlerts } from './utils';
|
||||
import { filterAlerts } from './utils';
|
||||
|
||||
const { Search } = Input;
|
||||
|
||||
@@ -108,16 +107,12 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
||||
}, [notificationsApi, t]);
|
||||
|
||||
const onClickNewAlertHandler = useCallback(() => {
|
||||
logEvent('Alert: New alert button clicked', {
|
||||
number: allAlertRules?.length,
|
||||
});
|
||||
featureResponse
|
||||
.refetch()
|
||||
.then(() => {
|
||||
history.push(ROUTES.ALERTS_NEW);
|
||||
})
|
||||
.catch(handleError);
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [featureResponse, handleError]);
|
||||
|
||||
const onEditHandler = (record: GettableAlert) => (): void => {
|
||||
@@ -326,7 +321,6 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
||||
width: 10,
|
||||
render: (id: GettableAlert['id'], record): JSX.Element => (
|
||||
<DropDown
|
||||
onDropDownItemClick={(item): void => alertActionLogEvent(item.key, record)}
|
||||
element={[
|
||||
<ToggleAlertState
|
||||
key="1"
|
||||
@@ -362,9 +356,6 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
||||
});
|
||||
}
|
||||
|
||||
const paginationConfig = {
|
||||
defaultCurrent: Number(paginationParam) || 1,
|
||||
};
|
||||
return (
|
||||
<>
|
||||
<SearchContainer>
|
||||
@@ -394,10 +385,11 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
|
||||
columns={columns}
|
||||
rowKey="id"
|
||||
dataSource={data}
|
||||
shouldSendAlertsLogEvent
|
||||
dynamicColumns={dynamicColumns}
|
||||
onChange={handleChange}
|
||||
pagination={paginationConfig}
|
||||
pagination={{
|
||||
defaultCurrent: Number(paginationParam) || 1,
|
||||
}}
|
||||
facingIssueBtn={{
|
||||
attributes: {
|
||||
screen: 'Alert list page',
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { Space } from 'antd';
|
||||
import getAll from 'api/alerts/getAll';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import ReleaseNote from 'components/ReleaseNote';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { isUndefined } from 'lodash-es';
|
||||
import { useEffect, useRef } from 'react';
|
||||
import { useEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
@@ -21,19 +19,8 @@ function ListAlertRules(): JSX.Element {
|
||||
cacheTime: 0,
|
||||
});
|
||||
|
||||
const logEventCalledRef = useRef(false);
|
||||
|
||||
const { notifications } = useNotifications();
|
||||
|
||||
useEffect(() => {
|
||||
if (!logEventCalledRef.current && !isUndefined(data?.payload)) {
|
||||
logEvent('Alert: List page visited', {
|
||||
number: data?.payload?.length,
|
||||
});
|
||||
logEventCalledRef.current = true;
|
||||
}
|
||||
}, [data?.payload]);
|
||||
|
||||
useEffect(() => {
|
||||
if (status === 'error' || (status === 'success' && data.statusCode >= 400)) {
|
||||
notifications.error({
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
import { GettableAlert } from 'types/api/alerts/get';
|
||||
|
||||
export const filterAlerts = (
|
||||
@@ -26,32 +23,3 @@ export const filterAlerts = (
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
export const alertActionLogEvent = (
|
||||
action: string,
|
||||
record: GettableAlert,
|
||||
): void => {
|
||||
let actionValue = '';
|
||||
switch (action) {
|
||||
case '0':
|
||||
actionValue = 'Enable/Disable';
|
||||
break;
|
||||
case '1':
|
||||
actionValue = 'Edit';
|
||||
break;
|
||||
case '2':
|
||||
actionValue = 'Clone';
|
||||
break;
|
||||
case '3':
|
||||
actionValue = 'Delete';
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
logEvent('Alert: Action', {
|
||||
ruleId: record.id,
|
||||
dataSource: ALERTS_DATA_SOURCE_MAP[record.alertType as AlertTypes],
|
||||
name: record.alert,
|
||||
action: actionValue,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -609,16 +609,6 @@ function DashboardsList(): JSX.Element {
|
||||
</>
|
||||
);
|
||||
|
||||
const paginationConfig = data.length > 20 && {
|
||||
pageSize: 20,
|
||||
showTotal: showPaginationItem,
|
||||
showSizeChanger: false,
|
||||
onChange: (page: any): void => handlePageSizeUpdate(page),
|
||||
current: Number(sortOrder.pagination),
|
||||
defaultCurrent: Number(sortOrder.pagination) || 1,
|
||||
hideOnSinglePage: true,
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="dashboards-list-container">
|
||||
<div className="dashboards-list-view-content">
|
||||
@@ -832,7 +822,16 @@ function DashboardsList(): JSX.Element {
|
||||
showSorterTooltip
|
||||
loading={isDashboardListLoading || isFilteringDashboards}
|
||||
showHeader={false}
|
||||
pagination={paginationConfig}
|
||||
pagination={
|
||||
data.length > 20 && {
|
||||
pageSize: 20,
|
||||
showTotal: showPaginationItem,
|
||||
showSizeChanger: false,
|
||||
onChange: (page): void => handlePageSizeUpdate(page),
|
||||
current: Number(sortOrder.pagination),
|
||||
defaultCurrent: Number(sortOrder.pagination) || 1,
|
||||
}
|
||||
}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -16,6 +16,7 @@ import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import PeriscopeTable from 'periscope/components/Table/Table';
|
||||
import { memo, useCallback, useMemo, useRef } from 'react';
|
||||
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
|
||||
// interfaces
|
||||
@@ -157,6 +158,7 @@ function LogsExplorerList({
|
||||
|
||||
return (
|
||||
<div className="logs-list-view-container">
|
||||
<PeriscopeTable />
|
||||
{(isLoading || (isFetching && logs.length === 0)) && <LogsLoading />}
|
||||
|
||||
{!isLoading &&
|
||||
|
||||
@@ -15,7 +15,6 @@ import {
|
||||
} from 'hooks/useResourceAttribute/utils';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useParams } from 'react-router-dom';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
@@ -94,26 +93,6 @@ function External(): JSX.Element {
|
||||
[servicename, tagFilterItems],
|
||||
);
|
||||
|
||||
const errorApmToTraceQuery = useGetAPMToTracesQueries({
|
||||
servicename,
|
||||
isExternalCall: true,
|
||||
filters: [
|
||||
{
|
||||
id: uuid().slice(0, 8),
|
||||
key: {
|
||||
key: 'hasError',
|
||||
dataType: DataTypes.bool,
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: 'hasError--bool--tag--true',
|
||||
},
|
||||
op: 'in',
|
||||
value: ['true'],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const externalCallRPSWidget = useMemo(
|
||||
() =>
|
||||
getWidgetQueryBuilder({
|
||||
@@ -177,7 +156,7 @@ function External(): JSX.Element {
|
||||
servicename,
|
||||
selectedTraceTags,
|
||||
timestamp: selectedTimeStamp,
|
||||
apmToTraceQuery: errorApmToTraceQuery,
|
||||
apmToTraceQuery,
|
||||
})}
|
||||
>
|
||||
View Traces
|
||||
|
||||
@@ -2,6 +2,8 @@ import { Card, Typography } from 'antd';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { WidgetGraphContainerProps } from 'container/NewWidget/types';
|
||||
// import useUrlQuery from 'hooks/useUrlQuery';
|
||||
// import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
|
||||
|
||||
import { NotFoundContainer } from './styles';
|
||||
@@ -12,7 +14,6 @@ function WidgetGraphContainer({
|
||||
queryResponse,
|
||||
setRequestData,
|
||||
selectedWidget,
|
||||
isLoadingPanelData,
|
||||
}: WidgetGraphContainerProps): JSX.Element {
|
||||
if (queryResponse.data && selectedGraph === PANEL_TYPES.BAR) {
|
||||
const sortedSeriesData = getSortedSeriesData(
|
||||
@@ -37,10 +38,6 @@ function WidgetGraphContainer({
|
||||
return <Spinner size="large" tip="Loading..." />;
|
||||
}
|
||||
|
||||
if (isLoadingPanelData) {
|
||||
return <Spinner size="large" tip="Loading..." />;
|
||||
}
|
||||
|
||||
if (
|
||||
selectedGraph !== PANEL_TYPES.LIST &&
|
||||
queryResponse.data?.payload.data?.result?.length === 0
|
||||
@@ -62,14 +59,6 @@ function WidgetGraphContainer({
|
||||
);
|
||||
}
|
||||
|
||||
if (queryResponse.isIdle) {
|
||||
return (
|
||||
<NotFoundContainer>
|
||||
<Typography>No Data</Typography>
|
||||
</NotFoundContainer>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<WidgetGraph
|
||||
selectedWidget={selectedWidget}
|
||||
|
||||
@@ -17,7 +17,6 @@ function WidgetGraph({
|
||||
queryResponse,
|
||||
setRequestData,
|
||||
selectedWidget,
|
||||
isLoadingPanelData,
|
||||
}: WidgetGraphContainerProps): JSX.Element {
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
|
||||
@@ -44,7 +43,6 @@ function WidgetGraph({
|
||||
)}
|
||||
|
||||
<WidgetGraphComponent
|
||||
isLoadingPanelData={isLoadingPanelData}
|
||||
selectedGraph={selectedGraph}
|
||||
queryResponse={queryResponse}
|
||||
setRequestData={setRequestData}
|
||||
|
||||
@@ -1,15 +1,18 @@
|
||||
import './LeftContainer.styles.scss';
|
||||
|
||||
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { memo } from 'react';
|
||||
import { memo, useEffect, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { getGraphType } from 'utils/getGraphType';
|
||||
|
||||
import { WidgetGraphProps } from '../types';
|
||||
import ExplorerColumnsRenderer from './ExplorerColumnsRenderer';
|
||||
@@ -24,17 +27,62 @@ function LeftContainer({
|
||||
selectedTracesFields,
|
||||
setSelectedTracesFields,
|
||||
selectedWidget,
|
||||
requestData,
|
||||
setRequestData,
|
||||
isLoadingPanelData,
|
||||
selectedTime,
|
||||
}: WidgetGraphProps): JSX.Element {
|
||||
const { stagedQuery } = useQueryBuilder();
|
||||
const { stagedQuery, redirectWithQueryBuilderData } = useQueryBuilder();
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
const { selectedTime: globalSelectedInterval } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
|
||||
if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
|
||||
return {
|
||||
selectedTime: selectedWidget?.timePreferance,
|
||||
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
||||
query: stagedQuery || initialQueriesMap.metrics,
|
||||
globalSelectedInterval,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
};
|
||||
}
|
||||
const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
|
||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||
redirectWithQueryBuilderData(updatedQuery);
|
||||
return {
|
||||
query: updatedQuery,
|
||||
graphType: PANEL_TYPES.LIST,
|
||||
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
|
||||
globalSelectedInterval,
|
||||
tableParams: {
|
||||
pagination: {
|
||||
offset: 0,
|
||||
limit: updatedQuery.builder.queryData[0].limit || 0,
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (stagedQuery) {
|
||||
setRequestData((prev) => ({
|
||||
...prev,
|
||||
selectedTime: selectedTime.enum || prev.selectedTime,
|
||||
globalSelectedInterval,
|
||||
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
||||
query: stagedQuery,
|
||||
fillGaps: selectedWidget.fillSpans || false,
|
||||
}));
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [
|
||||
stagedQuery,
|
||||
selectedTime,
|
||||
selectedWidget.fillSpans,
|
||||
globalSelectedInterval,
|
||||
]);
|
||||
|
||||
const queryResponse = useGetQueryRange(
|
||||
requestData,
|
||||
selectedDashboard?.data?.version || DEFAULT_ENTITY_VERSION,
|
||||
@@ -56,7 +104,6 @@ function LeftContainer({
|
||||
queryResponse={queryResponse}
|
||||
setRequestData={setRequestData}
|
||||
selectedWidget={selectedWidget}
|
||||
isLoadingPanelData={isLoadingPanelData}
|
||||
/>
|
||||
<QueryContainer className="query-section-left-container">
|
||||
<QuerySection selectedGraph={selectedGraph} queryResponse={queryResponse} />
|
||||
|
||||
@@ -7,7 +7,7 @@ import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
|
||||
import { chartHelpMessage } from 'components/facingIssueBtn/util';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { DashboardShortcuts } from 'constants/shortcuts/DashboardShortcuts';
|
||||
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
|
||||
@@ -18,8 +18,6 @@ import useAxiosError from 'hooks/useAxiosError';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { MESSAGE, useIsFeatureDisabled } from 'hooks/useFeatureFlag';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import history from 'lib/history';
|
||||
import { defaultTo, isUndefined } from 'lodash-es';
|
||||
import { Check, X } from 'lucide-react';
|
||||
@@ -40,8 +38,6 @@ import { IField } from 'types/api/logs/fields';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import AppReducer from 'types/reducer/app';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { getGraphType, getGraphTypeForFormat } from 'utils/getGraphType';
|
||||
|
||||
import LeftContainer from './LeftContainer';
|
||||
import QueryTypeTag from './LeftContainer/QueryTypeTag';
|
||||
@@ -87,10 +83,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
||||
const { featureResponse } = useSelector<AppState, AppReducer>(
|
||||
(state) => state.app,
|
||||
);
|
||||
const { selectedTime: globalSelectedInterval } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const { widgets = [] } = selectedDashboard?.data || {};
|
||||
|
||||
@@ -286,65 +278,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
||||
|
||||
const handleError = useAxiosError();
|
||||
|
||||
// this loading state is to take care of the mismatch in responses between table and other panels
// while the panel type is changing, the query still contains the older value and the processing logic fails
|
||||
const [isLoadingPanelData, setIsLoadingPanelData] = useState<boolean>(false);
|
||||
|
||||
// request data should be handled by the parent and the child components should consume the same
|
||||
// this has been moved here from the left container
|
||||
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
|
||||
if (selectedWidget && selectedGraph !== PANEL_TYPES.LIST) {
|
||||
return {
|
||||
selectedTime: selectedWidget?.timePreferance,
|
||||
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
||||
query: stagedQuery || initialQueriesMap.metrics,
|
||||
globalSelectedInterval,
|
||||
formatForWeb:
|
||||
getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
|
||||
PANEL_TYPES.TABLE,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
};
|
||||
}
|
||||
const updatedQuery = { ...(stagedQuery || initialQueriesMap.metrics) };
|
||||
updatedQuery.builder.queryData[0].pageSize = 10;
|
||||
redirectWithQueryBuilderData(updatedQuery);
|
||||
return {
|
||||
query: updatedQuery,
|
||||
graphType: PANEL_TYPES.LIST,
|
||||
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
|
||||
globalSelectedInterval,
|
||||
tableParams: {
|
||||
pagination: {
|
||||
offset: 0,
|
||||
limit: updatedQuery.builder.queryData[0].limit || 0,
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (stagedQuery) {
|
||||
setIsLoadingPanelData(false);
|
||||
setRequestData((prev) => ({
|
||||
...prev,
|
||||
selectedTime: selectedTime.enum || prev.selectedTime,
|
||||
globalSelectedInterval,
|
||||
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
|
||||
query: stagedQuery,
|
||||
fillGaps: selectedWidget.fillSpans || false,
|
||||
formatForWeb:
|
||||
getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
|
||||
PANEL_TYPES.TABLE,
|
||||
}));
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [
|
||||
stagedQuery,
|
||||
selectedTime,
|
||||
selectedWidget.fillSpans,
|
||||
globalSelectedInterval,
|
||||
]);
|
||||
|
||||
const onClickSaveHandler = useCallback(() => {
|
||||
if (!selectedDashboard) {
|
||||
return;
|
||||
@@ -469,7 +402,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
||||
}, [dashboardId]);
|
||||
|
||||
const setGraphHandler = (type: PANEL_TYPES): void => {
|
||||
setIsLoadingPanelData(true);
|
||||
const updatedQuery = handleQueryChange(type as any, supersetQuery);
|
||||
setGraphType(type);
|
||||
redirectWithQueryBuilderData(
|
||||
@@ -595,9 +527,6 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
|
||||
setSelectedTracesFields={setSelectedTracesFields}
|
||||
selectedWidget={selectedWidget}
|
||||
selectedTime={selectedTime}
|
||||
requestData={requestData}
|
||||
setRequestData={setRequestData}
|
||||
isLoadingPanelData={isLoadingPanelData}
|
||||
/>
|
||||
)}
|
||||
</LeftContainerWrapper>
|
||||
|
||||
@@ -24,9 +24,6 @@ export interface WidgetGraphProps {
|
||||
selectedWidget: Widgets;
|
||||
selectedGraph: PANEL_TYPES;
|
||||
selectedTime: timePreferance;
|
||||
requestData: GetQueryResultsProps;
|
||||
setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
|
||||
isLoadingPanelData: boolean;
|
||||
}
|
||||
|
||||
export type WidgetGraphContainerProps = {
|
||||
@@ -37,5 +34,4 @@ export type WidgetGraphContainerProps = {
|
||||
setRequestData: Dispatch<SetStateAction<GetQueryResultsProps>>;
|
||||
selectedGraph: PANEL_TYPES;
|
||||
selectedWidget: Widgets;
|
||||
isLoadingPanelData: boolean;
|
||||
};
|
||||
|
||||
@@ -4,45 +4,50 @@

Prior to installation, you must ensure your Kubernetes cluster is ready and that you have the necessary permissions to deploy applications. Follow these steps to use Helm for setting up the Collector:

1. **Add the OpenTelemetry Helm repository:**

```bash
helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts
```

2. **Prepare the `otel-collector-values.yaml` Configuration**

#### Azure Event Hub Receiver Configuration

If you haven't created the logs Event Hub, you can create one by following the steps in the [Azure Event Hubs documentation](../../bootstrapping/data-ingestion).

Replace the placeholder `<Primary Connection String>` with the primary connection string for your Event Hub. It should look something like this:

```yaml
connection: Endpoint=sb://namespace.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=superSecret1234=;EntityPath=hubName
```

The Event Hub docs have a step to create a SAS policy for the event hub and copy the connection string.
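
For orientation, here is a minimal sketch of how that connection string might sit inside the `azureeventhub` receiver block of `otel-collector-values.yaml`. The surrounding field names are assumptions based on the upstream receiver's documented options, not part of this guide's original snippet, so adjust them to your chart version:

```yaml
receivers:
  # Pulls logs from the Event Hub using the SAS connection string prepared above.
  azureeventhub:
    connection: "<Primary Connection String>"
    # Assumed option: 'azure' parses Azure resource-log envelopes, 'raw' passes events through.
    format: "azure"
```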

#### Azure Monitor Receiver Configuration

You will need to set up a [service principal](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal) with Read permissions to receive data from Azure Monitor.

1. Follow the steps in the [Create a service principal Azure Doc](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#register-an-application-with-microsoft-entra-id-and-create-a-service-principal) documentation to create a service principal. You can name it `signoz-central-collector-app`; the redirect URI can be empty.
2. To add read permissions to Azure Monitor, follow the [Assign Role](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#assign-a-role-to-the-application) documentation. The read access can be given to the full subscription.
3. There are multiple ways to authenticate the service principal; we will use the client secret option. Follow [Creating a client secret](https://learn.microsoft.com/en-us/entra/identity-platform/howto-create-service-principal-portal#option-3-create-a-new-client-secret) and don't forget to copy the client secret. The secret is used in the configuration file as `client_secret`.

<figure data-zoomable align="center">
    <img
        src="/img/docs/azure-monitoring/service-principal-app-overview.webp"
        alt="Application Overview"
    />
    <figcaption>
        <i>
            Application Overview
        </i>
    </figcaption>
</figure>

4. To find `client_id` and `tenant_id`, go to the [Azure Portal](https://portal.azure.com/) and search for the `Application` you created. You will see the `Application (client) ID` and `Directory (tenant) ID` in the Overview section.

5. To find `subscription_id`, follow the steps in [Find Your Subscription](https://learn.microsoft.com/en-us/azure/azure-portal/get-subscription-tenant-id#find-your-azure-subscription) and populate them in the configuration file (see the sketch after this list).

6. Ensure you replace the placeholders `<region>` and `<ingestion-key>` with the appropriate values for your SigNoz Cloud instance.
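
To make the placement of these values concrete, here is a minimal sketch of an `azuremonitor` receiver block for `otel-collector-values.yaml`. The exact field set is an assumption based on the upstream receiver's documented options and is not taken from this guide, so treat it as a starting point:

```yaml
receivers:
  # Scrapes metrics from Azure Monitor using the service principal created above.
  azuremonitor:
    subscription_id: "<subscription_id>"
    tenant_id: "<tenant_id>"
    client_id: "<client_id>"
    client_secret: "<client_secret>"
    # Assumed polling interval; tune to your needs.
    collection_interval: 60s
```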

@@ -87,15 +92,13 @@ processors:
  batch: {}
exporters:
  otlp:
    endpoint: "ingest.<region>.signoz.cloud:443"
    tls:
      insecure: false
    headers:
      "signoz-access-token": "<ingestion-key>"
```

3. **Deploy the OpenTelemetry Collector to your Kubernetes cluster:**

You'll need to prepare a custom configuration file, say `otel-collector-values.yaml`, that matches your environment's specific needs. Replace `<namespace>` with the Kubernetes namespace where you wish to install the Collector.
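
A minimal sketch of the install command, assuming the `open-telemetry/opentelemetry-collector` chart from the repository added in step 1; the release name and the `mode` flag are assumptions, so adapt them to your setup:

```bash
# Install (or upgrade) the Collector into the chosen namespace using the values file prepared above.
helm upgrade --install -n <namespace> --create-namespace \
  signoz-central-collector open-telemetry/opentelemetry-collector \
  --set mode=deployment \
  -f otel-collector-values.yaml
```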
@@ -1,37 +0,0 @@
|
||||
import { act, render, screen, waitFor } from 'tests/test-utils';
|
||||
|
||||
import Members from '../Members';
|
||||
|
||||
describe('Organization Settings Page', () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('render list of members', async () => {
|
||||
act(() => {
|
||||
render(<Members />);
|
||||
});
|
||||
|
||||
const title = await screen.findByText(/Members/i);
|
||||
expect(title).toBeInTheDocument();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('firstUser@test.io')).toBeInTheDocument(); // first item
|
||||
expect(screen.getByText('lastUser@test.io')).toBeInTheDocument(); // last item
|
||||
});
|
||||
});
|
||||
|
||||
// this is required as our edit/delete logic is dependent on the index and it will break with pagination enabled
|
||||
it('render list of members without pagination', async () => {
|
||||
render(<Members />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('firstUser@test.io')).toBeInTheDocument(); // first item
|
||||
expect(screen.getByText('lastUser@test.io')).toBeInTheDocument(); // last item
|
||||
|
||||
expect(
|
||||
document.querySelector('.ant-table-pagination'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -9,7 +9,7 @@ function TablePanelWrapper({
|
||||
tableProcessedDataRef,
|
||||
}: PanelWrapperProps): JSX.Element {
|
||||
const panelData =
|
||||
(queryResponse.data?.payload?.data?.result?.[0] as any)?.table || [];
|
||||
queryResponse.data?.payload?.data?.newResult?.data?.result || [];
|
||||
const { thresholds } = widget;
|
||||
return (
|
||||
<GridTableComponent
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
export const historyPagination = {
|
||||
defaultPageSize: 5,
|
||||
hideOnSinglePage: true,
|
||||
};
|
||||
|
||||
@@ -334,11 +334,6 @@ export function PlannedDowntimeList({
|
||||
}
|
||||
}, [downtimeSchedules.error, downtimeSchedules.isError, notifications]);
|
||||
|
||||
const paginationConfig = {
|
||||
pageSize: 5,
|
||||
showSizeChanger: false,
|
||||
hideOnSinglePage: true,
|
||||
};
|
||||
return (
|
||||
<Table<DowntimeSchedulesTableData>
|
||||
columns={columns}
|
||||
@@ -347,7 +342,7 @@ export function PlannedDowntimeList({
|
||||
dataSource={tableData || []}
|
||||
loading={downtimeSchedules.isLoading || downtimeSchedules.isFetching}
|
||||
showHeader={false}
|
||||
pagination={paginationConfig}
|
||||
pagination={{ pageSize: 5, showSizeChanger: false }}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -33,12 +33,10 @@ export const getColumnSearchProps = (
|
||||
record: ServicesList,
|
||||
): boolean => {
|
||||
if (record[dataIndex]) {
|
||||
return (
|
||||
record[dataIndex]
|
||||
?.toString()
|
||||
.toLowerCase()
|
||||
.includes(value.toString().toLowerCase()) || false
|
||||
);
|
||||
record[dataIndex]
|
||||
?.toString()
|
||||
.toLowerCase()
|
||||
.includes(value.toString().toLowerCase());
|
||||
}
|
||||
|
||||
return false;
|
||||
|
||||
@@ -79,11 +79,6 @@ function ServiceMetricTable({
|
||||
}
|
||||
}, [services, licenseData, isFetching, isCloudUserVal]);
|
||||
|
||||
const paginationConfig = {
|
||||
defaultPageSize: 10,
|
||||
showTotal: (total: number, range: number[]): string =>
|
||||
`${range[0]}-${range[1]} of ${total} items`,
|
||||
};
|
||||
return (
|
||||
<>
|
||||
{RPS > MAX_RPS_LIMIT && (
|
||||
@@ -97,7 +92,11 @@ function ServiceMetricTable({
|
||||
<ResourceAttributesFilter />
|
||||
|
||||
<ResizeTable
|
||||
pagination={paginationConfig}
|
||||
pagination={{
|
||||
defaultPageSize: 10,
|
||||
showTotal: (total: number, range: number[]): string =>
|
||||
`${range[0]}-${range[1]} of ${total} items`,
|
||||
}}
|
||||
columns={tableColumns}
|
||||
loading={isLoading}
|
||||
dataSource={services}
|
||||
|
||||
@@ -36,11 +36,6 @@ function ServiceTraceTable({
|
||||
}
|
||||
}, [services, licenseData, isFetching, isCloudUserVal]);
|
||||
|
||||
const paginationConfig = {
|
||||
defaultPageSize: 10,
|
||||
showTotal: (total: number, range: number[]): string =>
|
||||
`${range[0]}-${range[1]} of ${total} items`,
|
||||
};
|
||||
return (
|
||||
<>
|
||||
{RPS > MAX_RPS_LIMIT && (
|
||||
@@ -54,7 +49,11 @@ function ServiceTraceTable({
|
||||
<ResourceAttributesFilter />
|
||||
|
||||
<ResizeTable
|
||||
pagination={paginationConfig}
|
||||
pagination={{
|
||||
defaultPageSize: 10,
|
||||
showTotal: (total: number, range: number[]): string =>
|
||||
`${range[0]}-${range[1]} of ${total} items`,
|
||||
}}
|
||||
columns={tableColumns}
|
||||
loading={loading}
|
||||
dataSource={services}
|
||||
|
||||
@@ -152,13 +152,9 @@ function SideNav({
|
||||
|
||||
const { t } = useTranslation('');
|
||||
|
||||
const licenseStatus: string =
|
||||
licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ||
|
||||
'';
|
||||
|
||||
const isLicenseActive =
|
||||
licenseStatus?.toLocaleLowerCase() ===
|
||||
LICENSE_PLAN_STATUS.VALID.toLocaleLowerCase();
|
||||
licenseData?.payload?.licenses?.find((e: License) => e.isCurrent)?.status ===
|
||||
LICENSE_PLAN_STATUS.VALID;
|
||||
|
||||
const isEnterprise = licenseData?.payload?.licenses?.some(
|
||||
(license: License) =>
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import './Tags.styles.scss';
|
||||
|
||||
import { Tooltip } from 'antd';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { Fragment, useMemo } from 'react';
|
||||
@@ -28,12 +26,7 @@ function Tag({ tags, onToggleHandler, setText }: TagProps): JSX.Element {
|
||||
<Container>
|
||||
<CustomSubTitle>{tags.key}</CustomSubTitle>
|
||||
<SubTextContainer isDarkMode={isDarkMode}>
|
||||
<Tooltip
|
||||
overlayClassName="tagTooltip"
|
||||
placement="left"
|
||||
autoAdjustOverflow
|
||||
title={value}
|
||||
>
|
||||
<Tooltip overlay={(): string => value}>
|
||||
<CustomSubText
|
||||
ellipsis={{
|
||||
rows: isEllipsed ? 2 : 0,
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
.tagTooltip {
|
||||
.ant-tooltip-content {
|
||||
max-height: 300px;
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
padding: 8px;
|
||||
|
||||
background-color: var(--bg-slate-400);
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.ant-tooltip-inner {
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.tagTooltip {
|
||||
.ant-tooltip-content {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
}
|
||||
|
||||
.ant-tooltip-inner {
|
||||
box-shadow: none;
|
||||
background-color: var(--bg-vanilla-300);
|
||||
color: var(--bg-ink-200);
|
||||
}
|
||||
|
||||
.ant-tooltip-arrow {
|
||||
border-top-color: var(--bg-vanilla-300) !important;
|
||||
}
|
||||
|
||||
&.ant-tooltip {
|
||||
--antd-arrow-background-color: var(--bg-vanilla-300) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -19,17 +19,12 @@ import { CardContainer, CustomSubText, styles } from './styles';
|
||||
import Tags from './Tags';
|
||||
|
||||
function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element {
|
||||
const { tree, firstSpanStartTime } = props;
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const {
|
||||
tree,
|
||||
firstSpanStartTime,
|
||||
traceStartTime = minTime,
|
||||
traceEndTime = maxTime,
|
||||
} = props;
|
||||
|
||||
const { id: traceId } = useParams<Params>();
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
@@ -79,7 +74,7 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element {
|
||||
];
|
||||
|
||||
const onLogsHandler = (): void => {
|
||||
const query = getTraceToLogsQuery(traceId, traceStartTime, traceEndTime);
|
||||
const query = getTraceToLogsQuery(traceId, minTime, maxTime);
|
||||
|
||||
history.push(
|
||||
`${ROUTES.LOGS_EXPLORER}?${createQueryParams({
|
||||
@@ -145,14 +140,10 @@ function SelectedSpanDetails(props: SelectedSpanDetailsProps): JSX.Element {
|
||||
interface SelectedSpanDetailsProps {
|
||||
tree?: ITraceTree;
|
||||
firstSpanStartTime: number;
|
||||
traceStartTime?: number;
|
||||
traceEndTime?: number;
|
||||
}
|
||||
|
||||
SelectedSpanDetails.defaultProps = {
|
||||
tree: undefined,
|
||||
traceStartTime: undefined,
|
||||
traceEndTime: undefined,
|
||||
};
|
||||
|
||||
export interface ModalText {
|
||||
|
||||
@@ -48,12 +48,6 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
|
||||
[response],
|
||||
);
|
||||
|
||||
const traceStartTime = useMemo(() => response[0].startTimestampMillis, [
|
||||
response,
|
||||
]);
|
||||
|
||||
const traceEndTime = useMemo(() => response[0].endTimestampMillis, [response]);
|
||||
|
||||
const urlQuery = useUrlQuery();
|
||||
const [spanId] = useState<string | null>(urlQuery.get('spanId'));
|
||||
|
||||
@@ -266,8 +260,6 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
|
||||
<StyledCol styledclass={[styles.selectedSpanDetailContainer]}>
|
||||
<SelectedSpanDetails
|
||||
firstSpanStartTime={firstSpanStartTime}
|
||||
traceStartTime={traceStartTime}
|
||||
traceEndTime={traceEndTime}
|
||||
tree={[
|
||||
...(getSelectedNode.spanTree ? getSelectedNode.spanTree : []),
|
||||
...(getSelectedNode.missingSpanTree
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import getTriggeredApi from 'api/alerts/getTriggered';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import useAxiosError from 'hooks/useAxiosError';
|
||||
import { isUndefined } from 'lodash-es';
|
||||
import { useEffect, useRef } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
@@ -16,8 +13,6 @@ function TriggeredAlerts(): JSX.Element {
|
||||
(state) => state.app.user?.userId,
|
||||
);
|
||||
|
||||
const hasLoggedEvent = useRef(false); // Track if logEvent has been called
|
||||
|
||||
const handleError = useAxiosError();
|
||||
|
||||
const alertsResponse = useQuery(
|
||||
@@ -34,15 +29,6 @@ function TriggeredAlerts(): JSX.Element {
|
||||
},
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (!hasLoggedEvent.current && !isUndefined(alertsResponse.data?.payload)) {
|
||||
logEvent('Alert: Triggered alert list page visited', {
|
||||
number: alertsResponse.data?.payload?.length,
|
||||
});
|
||||
hasLoggedEvent.current = true;
|
||||
}
|
||||
}, [alertsResponse.data?.payload]);
|
||||
|
||||
if (alertsResponse.error) {
|
||||
return <TriggerComponent allAlerts={[]} />;
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import AppRoutes from 'AppRoutes';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ThemeProvider } from 'hooks/useDarkMode';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import posthog from 'posthog-js';
|
||||
import { createRoot } from 'react-dom/client';
|
||||
import { HelmetProvider } from 'react-helmet-async';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
@@ -34,13 +33,6 @@ const queryClient = new QueryClient({
|
||||
|
||||
const container = document.getElementById('root');
|
||||
|
||||
if (process.env.POSTHOG_KEY) {
|
||||
posthog.init(process.env.POSTHOG_KEY, {
|
||||
api_host: 'https://us.i.posthog.com',
|
||||
person_profiles: 'identified_only', // or 'always' to create profiles for anonymous users as well
|
||||
});
|
||||
}
|
||||
|
||||
Sentry.init({
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
tunnel: process.env.TUNNEL_URL,
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import { Pagination } from 'hooks/queryPagination';
|
||||
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
|
||||
import { isEmpty, cloneDeep } from 'lodash-es';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -40,10 +40,6 @@ export async function GetMetricQueryRange(
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
if (props.formatForWeb) {
|
||||
return response;
|
||||
}
|
||||
|
||||
if (response.payload?.data?.result) {
|
||||
const v2Range = convertNewDataToOld(response.payload);
|
||||
|
||||
@@ -80,7 +76,6 @@ export interface GetQueryResultsProps {
|
||||
variables?: Record<string, unknown>;
|
||||
params?: Record<string, unknown>;
|
||||
fillGaps?: boolean;
|
||||
formatForWeb?: boolean;
|
||||
tableParams?: {
|
||||
pagination?: Pagination;
|
||||
selectColumns?: any;
|
||||
|
||||
@@ -16,7 +16,6 @@ export const prepareQueryRangePayload = ({
|
||||
query,
|
||||
globalSelectedInterval,
|
||||
graphType,
|
||||
formatForWeb,
|
||||
selectedTime,
|
||||
tableParams,
|
||||
variables = {},
|
||||
@@ -103,7 +102,6 @@ export const prepareQueryRangePayload = ({
|
||||
inputFormat: 'ns',
|
||||
}),
|
||||
variables,
|
||||
formatForWeb,
|
||||
compositeQuery,
|
||||
...restParams,
|
||||
};
|
||||
|
||||
@@ -583,11 +583,11 @@ export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({
|
||||
q.series?.sort((a, b) => {
|
||||
let labelA = '';
|
||||
let labelB = '';
|
||||
a.labelsArray?.forEach((lab) => {
|
||||
a.labelsArray.forEach((lab) => {
|
||||
labelA += Object.values(lab)[0];
|
||||
});
|
||||
|
||||
b.labelsArray?.forEach((lab) => {
|
||||
b.labelsArray.forEach((lab) => {
|
||||
labelB += Object.values(lab)[0];
|
||||
});
|
||||
|
||||
|
||||
@@ -64,10 +64,6 @@ export interface GetUPlotChartOptions {
|
||||
function getStackedSeries(apiResponse: QueryData[]): QueryData[] {
|
||||
const series = cloneDeep(apiResponse);
|
||||
|
||||
if (!series) {
|
||||
return series;
|
||||
}
|
||||
|
||||
for (let i = series.length - 2; i >= 0; i--) {
|
||||
const { values } = series[i];
|
||||
for (let j = 0; j < values.length; j++) {
|
||||
@@ -88,9 +84,6 @@ function getStackedSeries(apiResponse: QueryData[]): QueryData[] {
|
||||
*/
|
||||
function getStackedSeriesQueryFormat(apiResponse: QueryData[]): QueryData[] {
|
||||
const series = cloneDeep(apiResponse);
|
||||
if (!series) {
|
||||
return apiResponse;
|
||||
}
|
||||
|
||||
for (let i = series.length - 2; i >= 0; i--) {
|
||||
const { values } = series[i];
|
||||
@@ -109,12 +102,9 @@ function getStackedSeriesQueryFormat(apiResponse: QueryData[]): QueryData[] {
|
||||
|
||||
function getStackedSeriesYAxis(apiResponse: QueryDataV3[]): QueryDataV3[] {
|
||||
const series = cloneDeep(apiResponse);
|
||||
if (!series) {
|
||||
return apiResponse;
|
||||
}
|
||||
|
||||
for (let i = 0; i < series.length; i++) {
|
||||
series[i].series = getStackedSeriesQueryFormat(series[i].series || []);
|
||||
series[i].series = getStackedSeriesQueryFormat(series[i].series);
|
||||
}
|
||||
|
||||
return series;
|
||||
|
||||
@@ -1,220 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
|
||||
export const membersResponse = [
|
||||
{
|
||||
id: '3223a874-5678458745786',
|
||||
name: 'John Doe',
|
||||
email: 'firstUser@test.io',
|
||||
createdAt: 1666357530,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '5e9681b1-5678458745786',
|
||||
name: 'Jane Doe',
|
||||
email: 'johndoe2@test.io',
|
||||
createdAt: 1666365394,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '11e8c55d-5678458745786',
|
||||
name: 'Alex',
|
||||
email: 'blah@test.io',
|
||||
createdAt: 1666366317,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: 'd878012367813286731aab62',
|
||||
role: 'VIEWER',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '2ad2e404-5678458745786',
|
||||
name: 'Tom',
|
||||
email: 'johndoe4@test.io',
|
||||
createdAt: 1673441483,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '6f532456-5678458745786',
|
||||
name: 'Harry',
|
||||
email: 'harry@test.io',
|
||||
createdAt: 1691551672,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: 'ae22fa73-5678458745786',
|
||||
name: 'Ron',
|
||||
email: 'ron@test.io',
|
||||
createdAt: 1691668239,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '3223a874-5678458745786',
|
||||
name: 'John Doe',
|
||||
email: 'johndoe@test.io',
|
||||
createdAt: 1666357530,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '5e9681b1-5678458745786',
|
||||
name: 'Jane Doe',
|
||||
email: 'johndoe2@test.io',
|
||||
createdAt: 1666365394,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '11e8c55d-5678458745786',
|
||||
name: 'Alex',
|
||||
email: 'blah@test.io',
|
||||
createdAt: 1666366317,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: 'd878012367813286731aab62',
|
||||
role: 'VIEWER',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '2ad2e404-5678458745786',
|
||||
name: 'Tom',
|
||||
email: 'johndoe4@test.io',
|
||||
createdAt: 1673441483,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '6f532456-5678458745786',
|
||||
name: 'Harry',
|
||||
email: 'harry@test.io',
|
||||
createdAt: 1691551672,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: 'ae22fa73-5678458745786',
|
||||
name: 'Ron',
|
||||
email: 'ron@test.io',
|
||||
createdAt: 1691668239,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '3223a874-5678458745786',
|
||||
name: 'John Doe',
|
||||
email: 'johndoe@test.io',
|
||||
createdAt: 1666357530,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '5e9681b1-5678458745786',
|
||||
name: 'Jane Doe',
|
||||
email: 'johndoe2@test.io',
|
||||
createdAt: 1666365394,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '11e8c55d-5678458745786',
|
||||
name: 'Alex',
|
||||
email: 'blah@test.io',
|
||||
createdAt: 1666366317,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: 'd878012367813286731aab62',
|
||||
role: 'VIEWER',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '2ad2e404-5678458745786',
|
||||
name: 'Tom',
|
||||
email: 'johndoe4@test.io',
|
||||
createdAt: 1673441483,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: '6f532456-5678458745786',
|
||||
name: 'Harry',
|
||||
email: 'harry@test.io',
|
||||
createdAt: 1691551672,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
{
|
||||
id: 'ae22fa73-5678458745786',
|
||||
name: 'Ron',
|
||||
email: 'lastUser@test.io',
|
||||
createdAt: 1691668239,
|
||||
profilePictureURL: '',
|
||||
orgId: '1287612376312867312867',
|
||||
groupId: '5678458745786',
|
||||
role: 'ADMIN',
|
||||
organization: 'Test Inc',
|
||||
flags: null,
|
||||
},
|
||||
];
|
||||
@@ -2,7 +2,6 @@ import { rest } from 'msw';
|
||||
|
||||
import { billingSuccessResponse } from './__mockdata__/billing';
|
||||
import { licensesSuccessResponse } from './__mockdata__/licenses';
|
||||
import { membersResponse } from './__mockdata__/members';
|
||||
import { queryRangeSuccessResponse } from './__mockdata__/query_range';
|
||||
import { serviceSuccessResponse } from './__mockdata__/services';
|
||||
import { topLevelOperationSuccessResponse } from './__mockdata__/top_level_operations';
|
||||
@@ -26,9 +25,6 @@ export const handlers = [
|
||||
res(ctx.status(200), ctx.json(topLevelOperationSuccessResponse)),
|
||||
),
|
||||
|
||||
rest.get('http://localhost/api/v1/orgUsers/*', (req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(membersResponse)),
|
||||
),
|
||||
rest.get(
|
||||
'http://localhost/api/v3/autocomplete/attribute_keys',
|
||||
(req, res, ctx) => {
|
||||
|
||||
@@ -46,8 +46,6 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
|
||||
},
|
||||
];
|
||||
|
||||
const paginationConfig = { pageSize: 20, hideOnSinglePage: true };
|
||||
|
||||
return (
|
||||
<div className="integration-data-collected">
|
||||
<div className="logs-section">
|
||||
@@ -61,7 +59,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
|
||||
index % 2 === 0 ? 'table-row-dark' : ''
|
||||
}
|
||||
dataSource={logsData}
|
||||
pagination={paginationConfig}
|
||||
pagination={{ pageSize: 20 }}
|
||||
className="logs-section-table"
|
||||
/>
|
||||
</div>
|
||||
@@ -76,7 +74,7 @@ function DataCollected(props: DataCollectedProps): JSX.Element {
|
||||
index % 2 === 0 ? 'table-row-dark' : ''
|
||||
}
|
||||
dataSource={metricsData}
|
||||
pagination={paginationConfig}
|
||||
pagination={{ pageSize: 20 }}
|
||||
className="metrics-section-table"
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -277,8 +277,6 @@ function SaveView(): JSX.Element {
|
||||
},
|
||||
];
|
||||
|
||||
const paginationConfig = { pageSize: 5, hideOnSinglePage: true };
|
||||
|
||||
return (
|
||||
<div className="save-view-container">
|
||||
<div className="save-view-content">
|
||||
@@ -305,7 +303,7 @@ function SaveView(): JSX.Element {
|
||||
dataSource={dataSource}
|
||||
loading={isLoading || isRefetching}
|
||||
showHeader={false}
|
||||
pagination={paginationConfig}
|
||||
pagination={{ pageSize: 5 }}
|
||||
/>
|
||||
</div>
|
||||
|
||||
|
||||
69
frontend/src/periscope/components/Table/Table.styles.scss
Normal file
69
frontend/src/periscope/components/Table/Table.styles.scss
Normal file
@@ -0,0 +1,69 @@
|
||||
table,
|
||||
.divTable {
|
||||
border: 1px solid lightgray;
|
||||
width: fit-content;
|
||||
}
|
||||
|
||||
.tr {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
tr,
|
||||
.tr {
|
||||
width: fit-content;
|
||||
height: 30px;
|
||||
}
|
||||
|
||||
th,
|
||||
.th,
|
||||
td,
|
||||
.td {
|
||||
box-shadow: inset 0 0 0 1px lightgray;
|
||||
padding: 0.25rem;
|
||||
}
|
||||
|
||||
th,
|
||||
.th {
|
||||
padding: 2px 4px;
|
||||
position: relative;
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
height: 30px;
|
||||
}
|
||||
|
||||
td,
|
||||
.td {
|
||||
height: 30px;
|
||||
}
|
||||
|
||||
.resizer {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
right: 0;
|
||||
width: 5px;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
cursor: col-resize;
|
||||
user-select: none;
|
||||
touch-action: none;
|
||||
}
|
||||
|
||||
.resizer.isResizing {
|
||||
background: blue;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
@media (hover: hover) {
|
||||
.resizer {
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
*:hover > .resizer {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
|
||||
.container {
|
||||
border: 1px solid lightgray;
|
||||
margin: 1rem auto;
|
||||
}
|
||||
295
frontend/src/periscope/components/Table/Table.tsx
Normal file
295
frontend/src/periscope/components/Table/Table.tsx
Normal file
@@ -0,0 +1,295 @@
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable react/no-unstable-nested-components */
|
||||
import './Table.styles.scss';
|
||||
|
||||
// needed for table body level scope DnD setup
|
||||
import {
|
||||
closestCenter,
|
||||
DndContext,
|
||||
DragEndEvent,
|
||||
KeyboardSensor,
|
||||
MouseSensor,
|
||||
TouchSensor,
|
||||
useSensor,
|
||||
useSensors,
|
||||
} from '@dnd-kit/core';
|
||||
import { restrictToHorizontalAxis } from '@dnd-kit/modifiers';
|
||||
import {
|
||||
arrayMove,
|
||||
horizontalListSortingStrategy,
|
||||
SortableContext,
|
||||
useSortable,
|
||||
} from '@dnd-kit/sortable';
|
||||
// needed for row & cell level scope DnD setup
|
||||
import { CSS } from '@dnd-kit/utilities';
|
||||
import {
|
||||
Cell,
|
||||
ColumnDef,
|
||||
flexRender,
|
||||
getCoreRowModel,
|
||||
Header,
|
||||
Table,
|
||||
useReactTable,
|
||||
} from '@tanstack/react-table';
|
||||
import React, { CSSProperties } from 'react';
|
||||
|
||||
import { makeData, Person } from './makeData';
|
||||
|
||||
function DraggableTableHeader({
|
||||
header,
|
||||
}: {
|
||||
header: Header<Person, unknown>;
|
||||
}): JSX.Element {
|
||||
const {
|
||||
attributes,
|
||||
isDragging,
|
||||
listeners,
|
||||
setNodeRef,
|
||||
transform,
|
||||
} = useSortable({
|
||||
id: header.column.id,
|
||||
});
|
||||
|
||||
const style: CSSProperties = {
|
||||
opacity: isDragging ? 0.8 : 1,
|
||||
position: 'relative',
|
||||
transform: CSS.Translate.toString(transform), // translate instead of transform to avoid squishing
|
||||
transition: 'width transform 0.2s ease-in-out',
|
||||
whiteSpace: 'nowrap',
|
||||
width: header.column.getSize(),
|
||||
zIndex: isDragging ? 1 : 0,
|
||||
};
|
||||
|
||||
return (
|
||||
<th colSpan={header.colSpan} ref={setNodeRef} style={style}>
|
||||
{header.isPlaceholder
|
||||
? null
|
||||
: flexRender(header.column.columnDef.header, header.getContext())}
|
||||
<button type="button" {...attributes} {...listeners}>
|
||||
🟰
|
||||
</button>
|
||||
|
||||
<div
|
||||
{...{
|
||||
onDoubleClick: (): void => header.column.resetSize(),
|
||||
onMouseDown: header.getResizeHandler(),
|
||||
onTouchStart: header.getResizeHandler(),
|
||||
className: `resizer ${header.column.getIsResizing() ? 'isResizing' : ''}`,
|
||||
}}
|
||||
/>
|
||||
</th>
|
||||
);
|
||||
}
|
||||
|
||||
function DragAlongCell({ cell }: { cell: Cell<Person, unknown> }): JSX.Element {
|
||||
const { isDragging, setNodeRef, transform } = useSortable({
|
||||
id: cell.column.id,
|
||||
});
|
||||
|
||||
const style: CSSProperties = {
|
||||
opacity: isDragging ? 0.8 : 1,
|
||||
position: 'relative',
|
||||
transform: CSS.Translate.toString(transform), // translate instead of transform to avoid squishing
|
||||
transition: 'width transform 0.2s ease-in-out',
|
||||
width: cell.column.getSize(),
|
||||
zIndex: isDragging ? 1 : 0,
|
||||
};
|
||||
|
||||
return (
|
||||
<td style={style} ref={setNodeRef}>
|
||||
{flexRender(cell.column.columnDef.cell, cell.getContext())}
|
||||
</td>
|
||||
);
|
||||
}
|
||||
|
||||
// un-memoized normal table body component - see memoized version below
|
||||
function TableBody({ table }: { table: Table<Person> }): JSX.Element {
|
||||
const { columnOrder } = table.getState();
|
||||
|
||||
console.log('columnOrder', columnOrder);
|
||||
|
||||
return (
|
||||
<div
|
||||
{...{
|
||||
className: 'tbody',
|
||||
}}
|
||||
>
|
||||
{table.getRowModel().rows.map((row) => (
|
||||
<div key={row.id} className="tr">
|
||||
{row.getVisibleCells().map((cell) => (
|
||||
<SortableContext
|
||||
key={cell.id}
|
||||
items={columnOrder}
|
||||
strategy={horizontalListSortingStrategy}
|
||||
>
|
||||
<DragAlongCell key={cell.id} cell={cell} />
|
||||
</SortableContext>
|
||||
))}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// special memoized wrapper for our table body that we will use during column resizing
|
||||
export const MemoizedTableBody = React.memo(
|
||||
TableBody,
|
||||
(prev, next) => prev.table.options.data === next.table.options.data,
|
||||
) as typeof TableBody;
|
||||
|
||||
function PeriscopeTable(): JSX.Element {
|
||||
const columns = React.useMemo<ColumnDef<Person>[]>(
|
||||
() => [
|
||||
{
|
||||
accessorKey: 'firstName',
|
||||
cell: (info): any => info.getValue(),
|
||||
id: 'firstName',
|
||||
size: 150,
|
||||
},
|
||||
{
|
||||
accessorFn: (row): any => row.lastName,
|
||||
cell: (info): any => info.getValue(),
|
||||
header: (): JSX.Element => <span>Last Name</span>,
|
||||
id: 'lastName',
|
||||
size: 150,
|
||||
},
|
||||
{
|
||||
accessorKey: 'age',
|
||||
header: (): any => 'Age',
|
||||
id: 'age',
|
||||
size: 120,
|
||||
},
|
||||
{
|
||||
accessorKey: 'visits',
|
||||
header: (): JSX.Element => <span>Visits</span>,
|
||||
id: 'visits',
|
||||
size: 120,
|
||||
},
|
||||
{
|
||||
accessorKey: 'status',
|
||||
header: 'Status',
|
||||
id: 'status',
|
||||
size: 150,
|
||||
},
|
||||
{
|
||||
accessorKey: 'progress',
|
||||
header: 'Profile Progress',
|
||||
id: 'progress',
|
||||
size: 180,
|
||||
},
|
||||
],
|
||||
[],
|
||||
);
|
||||
const [data, setData] = React.useState(() => makeData(20));
|
||||
const [columnOrder, setColumnOrder] = React.useState<string[]>(() =>
|
||||
columns.map((c) => c.id!),
|
||||
);
|
||||
const [columnVisibility, setColumnVisibility] = React.useState({});
|
||||
|
||||
const table = useReactTable({
|
||||
data,
|
||||
columns,
|
||||
getCoreRowModel: getCoreRowModel(),
|
||||
state: {
|
||||
columnOrder,
|
||||
columnVisibility,
|
||||
},
|
||||
defaultColumn: {
|
||||
minSize: 60,
|
||||
maxSize: 800,
|
||||
},
|
||||
columnResizeMode: 'onChange',
|
||||
onColumnOrderChange: setColumnOrder,
|
||||
onColumnVisibilityChange: setColumnVisibility,
|
||||
debugTable: true,
|
||||
debugHeaders: true,
|
||||
debugColumns: true,
|
||||
});
|
||||
|
||||
/**
|
||||
* Instead of calling `column.getSize()` on every render for every header
|
||||
* and especially every data cell (very expensive),
|
||||
* we will calculate all column sizes at once at the root table level in a useMemo
|
||||
* and pass the column sizes down as CSS variables to the <table> element.
|
||||
*/
|
||||
const columnSizeVars = React.useMemo(() => {
|
||||
const headers = table.getFlatHeaders();
|
||||
const colSizes: { [key: string]: number } = {};
|
||||
for (let i = 0; i < headers.length; i++) {
|
||||
const header = headers[i]!;
|
||||
colSizes[`--header-${header.id}-size`] = header.getSize();
|
||||
colSizes[`--col-${header.column.id}-size`] = header.column.getSize();
|
||||
}
|
||||
return colSizes;
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [table.getState().columnSizingInfo, table.getState().columnSizing]);
|
||||
|
||||
// reorder columns after drag & drop
|
||||
function handleDragEnd(event: DragEndEvent): void {
|
||||
const { active, over } = event;
|
||||
|
||||
console.log('active', active, over);
|
||||
if (active && over && active.id !== over.id) {
|
||||
setColumnOrder((columnOrder) => {
|
||||
const oldIndex = columnOrder.indexOf(active.id as string);
|
||||
const newIndex = columnOrder.indexOf(over.id as string);
|
||||
return arrayMove(columnOrder, oldIndex, newIndex); // this is just a splice util
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const sensors = useSensors(
|
||||
useSensor(MouseSensor, {}),
|
||||
useSensor(TouchSensor, {}),
|
||||
useSensor(KeyboardSensor, {}),
|
||||
);
|
||||
|
||||
return (
|
||||
<DndContext
|
||||
collisionDetection={closestCenter}
|
||||
modifiers={[restrictToHorizontalAxis]}
|
||||
// eslint-disable-next-line react/jsx-no-bind
|
||||
onDragEnd={handleDragEnd}
|
||||
sensors={sensors}
|
||||
>
|
||||
<div className="p-2">
|
||||
<div className="overflow-x-auto">
|
||||
<div
|
||||
className="divTable"
|
||||
style={{
|
||||
...columnSizeVars, // Define column sizes on the <table> element
|
||||
width: table.getTotalSize(),
|
||||
}}
|
||||
>
|
||||
<div className="thead">
|
||||
{table.getHeaderGroups().map((headerGroup) => (
|
||||
<div key={headerGroup.id} className="tr">
|
||||
<SortableContext
|
||||
items={columnOrder}
|
||||
strategy={horizontalListSortingStrategy}
|
||||
>
|
||||
{headerGroup.headers.map((header) => (
|
||||
<div
|
||||
key={header.id}
|
||||
className="th"
|
||||
style={{
|
||||
width: `calc(var(--header-${header?.id}-size) * 1px)`,
|
||||
}}
|
||||
>
|
||||
<DraggableTableHeader key={header.id} header={header} />
|
||||
</div>
|
||||
))}
|
||||
</SortableContext>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<TableBody table={table} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</DndContext>
|
||||
);
|
||||
}
|
||||
|
||||
export default PeriscopeTable;
|
||||
46
frontend/src/periscope/components/Table/makeData.ts
Normal file
46
frontend/src/periscope/components/Table/makeData.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { faker } from '@faker-js/faker';
|
||||
|
||||
export type Person = {
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
age: number;
|
||||
visits: number;
|
||||
progress: number;
|
||||
status: 'relationship' | 'complicated' | 'single';
|
||||
subRows?: Person[];
|
||||
};
|
||||
|
||||
const range = (len: number) => {
|
||||
const arr: number[] = [];
|
||||
for (let i = 0; i < len; i++) {
|
||||
arr.push(i);
|
||||
}
|
||||
return arr;
|
||||
};
|
||||
|
||||
const newPerson = (): Person => ({
|
||||
firstName: faker.person.firstName(),
|
||||
lastName: faker.person.lastName(),
|
||||
age: faker.number.int(40),
|
||||
visits: faker.number.int(1000),
|
||||
progress: faker.number.int(100),
|
||||
status: faker.helpers.shuffle<Person['status']>([
|
||||
'relationship',
|
||||
'complicated',
|
||||
'single',
|
||||
])[0]!,
|
||||
});
|
||||
|
||||
export function makeData(...lens: number[]): any {
|
||||
const makeDataLevel = (depth = 0): Person[] => {
|
||||
const len = lens[depth]!;
|
||||
return range(len).map(
|
||||
(d): Person => ({
|
||||
...newPerson(),
|
||||
subRows: lens[depth + 1] ? makeDataLevel(depth + 1) : undefined,
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
return makeDataLevel();
|
||||
}
|
||||
@@ -42,15 +42,6 @@ const mockStored = (role?: string): any =>
|
||||
accessJwt: '',
|
||||
refreshJwt: '',
|
||||
},
|
||||
org: [
|
||||
{
|
||||
createdAt: 0,
|
||||
hasOptedUpdates: false,
|
||||
id: 'xyz',
|
||||
isAnonymous: false,
|
||||
name: 'Test Inc. - India',
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -24,7 +24,6 @@ export type QueryRangePayload = {
|
||||
start: number;
|
||||
step: number;
|
||||
variables?: Record<string, unknown>;
|
||||
formatForWeb?: boolean;
|
||||
[param: string]: unknown;
|
||||
};
|
||||
export interface MetricRangePayloadProps {
|
||||
|
||||
@@ -15,8 +15,6 @@ export interface PayloadProps {
|
||||
segmentID: string;
|
||||
columns: string[];
|
||||
isSubTree: boolean;
|
||||
startTimestampMillis: number;
|
||||
endTimestampMillis: number;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -10,6 +10,3 @@ export const getGraphType = (panelType: PANEL_TYPES): PANEL_TYPES => {
|
||||
}
|
||||
return panelType;
|
||||
};
|
||||
|
||||
export const getGraphTypeForFormat = (panelType: PANEL_TYPES): PANEL_TYPES =>
|
||||
panelType;
|
||||
|
||||
@@ -22,7 +22,6 @@ const plugins = [
|
||||
template: 'src/index.html.ejs',
|
||||
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
|
||||
SEGMENT_ID: process.env.SEGMENT_ID,
|
||||
POSTHOG_KEY: process.env.POSTHOG_KEY,
|
||||
CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
|
||||
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
|
||||
SENTRY_ORG: process.env.SENTRY_ORG,
|
||||
@@ -40,7 +39,6 @@ const plugins = [
|
||||
FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
|
||||
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
|
||||
SEGMENT_ID: process.env.SEGMENT_ID,
|
||||
POSTHOG_KEY: process.env.POSTHOG_KEY,
|
||||
CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
|
||||
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
|
||||
SENTRY_ORG: process.env.SENTRY_ORG,
|
||||
|
||||
@@ -27,7 +27,6 @@ const plugins = [
|
||||
template: 'src/index.html.ejs',
|
||||
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
|
||||
SEGMENT_ID: process.env.SEGMENT_ID,
|
||||
POSTHOG_KEY: process.env.POSTHOG_KEY,
|
||||
CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
|
||||
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
|
||||
SENTRY_ORG: process.env.SENTRY_ORG,
|
||||
@@ -50,7 +49,6 @@ const plugins = [
|
||||
FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
|
||||
INTERCOM_APP_ID: process.env.INTERCOM_APP_ID,
|
||||
SEGMENT_ID: process.env.SEGMENT_ID,
|
||||
POSTHOG_KEY: process.env.POSTHOG_KEY,
|
||||
CLARITY_PROJECT_ID: process.env.CLARITY_PROJECT_ID,
|
||||
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
|
||||
SENTRY_ORG: process.env.SENTRY_ORG,
|
||||
|
||||
@@ -2541,7 +2541,7 @@
|
||||
"@dnd-kit/utilities" "^3.2.2"
|
||||
tslib "^2.0.0"
|
||||
|
||||
"@dnd-kit/utilities@^3.2.2":
|
||||
"@dnd-kit/utilities@3.2.2", "@dnd-kit/utilities@^3.2.2":
|
||||
version "3.2.2"
|
||||
resolved "https://registry.yarnpkg.com/@dnd-kit/utilities/-/utilities-3.2.2.tgz#5a32b6af356dc5f74d61b37d6f7129a4040ced7b"
|
||||
integrity sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==
|
||||
@@ -2597,6 +2597,11 @@
|
||||
minimatch "^3.0.4"
|
||||
strip-json-comments "^3.1.1"
|
||||
|
||||
"@faker-js/faker@8.4.1":
|
||||
version "8.4.1"
|
||||
resolved "https://registry.yarnpkg.com/@faker-js/faker/-/faker-8.4.1.tgz#5d5e8aee8fce48f5e189bf730ebd1f758f491451"
|
||||
integrity sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg==
|
||||
|
||||
"@floating-ui/core@^1.4.2":
|
||||
version "1.5.2"
|
||||
resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.5.2.tgz#53a0f7a98c550e63134d504f26804f6b83dbc071"
|
||||
@@ -3800,6 +3805,18 @@
|
||||
dependencies:
|
||||
"@sinonjs/commons" "^1.7.0"
|
||||
|
||||
"@tanstack/react-table@8.17.3":
|
||||
version "8.17.3"
|
||||
resolved "https://registry.yarnpkg.com/@tanstack/react-table/-/react-table-8.17.3.tgz#4e10b4cf5355a40d6d72a83d3f4b3ecd32f56bf4"
|
||||
integrity sha512-5gwg5SvPD3lNAXPuJJz1fOCEZYk9/GeBFH3w/hCgnfyszOIzwkwgp5I7Q4MJtn0WECp84b5STQUDdmvGi8m3nA==
|
||||
dependencies:
|
||||
"@tanstack/table-core" "8.17.3"
|
||||
|
||||
"@tanstack/table-core@8.17.3":
|
||||
version "8.17.3"
|
||||
resolved "https://registry.yarnpkg.com/@tanstack/table-core/-/table-core-8.17.3.tgz#d7a9830abb29cd369b52b2a7159dc0360af646fd"
|
||||
integrity sha512-mPBodDGVL+fl6d90wUREepHa/7lhsghg2A3vFpakEhrhtbIlgNAZiMr7ccTgak5qbHqF14Fwy+W1yFWQt+WmYQ==
|
||||
|
||||
"@testing-library/dom@^8.5.0":
|
||||
version "8.20.0"
|
||||
resolved "https://registry.npmjs.org/@testing-library/dom/-/dom-8.20.0.tgz"
|
||||
@@ -6195,11 +6212,11 @@ brace-expansion@^2.0.1:
|
||||
balanced-match "^1.0.0"
|
||||
|
||||
braces@^3.0.2, braces@~3.0.2:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789"
|
||||
integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==
|
||||
version "3.0.2"
|
||||
resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz"
|
||||
integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
|
||||
dependencies:
|
||||
fill-range "^7.1.1"
|
||||
fill-range "^7.0.1"
|
||||
|
||||
broadcast-channel@^3.4.1:
|
||||
version "3.7.0"
|
||||
@@ -8776,11 +8793,6 @@ fb-watchman@^2.0.0:
|
||||
dependencies:
|
||||
bser "2.1.1"
|
||||
|
||||
fflate@^0.4.8:
|
||||
version "0.4.8"
|
||||
resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae"
|
||||
integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==
|
||||
|
||||
figures@^3.0.0:
|
||||
version "3.2.0"
|
||||
resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af"
|
||||
@@ -8808,10 +8820,10 @@ file-saver@^2.0.2:
|
||||
resolved "https://registry.yarnpkg.com/file-saver/-/file-saver-2.0.5.tgz#d61cfe2ce059f414d899e9dd6d4107ee25670c38"
|
||||
integrity sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==
|
||||
|
||||
fill-range@^7.1.1:
|
||||
version "7.1.1"
|
||||
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292"
|
||||
integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==
|
||||
fill-range@^7.0.1:
|
||||
version "7.0.1"
|
||||
resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz"
|
||||
integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
|
||||
dependencies:
|
||||
to-regex-range "^5.0.1"
|
||||
|
||||
@@ -13705,20 +13717,6 @@ postcss@8.4.38, postcss@^8.0.0, postcss@^8.1.1, postcss@^8.3.7, postcss@^8.4.21,
|
||||
picocolors "^1.0.0"
|
||||
source-map-js "^1.2.0"
|
||||
|
||||
posthog-js@1.142.1:
|
||||
version "1.142.1"
|
||||
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.142.1.tgz#3b91229732938c5c76b5ee6d410698a267e073e9"
|
||||
integrity sha512-yqeWTWitlb0sCaH5v6s7UJ+pPspzf/lkzPaSE5pMMXRM2i2KNsMoZEAZqbPCW8fQ8QL6lHs6d8PLjHrvbR288w==
|
||||
dependencies:
|
||||
fflate "^0.4.8"
|
||||
preact "^10.19.3"
|
||||
web-vitals "^4.0.1"
|
||||
|
||||
preact@^10.19.3:
|
||||
version "10.22.0"
|
||||
resolved "https://registry.yarnpkg.com/preact/-/preact-10.22.0.tgz#a50f38006ae438d255e2631cbdaf7488e6dd4e16"
|
||||
integrity sha512-RRurnSjJPj4rp5K6XoP45Ui33ncb7e4H7WiOHVpjbkvqvA3U+N8Z6Qbo0AE6leGYBV66n8EhEaFixvIu3SkxFw==
|
||||
|
||||
prelude-ls@^1.2.1:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz"
|
||||
@@ -17219,11 +17217,6 @@ web-vitals@^0.2.4:
|
||||
resolved "https://registry.npmjs.org/web-vitals/-/web-vitals-0.2.4.tgz"
|
||||
integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg==
|
||||
|
||||
web-vitals@^4.0.1:
|
||||
version "4.2.0"
|
||||
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-4.2.0.tgz#008949ab79717a68ccaaa3c4371cbc7bbbd78a92"
|
||||
integrity sha512-ohj72kbtVWCpKYMxcbJ+xaOBV3En76hW47j52dG+tEGG36LZQgfFw5yHl9xyjmosy3XUMn8d/GBUAy4YPM839w==
|
||||
|
||||
web-worker@^1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz"
|
||||
@@ -17638,14 +17631,14 @@ write-file-atomic@^4.0.2:
|
||||
signal-exit "^3.0.7"
|
||||
|
||||
ws@^7.3.1, ws@^7.4.6:
|
||||
version "7.5.10"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
|
||||
integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==
|
||||
version "7.5.9"
|
||||
resolved "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz"
|
||||
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
|
||||
|
||||
ws@^8.13.0:
|
||||
version "8.17.1"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"
|
||||
integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==
|
||||
version "8.13.0"
|
||||
resolved "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz"
|
||||
integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==
|
||||
|
||||
xhr-request@^1.0.1:
|
||||
version "1.1.0"
|
||||
|
||||
3
go.mod
3
go.mod
@@ -6,7 +6,7 @@ require (
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.20.0
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
||||
github.com/SigNoz/signoz-otel-collector v0.102.2
|
||||
github.com/SigNoz/signoz-otel-collector v0.102.0
|
||||
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
|
||||
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
|
||||
github.com/antonmedv/expr v1.15.3
|
||||
@@ -37,6 +37,7 @@ require (
|
||||
github.com/opentracing/opentracing-go v1.2.0
|
||||
github.com/patrickmn/go-cache v2.1.0+incompatible
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f
|
||||
github.com/prometheus/common v0.54.0
|
||||
github.com/prometheus/prometheus v2.5.0+incompatible
|
||||
github.com/rs/cors v1.11.0
|
||||
|
||||
14
go.sum
14
go.sum
@@ -64,8 +64,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
||||
github.com/SigNoz/prometheus v1.11.1 h1:roM8ugYf4UxaeKKujEeBvoX7ybq3IrS+TB26KiRtIJg=
|
||||
github.com/SigNoz/prometheus v1.11.1/go.mod h1:uv4mQwZQtx7y4GQ6EdHOi8Wsk07uHNn2XHd1zM85m6I=
|
||||
github.com/SigNoz/signoz-otel-collector v0.102.2 h1:SmjsBZjMjTVVpuOlfJXlsDJQbdefQP/9Wz3CyzSuZuU=
|
||||
github.com/SigNoz/signoz-otel-collector v0.102.2/go.mod h1:ISAXYhZenojCWg6CdDJtPMpfS6Zwc08+uoxH25tc6Y0=
|
||||
github.com/SigNoz/signoz-otel-collector v0.102.0 h1:v6ap+gdvrKklMwU+M9FJgrn28vN0YxrINl3kvdcLonA=
|
||||
github.com/SigNoz/signoz-otel-collector v0.102.0/go.mod h1:kCx5BfzDujq6C0+kotiqLp5COG2ut4Cb039+55rbWE0=
|
||||
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
|
||||
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
|
||||
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
|
||||
@@ -137,6 +137,7 @@ github.com/coreos/go-oidc/v3 v3.10.0 h1:tDnXHnLyiTVyT/2zLDGj09pFPkhND8Gl8lnTRhoE
|
||||
github.com/coreos/go-oidc/v3 v3.10.0/go.mod h1:5j11xcw0D3+SGxn6Z/WFADsgcWVMyNAlSQupk0KK3ac=
|
||||
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@@ -378,8 +379,8 @@ github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+l
|
||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
|
||||
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.4 h1:ZQgVdpTdAL7WpMIwLzCfbalOcSUdkDZnpUv3/+BxzFA=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8=
|
||||
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
|
||||
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||
@@ -624,6 +625,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
|
||||
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
|
||||
github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f h1:h0p1aZ9F5d6IXOygysob3g4B07b+HuVUQC0VJKD8wA4=
|
||||
github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f/go.mod h1:oa2sAs9tGai3VldabTV0eWejt/O4/OOD7azP8GaikqU=
|
||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c h1:NRoLoZvkBTKvR5gQLgA3e0hqjkY9u1wm+iOL45VN/qI=
|
||||
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
@@ -670,6 +673,7 @@ github.com/russellhaering/gosaml2 v0.9.0 h1:CNMnH42z/GirrKjdmNrSS6bAAs47F9bPdl4P
|
||||
github.com/russellhaering/gosaml2 v0.9.0/go.mod h1:byViER/1YPUa0Puj9ROZblpoq2jsE7h/CJmitzX0geU=
|
||||
github.com/russellhaering/goxmldsig v1.2.0 h1:Y6GTTc9Un5hCxSzVz4UIWQ/zuVwDvzJk80guqzwx6Vg=
|
||||
github.com/russellhaering/goxmldsig v1.2.0/go.mod h1:gM4MDENBQf7M+V824SGfyIUVFWydB7n0KkEubVJl+Tw=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
@@ -693,6 +697,7 @@ github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
|
||||
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
|
||||
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
|
||||
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo=
|
||||
@@ -742,6 +747,7 @@ github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFA
|
||||
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
||||
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
|
||||
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
||||
github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc=
|
||||
github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4=
|
||||
github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ=
|
||||
|
||||
@@ -53,6 +53,7 @@ import (
|
||||
"go.signoz.io/signoz/pkg/query-service/interfaces"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/rules"
|
||||
"go.signoz.io/signoz/pkg/query-service/telemetry"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils"
|
||||
)
|
||||
@@ -1923,7 +1924,6 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
|
||||
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_TRACE_DETAIL_API, data, userEmail, true, false)
|
||||
}
|
||||
|
||||
var startTime, endTime, durationNano uint64
|
||||
var searchScanResponses []model.SearchSpanDBResponseItem
|
||||
|
||||
query := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
|
||||
@@ -1941,7 +1941,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
|
||||
end := time.Now()
|
||||
zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))
|
||||
searchSpansResult := []model.SearchSpansResult{{
|
||||
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
|
||||
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
|
||||
Events: make([][]interface{}, len(searchScanResponses)),
|
||||
IsSubTree: false,
|
||||
},
|
||||
@@ -1954,15 +1954,6 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
|
||||
easyjson.Unmarshal([]byte(item.Model), &jsonItem)
|
||||
jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano() / 1000000)
|
||||
searchSpanResponses = append(searchSpanResponses, jsonItem)
|
||||
if startTime == 0 || jsonItem.TimeUnixNano < startTime {
|
||||
startTime = jsonItem.TimeUnixNano
|
||||
}
|
||||
if endTime == 0 || jsonItem.TimeUnixNano > endTime {
|
||||
endTime = jsonItem.TimeUnixNano
|
||||
}
|
||||
if durationNano == 0 || uint64(jsonItem.DurationNano) > durationNano {
|
||||
durationNano = uint64(jsonItem.DurationNano)
|
||||
}
|
||||
}
|
||||
end = time.Now()
|
||||
zap.L().Debug("getTraceSQLQuery unmarshal took: ", zap.Duration("duration", end.Sub(start)))
|
||||
@@ -1992,9 +1983,6 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
|
||||
}
|
||||
}
|
||||
|
||||
searchSpansResult[0].StartTimestampMillis = startTime - (durationNano / 1000000)
|
||||
searchSpansResult[0].EndTimestampMillis = endTime + (durationNano / 1000000)
|
||||
|
||||
return &searchSpansResult, nil
|
||||
}
|
||||
|
||||
@@ -3231,7 +3219,7 @@ func (r *ClickHouseReader) GetSamplesInfoInLastHeartBeatInterval(ctx context.Con
|
||||
|
||||
var totalSamples uint64
|
||||
|
||||
queryStr := fmt.Sprintf("select count() from %s.%s where metric_name not like 'signoz_%%' and unix_milli > toUnixTimestamp(now()-toIntervalMinute(%d))*1000;", signozMetricDBName, signozSampleTableName, int(interval.Minutes()))
|
||||
queryStr := fmt.Sprintf("select count() from %s.%s where metric_name not like 'signoz_%%' and timestamp_ms > toUnixTimestamp(now()-toIntervalMinute(%d))*1000;", signozMetricDBName, signozSampleTableName, int(interval.Minutes()))
|
||||
|
||||
r.db.QueryRow(ctx, queryStr).Scan(&totalSamples)
|
||||
|
||||
@@ -3419,6 +3407,36 @@ func countPanelsInDashboard(data map[string]interface{}) model.DashboardsInfo {
|
||||
}
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
|
||||
alertsInfo := model.AlertsInfo{}
|
||||
// fetch alerts from rules db
|
||||
query := "SELECT data FROM rules"
|
||||
var alertsData []string
|
||||
err := r.localDB.Select(&alertsData, query)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return &alertsInfo, err
|
||||
}
|
||||
for _, alert := range alertsData {
|
||||
var rule rules.GettableRule
|
||||
err = json.Unmarshal([]byte(alert), &rule)
|
||||
if err != nil {
|
||||
zap.L().Error("invalid rule data", zap.Error(err))
|
||||
continue
|
||||
}
|
||||
if rule.AlertType == "LOGS_BASED_ALERT" {
|
||||
alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
|
||||
} else if rule.AlertType == "METRIC_BASED_ALERT" {
|
||||
alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
|
||||
} else if rule.AlertType == "TRACES_BASED_ALERT" {
|
||||
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
|
||||
}
|
||||
alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
|
||||
}
|
||||
|
||||
return &alertsInfo, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error) {
|
||||
savedViewsInfo := model.SavedViewsInfo{}
|
||||
savedViews, err := explorer.GetViews()
|
||||
@@ -4403,8 +4421,8 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
||||
case *time.Time:
|
||||
point.Timestamp = v.UnixMilli()
|
||||
case *float64, *float32:
|
||||
isValidPoint = true
|
||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||
isValidPoint = true
|
||||
point.Value = float64(reflect.ValueOf(v).Elem().Float())
|
||||
} else {
|
||||
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float()))
|
||||
@@ -4413,24 +4431,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
||||
}
|
||||
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Float())
|
||||
}
|
||||
case **float64, **float32:
|
||||
val := reflect.ValueOf(v)
|
||||
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
||||
value := reflect.ValueOf(v).Elem().Elem().Float()
|
||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||
isValidPoint = true
|
||||
point.Value = value
|
||||
} else {
|
||||
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
||||
if _, ok := groupAttributes[colName]; !ok {
|
||||
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
|
||||
}
|
||||
groupAttributes[colName] = fmt.Sprintf("%v", value)
|
||||
}
|
||||
}
|
||||
case *uint, *uint8, *uint64, *uint16, *uint32:
|
||||
isValidPoint = true
|
||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||
isValidPoint = true
|
||||
point.Value = float64(reflect.ValueOf(v).Elem().Uint())
|
||||
} else {
|
||||
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
|
||||
@@ -4439,24 +4442,9 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
||||
}
|
||||
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
|
||||
}
|
||||
case **uint, **uint8, **uint64, **uint16, **uint32:
|
||||
val := reflect.ValueOf(v)
|
||||
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
||||
value := reflect.ValueOf(v).Elem().Elem().Uint()
|
||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||
isValidPoint = true
|
||||
point.Value = float64(value)
|
||||
} else {
|
||||
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
||||
if _, ok := groupAttributes[colName]; !ok {
|
||||
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
|
||||
}
|
||||
groupAttributes[colName] = fmt.Sprintf("%v", value)
|
||||
}
|
||||
}
|
||||
case *int, *int8, *int16, *int32, *int64:
|
||||
isValidPoint = true
|
||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||
isValidPoint = true
|
||||
point.Value = float64(reflect.ValueOf(v).Elem().Int())
|
||||
} else {
|
||||
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
|
||||
@@ -4465,21 +4453,6 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
|
||||
}
|
||||
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
|
||||
}
|
||||
case **int, **int8, **int16, **int32, **int64:
|
||||
val := reflect.ValueOf(v)
|
||||
if val.IsValid() && !val.IsNil() && !val.Elem().IsNil() {
|
||||
value := reflect.ValueOf(v).Elem().Elem().Int()
|
||||
if _, ok := constants.ReservedColumnTargetAliases[colName]; ok || countOfNumberCols == 1 {
|
||||
isValidPoint = true
|
||||
point.Value = float64(value)
|
||||
} else {
|
||||
groupBy = append(groupBy, fmt.Sprintf("%v", value))
|
||||
if _, ok := groupAttributes[colName]; !ok {
|
||||
groupAttributesArray = append(groupAttributesArray, map[string]string{colName: fmt.Sprintf("%v", value)})
|
||||
}
|
||||
groupAttributes[colName] = fmt.Sprintf("%v", value)
|
||||
}
|
||||
}
|
||||
case *bool:
|
||||
groupBy = append(groupBy, fmt.Sprintf("%v", *v))
|
||||
if _, ok := groupAttributes[colName]; !ok {
|
||||
|
||||
@@ -76,6 +76,7 @@ type APIHandler struct {
|
||||
querier interfaces.Querier
|
||||
querierV2 interfaces.Querier
|
||||
queryBuilder *queryBuilder.QueryBuilder
|
||||
preferDelta bool
|
||||
preferSpanMetrics bool
|
||||
|
||||
// temporalityMap is a map of metric name to temporality
|
||||
@@ -105,6 +106,7 @@ type APIHandlerOpts struct {
|
||||
|
||||
SkipConfig *model.SkipConfig
|
||||
|
||||
PerferDelta bool
|
||||
PreferSpanMetrics bool
|
||||
|
||||
MaxIdleConns int
|
||||
@@ -164,6 +166,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
reader: opts.Reader,
|
||||
appDao: opts.AppDao,
|
||||
skipConfig: opts.SkipConfig,
|
||||
preferDelta: opts.PerferDelta,
|
||||
preferSpanMetrics: opts.PreferSpanMetrics,
|
||||
temporalityMap: make(map[string]map[v3.Temporality]bool),
|
||||
maxIdleConns: opts.MaxIdleConns,
|
||||
@@ -3013,7 +3016,6 @@ func (aH *APIHandler) QueryRangeV3Format(w http.ResponseWriter, r *http.Request)
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
queryRangeParams.Version = "v3"
|
||||
|
||||
aH.Respond(w, queryRangeParams)
|
||||
}
|
||||
@@ -3068,14 +3070,6 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
postprocess.FillGaps(result, queryRangeParams)
|
||||
}
|
||||
|
||||
if queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable && queryRangeParams.FormatForWeb {
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
} else if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
result = postprocess.TransformToTableForBuilderQueries(result, queryRangeParams)
|
||||
}
|
||||
}
|
||||
|
||||
resp := v3.QueryRangeResponse{
|
||||
Result: result,
|
||||
}
|
||||
@@ -3324,10 +3318,8 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
|
||||
result, err = postprocess.PostProcessResult(result, queryRangeParams)
|
||||
} else if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL &&
|
||||
queryRangeParams.CompositeQuery.PanelType == v3.PanelTypeTable && queryRangeParams.FormatForWeb {
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
@@ -3351,7 +3343,6 @@ func (aH *APIHandler) QueryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
RespondError(w, apiErrorObj, nil)
|
||||
return
|
||||
}
|
||||
queryRangeParams.Version = "v4"
|
||||
|
||||
// add temporality for each metric
|
||||
temporalityErr := aH.populateTemporality(r.Context(), queryRangeParams)
|
||||
|
||||
@@ -142,11 +142,6 @@ func checkDuplicateString(pipeline []string) bool {
|
||||
for _, processor := range pipeline {
|
||||
name := processor
|
||||
if _, ok := exists[name]; ok {
|
||||
zap.L().Error(
|
||||
"duplicate processor name detected in generated collector config for log pipelines",
|
||||
zap.String("processor", processor),
|
||||
zap.Any("pipeline", pipeline),
|
||||
)
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
@@ -5,10 +5,7 @@ import (
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
	"gopkg.in/yaml.v3"
)

var buildProcessorTestData = []struct {
@@ -207,89 +204,3 @@ func TestBuildLogsPipeline(t *testing.T) {
		})
	}
}

func TestPipelineAliasCollisionsDontResultInDuplicateCollectorProcessors(t *testing.T) {
	require := require.New(t)

	baseConf := []byte(`
receivers:
  memory:
    id: in-memory-receiver
exporters:
  memory:
    id: in-memory-exporter
service:
  pipelines:
    logs:
      receivers:
        - memory
      processors: []
      exporters:
        - memory
`)

	makeTestPipeline := func(name string, alias string) Pipeline {
		return Pipeline{
			OrderId: 1,
			Name:    name,
			Alias:   alias,
			Enabled: true,
			Filter: &v3.FilterSet{
				Operator: "AND",
				Items: []v3.FilterItem{
					{
						Key: v3.AttributeKey{
							Key:      "method",
							DataType: v3.AttributeKeyDataTypeString,
							Type:     v3.AttributeKeyTypeTag,
						},
						Operator: "=",
						Value:    "GET",
					},
				},
			},
			Config: []PipelineOperator{
				{
					ID:        "regex",
					Type:      "regex_parser",
					Enabled:   true,
					Name:      "regex parser",
					ParseFrom: "attributes.test_regex_target",
					ParseTo:   "attributes",
					Regex:     `^\s*(?P<json_data>{.*})\s*$`,
				},
			},
		}
	}

	testPipelines := []Pipeline{
		makeTestPipeline("test pipeline 1", "pipeline-alias"),
		makeTestPipeline("test pipeline 2", "pipeline-alias"),
	}

	recommendedConfYaml, apiErr := GenerateCollectorConfigWithPipelines(
		baseConf, testPipelines,
	)
	require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation: %v", apiErr))

	var recommendedConf map[string]interface{}
	err := yaml.Unmarshal(recommendedConfYaml, &recommendedConf)
	require.Nil(err, "couldn't unmarshal recommended config")

	logsProcessors := recommendedConf["service"].(map[string]any)["pipelines"].(map[string]any)["logs"].(map[string]any)["processors"].([]any)

	require.Equal(
		len(logsProcessors), len(testPipelines),
		"test pipelines not included in recommended config as expected",
	)

	recommendedConfYaml2, apiErr := GenerateCollectorConfigWithPipelines(
		baseConf, testPipelines,
	)
	require.Nil(apiErr, fmt.Sprintf("couldn't generate config recommendation again: %v", apiErr))
	require.Equal(
		string(recommendedConfYaml), string(recommendedConfYaml2),
		"collector config should not change across recommendations for same set of pipelines",
	)

}

@@ -24,7 +24,7 @@ func CollectorConfProcessorName(p Pipeline) string {
func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []string, error) {
	processors := map[string]interface{}{}
	names := []string{}
	for pipelineIdx, v := range pipelines {
	for _, v := range pipelines {
		if !v.Enabled {
			continue
		}
@@ -70,12 +70,6 @@ func PreparePipelineProcessor(pipelines []Pipeline) (map[string]interface{}, []s
			Operators: v.Config,
		}
		name := CollectorConfProcessorName(v)

		// Ensure name is unique
		if _, nameExists := processors[name]; nameExists {
			name = fmt.Sprintf("%s-%d", name, pipelineIdx)
		}

		processors[name] = processor
		names = append(names, name)
	}
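
For context on the de-duplication shown in this hunk, here is a minimal, self-contained sketch of index-suffixing colliding processor names. The helper and names are illustrative only, not the actual query-service code.

```go
// Sketch: derive a unique map key for each pipeline processor so two
// pipelines with the same alias do not overwrite each other's entry.
package main

import "fmt"

func uniqueProcessorName(existing map[string]struct{}, base string, idx int) string {
	name := base
	if _, taken := existing[name]; taken {
		// Fall back to an index-suffixed name on collision.
		name = fmt.Sprintf("%s-%d", base, idx)
	}
	existing[name] = struct{}{}
	return name
}

func main() {
	seen := map[string]struct{}{}
	fmt.Println(uniqueProcessorName(seen, "logstransform/pipeline-alias", 0)) // logstransform/pipeline-alias
	fmt.Println(uniqueProcessorName(seen, "logstransform/pipeline-alias", 1)) // logstransform/pipeline-alias-1
}
```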

@@ -803,3 +803,76 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) {
	_, test2Exists := result[0].Attributes_string["test2"]
	require.False(test2Exists)
}

func TestTemporaryWorkaroundForSupportingAttribsContainingDots(t *testing.T) {
	// TODO(Raj): Remove this after dots are supported

	require := require.New(t)

	testPipeline := Pipeline{
		OrderId: 1,
		Name:    "pipeline1",
		Alias:   "pipeline1",
		Enabled: true,
		Filter: &v3.FilterSet{
			Operator: "AND",
			Items: []v3.FilterItem{
				{
					Key: v3.AttributeKey{
						Key:      "k8s_deployment_name",
						DataType: v3.AttributeKeyDataTypeString,
						Type:     v3.AttributeKeyTypeResource,
					},
					Operator: "=",
					Value:    "ingress",
				},
			},
		},
		Config: []PipelineOperator{
			{
				ID:      "add",
				Type:    "add",
				Enabled: true,
				Name:    "add",
				Field:   "attributes.test",
				Value:   "test-value",
			},
		},
	}

	testLogs := []model.SignozLog{{
		Timestamp:         uint64(time.Now().UnixNano()),
		Body:              "test log",
		Attributes_string: map[string]string{},
		Resources_string: map[string]string{
			"k8s_deployment_name": "ingress",
		},
		SeverityText:   entry.Info.String(),
		SeverityNumber: uint8(entry.Info),
		SpanID:         "",
		TraceID:        "",
	}, {
		Timestamp:         uint64(time.Now().UnixNano()),
		Body:              "test log",
		Attributes_string: map[string]string{},
		Resources_string: map[string]string{
			"k8s.deployment.name": "ingress",
		},
		SeverityText:   entry.Info.String(),
		SeverityNumber: uint8(entry.Info),
		SpanID:         "",
		TraceID:        "",
	}}

	result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
		context.Background(),
		[]Pipeline{testPipeline},
		testLogs,
	)
	require.Nil(err)
	require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
	require.Equal(2, len(result))
	for _, processedLog := range result {
		require.Equal(processedLog.Attributes_string["test"], "test-value")
	}
}

@@ -58,9 +58,7 @@ func ParseLogFilterParams(r *http.Request) (*model.LogsFilterParams, error) {
		res.OrderBy = val[0]
	}
	if val, ok := params[ORDER]; ok {
		if val[0] == ASC || val[0] == DESC {
			res.Order = val[0]
		}
		res.Order = val[0]
	}
	if val, ok := params["q"]; ok {
		res.Query = val[0]
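
As a side note, a defensive way to read such an order parameter is to check it against the allowed values and fall back to a default. The sketch below is illustrative; the ASC/DESC constants stand in for the package's own.

```go
// Sketch: validate an "order" query parameter, falling back to descending
// when the value is unrecognized. Not the actual ParseLogFilterParams code.
package main

import (
	"fmt"
	"net/url"
)

const (
	ASC  = "asc"
	DESC = "desc"
)

func parseOrder(values url.Values) string {
	order := values.Get("order")
	if order != ASC && order != DESC {
		return DESC // ignore unknown values instead of passing them through
	}
	return order
}

func main() {
	v, _ := url.ParseQuery("order=undefined&q=service.name='myservice'")
	fmt.Println(parseOrder(v)) // desc
}
```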

@@ -1,8 +1,6 @@
package logs

import (
	"net/http"
	"net/http/httptest"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
@@ -434,51 +432,3 @@ func TestGenerateSQLQuery(t *testing.T) {
		})
	}
}

var parseLogFilterParams = []struct {
	Name                    string
	ReqParams               string
	ExpectedLogFilterParams *model.LogsFilterParams
}{
	{
		Name:      "test with proper order by",
		ReqParams: "order=desc&q=service.name='myservice'&limit=10",
		ExpectedLogFilterParams: &model.LogsFilterParams{
			Limit:   10,
			OrderBy: "timestamp",
			Order:   DESC,
			Query:   "service.name='myservice'",
		},
	},
	{
		Name:      "test with proper order by asc",
		ReqParams: "order=asc&q=service.name='myservice'&limit=10",
		ExpectedLogFilterParams: &model.LogsFilterParams{
			Limit:   10,
			OrderBy: "timestamp",
			Order:   ASC,
			Query:   "service.name='myservice'",
		},
	},
	{
		Name:      "test with incorrect order by",
		ReqParams: "order=undefined&q=service.name='myservice'&limit=10",
		ExpectedLogFilterParams: &model.LogsFilterParams{
			Limit:   10,
			OrderBy: "timestamp",
			Order:   DESC,
			Query:   "service.name='myservice'",
		},
	},
}

func TestParseLogFilterParams(t *testing.T) {
	for _, test := range parseLogFilterParams {
		Convey(test.Name, t, func() {
			req := httptest.NewRequest(http.MethodGet, "/logs?"+test.ReqParams, nil)
			params, err := ParseLogFilterParams(req)
			So(err, ShouldBeNil)
			So(params, ShouldEqual, test.ExpectedLogFilterParams)
		})
	}
}

@@ -55,6 +55,7 @@ type ServerOptions struct {
	// alert specific params
	DisableRules      bool
	RuleRepoURL       string
	PreferDelta       bool
	PreferSpanMetrics bool
	MaxIdleConns      int
	MaxOpenConns      int
@@ -171,6 +172,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
	apiHandler, err := NewAPIHandler(APIHandlerOpts{
		Reader:            reader,
		SkipConfig:        skipConfig,
		PerferDelta:       serverOptions.PreferDelta,
		PreferSpanMetrics: serverOptions.PreferSpanMetrics,
		MaxIdleConns:      serverOptions.MaxIdleConns,
		MaxOpenConns:      serverOptions.MaxOpenConns,

@@ -73,6 +73,7 @@ type Reader interface {
	LiveTailLogsV3(ctx context.Context, query string, timestampStart uint64, idStart string, client *v3.LogsLiveTailClient)

	GetDashboardsInfo(ctx context.Context) (*model.DashboardsInfo, error)
	GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
	GetSavedViewsInfo(ctx context.Context) (*model.SavedViewsInfo, error)
	GetTotalSpans(ctx context.Context) (uint64, error)
	GetTotalLogs(ctx context.Context) (uint64, error)

@@ -37,6 +37,7 @@ func main() {
	var ruleRepoURL, cacheConfigPath, fluxInterval string
	var cluster string

	var preferDelta bool
	var preferSpanMetrics bool

	var maxIdleConns int
@@ -46,10 +47,11 @@ func main() {
	flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
	flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
	flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
	flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
	flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
	flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)")
	flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
	flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
	// Allow using the consistent naming with the signoz collector
	flag.StringVar(&cluster, "cluster-name", "cluster", "(cluster name - defaults to 'cluster')")
@@ -69,6 +71,7 @@ func main() {
		HTTPHostPort:      constants.HTTPHostPort,
		PromConfigPath:    promConfigPath,
		SkipTopLvlOpsPath: skipTopLvlOpsPath,
		PreferDelta:       preferDelta,
		PreferSpanMetrics: preferSpanMetrics,
		PrivateHostPort:   constants.PrivateHostPort,
		DisableRules:      disableRules,

@@ -212,11 +212,9 @@ type ServiceOverviewItem struct {
}

type SearchSpansResult struct {
	StartTimestampMillis uint64          `json:"startTimestampMillis"`
	EndTimestampMillis   uint64          `json:"endTimestampMillis"`
	Columns              []string        `json:"columns"`
	Events               [][]interface{} `json:"events"`
	IsSubTree            bool            `json:"isSubTree"`
	Columns   []string        `json:"columns"`
	Events    [][]interface{} `json:"events"`
	IsSubTree bool            `json:"isSubTree"`
}

type GetFilterSpansResponseItem struct {
@@ -252,22 +250,19 @@ type Event struct {

//easyjson:json
type SearchSpanResponseItem struct {
	TimeUnixNano     uint64            `json:"timestamp"`
	DurationNano     int64             `json:"durationNano"`
	SpanID           string            `json:"spanId"`
	RootSpanID       string            `json:"rootSpanId"`
	TraceID          string            `json:"traceId"`
	HasError         bool              `json:"hasError"`
	Kind             int32             `json:"kind"`
	ServiceName      string            `json:"serviceName"`
	Name             string            `json:"name"`
	References       []OtelSpanRef     `json:"references,omitempty"`
	TagMap           map[string]string `json:"tagMap"`
	Events           []string          `json:"event"`
	RootName         string            `json:"rootName"`
	StatusMessage    string            `json:"statusMessage"`
	StatusCodeString string            `json:"statusCodeString"`
	SpanKind         string            `json:"spanKind"`
	TimeUnixNano uint64            `json:"timestamp"`
	DurationNano int64             `json:"durationNano"`
	SpanID       string            `json:"spanId"`
	RootSpanID   string            `json:"rootSpanId"`
	TraceID      string            `json:"traceId"`
	HasError     bool              `json:"hasError"`
	Kind         int32             `json:"kind"`
	ServiceName  string            `json:"serviceName"`
	Name         string            `json:"name"`
	References   []OtelSpanRef     `json:"references,omitempty"`
	TagMap       map[string]string `json:"tagMap"`
	Events       []string          `json:"event"`
	RootName     string            `json:"rootName"`
}

type OtelSpanRef struct {
@@ -304,7 +299,7 @@ func (item *SearchSpanResponseItem) GetValues() []interface{} {
		keys = append(keys, k)
		values = append(values, v)
	}
	returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError, item.StatusMessage, item.StatusCodeString, item.SpanKind}
	returnArray := []interface{}{item.TimeUnixNano, item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), keys, values, referencesStringArray, item.Events, item.HasError}

	return returnArray
}
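
For readers unfamiliar with the columnar response shape above, the sketch below shows how a column list pairs with per-span value rows, as in SearchSpansResult's Columns and Events fields; all column names and values here are made up.

```go
// Sketch: pairing a column list with value rows of matching order/length.
package main

import "fmt"

func main() {
	columns := []string{"__time", "SpanId", "TraceId", "ServiceName", "Name"}
	// Each row keeps the same order and length as "columns".
	rows := [][]interface{}{
		{uint64(1718000000000000000), "span-1", "trace-1", "frontend", "HTTP GET"},
		{uint64(1718000000500000000), "span-2", "trace-1", "route", "FindNearest"},
	}
	for _, row := range rows {
		for i, col := range columns {
			fmt.Printf("%s=%v ", col, row[i])
		}
		fmt.Println()
	}
}
```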

@@ -118,12 +118,6 @@ func easyjson6ff3ac1dDecodeGoSignozIoSignozPkgQueryServiceModel(in *jlexer.Lexer
		}
	case "rootName":
		out.RootName = string(in.String())
	case "statusMessage":
		out.StatusMessage = string(in.String())
	case "statusCodeString":
		out.StatusCodeString = string(in.String())
	case "spanKind":
		out.SpanKind = string(in.String())
	default:
		in.SkipRecursive()
	}
@@ -239,21 +233,6 @@ func easyjson6ff3ac1dEncodeGoSignozIoSignozPkgQueryServiceModel(out *jwriter.Wri
		out.RawString(prefix)
		out.String(string(in.RootName))
	}
	{
		const prefix string = ",\"statusMessage\":"
		out.RawString(prefix)
		out.String(string(in.StatusMessage))
	}
	{
		const prefix string = ",\"statusCodeString\":"
		out.RawString(prefix)
		out.String(string(in.StatusCodeString))
	}
	{
		const prefix string = ",\"spanKind\":"
		out.RawString(prefix)
		out.String(string(in.SpanKind))
	}
	out.RawByte('}')
}

@@ -354,8 +354,6 @@ type QueryRangeParamsV3 struct {
	CompositeQuery *CompositeQuery        `json:"compositeQuery"`
	Variables      map[string]interface{} `json:"variables,omitempty"`
	NoCache        bool                   `json:"noCache"`
	Version        string                 `json:"-"`
	FormatForWeb   bool                   `json:"formatForWeb,omitempty"`
}

type PromQuery struct {
@@ -401,11 +399,8 @@ type CompositeQuery struct {
	PromQueries map[string]*PromQuery `json:"promQueries,omitempty"`
	PanelType   PanelType             `json:"panelType"`
	QueryType   QueryType             `json:"queryType"`
	// Unit for the time series data shown in the graph
	// This is used in alerts to format the value and threshold
	Unit string `json:"unit,omitempty"`
	// FillGaps is used to fill the gaps in the time series data
	FillGaps bool `json:"fillGaps,omitempty"`
	Unit     string `json:"unit,omitempty"`
	FillGaps bool   `json:"fillGaps,omitempty"`
}

func (c *CompositeQuery) EnabledQueries() int {
@@ -991,30 +986,10 @@ type QueryRangeResponse struct {
	Result []*Result `json:"result"`
}

type TableColumn struct {
	Name string `json:"name"`
	// QueryName is the name of the query that this column belongs to
	QueryName string `json:"queryName"`
	// IsValueColumn is true if this column is a value column
	// i.e it is the column that contains the actual value that is being plotted
	IsValueColumn bool `json:"isValueColumn"`
}

type TableRow struct {
	Data      map[string]interface{} `json:"data"`
	QueryName string                 `json:"-"`
}

type Table struct {
	Columns []*TableColumn `json:"columns"`
	Rows    []*TableRow    `json:"rows"`
}

type Result struct {
	QueryName string    `json:"queryName,omitempty"`
	Series    []*Series `json:"series,omitempty"`
	List      []*Row    `json:"list,omitempty"`
	Table     *Table    `json:"table,omitempty"`
	QueryName string    `json:"queryName"`
	Series    []*Series `json:"series"`
	List      []*Row    `json:"list"`
}

type LogsLiveTailClient struct {
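
The table types in this hunk pair a column list with keyed rows. A minimal sketch of assembling such a structure follows; the struct shapes are copied from the definitions above, while the queries, services, and numbers are invented.

```go
// Sketch: building a table-style query result from named columns and rows.
package main

import (
	"encoding/json"
	"fmt"
)

type TableColumn struct {
	Name          string `json:"name"`
	QueryName     string `json:"queryName"`
	IsValueColumn bool   `json:"isValueColumn"`
}

type TableRow struct {
	Data map[string]interface{} `json:"data"`
}

type Table struct {
	Columns []*TableColumn `json:"columns"`
	Rows    []*TableRow    `json:"rows"`
}

func main() {
	table := &Table{
		Columns: []*TableColumn{
			{Name: "service.name", QueryName: "A"},
			{Name: "p99", QueryName: "A", IsValueColumn: true},
		},
		Rows: []*TableRow{
			{Data: map[string]interface{}{"service.name": "frontend", "p99": 245.3}},
			{Data: map[string]interface{}{"service.name": "route", "p99": 89.1}},
		},
	}
	out, _ := json.MarshalIndent(table, "", "  ")
	fmt.Println(string(out))
}
```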
@@ -46,9 +46,6 @@ func fillGap(series *v3.Series, start, end, step int64) *v3.Series {

// TODO(srikanthccv): can WITH FILL be perfect substitute for all cases https://clickhouse.com/docs/en/sql-reference/statements/select/order-by#order-by-expr-with-fill-modifier
func FillGaps(results []*v3.Result, params *v3.QueryRangeParamsV3) {
	if params.CompositeQuery.PanelType != v3.PanelTypeGraph {
		return
	}
	for _, result := range results {
		// A `result` item in `results` contains the query result for individual query.
		// If there are no series in the result, we add empty series and `fillGap` adds all zeros
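
The gap-filling helper above is only partially visible in the diff. As a rough illustration, zero-filling a series over a fixed step can look like the following; names, units, and logic are illustrative only, not the query-service implementation.

```go
// Sketch: fill missing timesteps in a series with zero values.
// Timestamps and step are in milliseconds.
package main

import "fmt"

func fillGaps(points map[int64]float64, start, end, step int64) []float64 {
	filled := []float64{}
	for ts := start; ts <= end; ts += step {
		if v, ok := points[ts]; ok {
			filled = append(filled, v)
		} else {
			filled = append(filled, 0) // no datapoint for this step
		}
	}
	return filled
}

func main() {
	points := map[int64]float64{1000: 2.5, 3000: 4.0}
	fmt.Println(fillGaps(points, 1000, 4000, 1000)) // [2.5 0 4 0]
}
```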
Some files were not shown because too many files have changed in this diff.