mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-27 18:54:27 +00:00)

Compare commits: chore/remo...perf/panel (21 commits)

Commit SHA1s:
18e856f62c, c0bd545e26, 346be7b7ba, 10176e3128, a6832d6ed0, 5504e90620, 19cffa0165,
4dc4a0b95e, 2462f551be, 78ab80d294, d285b90f09, 08e756cf5d, 42c1ddda39, efb85fc205,
0e972257db, f8144e3a1d, 02a8a11976, 4a351e5280, 311257a518, d5c665a72a, 0d6c8785d9
@@ -176,7 +176,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.105.1
+    image: signoz/signoz:v0.104.0
    command:
      - --config=/root/config/prometheus.yml
    ports:

@@ -117,7 +117,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.105.1
+    image: signoz/signoz:v0.104.0
    command:
      - --config=/root/config/prometheus.yml
    ports:

@@ -179,7 +179,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.105.1}
+    image: signoz/signoz:${VERSION:-v0.104.0}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml

@@ -111,7 +111,7 @@ services:
   # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.105.1}
+    image: signoz/signoz:${VERSION:-v0.104.0}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml
@@ -26,10 +26,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 	if err != nil {
 		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err)
 	}
 
-	// calculate eval delay based on rule config
-	evalDelay := baserules.CalculateEvalDelay(opts.Rule, opts.ManagerOpts.EvalDelay)
-
 	if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
 		// create a threshold rule
 		tr, err := baserules.NewThresholdRule(

@@ -39,7 +35,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 			opts.Reader,
 			opts.Querier,
 			opts.SLogger,
-			baserules.WithEvalDelay(evalDelay),
+			baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
 			baserules.WithSQLStore(opts.SQLStore),
 		)

@@ -84,7 +80,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 			opts.Querier,
 			opts.SLogger,
 			opts.Cache,
-			baserules.WithEvalDelay(evalDelay),
+			baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
 			baserules.WithSQLStore(opts.SQLStore),
 		)
 		if err != nil {
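The two hunks above swap the computed `evalDelay` back to the static manager default; the deleted `CalculateEvalDelay` helper appears in full at the end of this compare. As a rough illustration of the idea behind that helper, a minimal TypeScript sketch (hypothetical names; the real implementation is the Go file shown at the bottom):

// Sketch: an eval delay guards against late-arriving data flipping an
// alert decision. When the aggregation is monotonic in the direction the
// comparison checks, late data cannot un-trigger the condition, so the
// delay can safely be dropped.
type Aggregation = 'min' | 'max' | 'count';
type CompareOp = 'above' | 'below';

function isSafeCombination(agg: Aggregation, op: CompareOp): boolean {
	// min only decreases as more data arrives; a "below" breach stays breached
	if (agg === 'min' && op === 'below') return true;
	// max and count only increase; an "above" breach stays breached
	if ((agg === 'max' || agg === 'count') && op === 'above') return true;
	return false;
}

function calculateEvalDelay(
	defaultDelayMs: number,
	queries: { agg: Aggregation; op: CompareOp }[],
): number {
	// Every query must be safe for the delay to be removed
	return queries.every((q) => isSafeCombination(q.agg, q.op)) ? 0 : defaultDelayMs;
}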
@@ -132,34 +132,117 @@ function CeleryTaskBar({
 		[selectedFilters, celerySuccessStateData],
 	);
 
-	const onGraphClick = (
-		widgetData: Widgets,
-		xValue: number,
-		_yValue: number,
-		_mouseX: number,
-		_mouseY: number,
-		data?: {
-			[key: string]: string;
-		},
-	): void => {
-		const { start, end } = getStartAndEndTimesInMilliseconds(xValue);
-
-		// Extract entity and value from data
-		const [firstDataPoint] = Object.entries(data || {});
-		const [entity, value] = (firstDataPoint || ([] as unknown)) as [
-			string,
-			string,
-		];
-
-		if (!isEmpty(entity) || !isEmpty(value)) {
-			onClick?.({
-				entity,
-				value,
-				timeRange: [start, end],
-				widgetData,
-			});
-		}
-	};
+	const onGraphClick = useCallback(
+		(
+			widgetData: Widgets,
+			xValue: number,
+			_yValue: number,
+			_mouseX: number,
+			_mouseY: number,
+			data?: {
+				[key: string]: string;
+			},
+		): void => {
+			const { start, end } = getStartAndEndTimesInMilliseconds(xValue);
+
+			// Extract entity and value from data
+			const [firstDataPoint] = Object.entries(data || {});
+			const [entity, value] = (firstDataPoint || ([] as unknown)) as [
+				string,
+				string,
+			];
+
+			if (!isEmpty(entity) || !isEmpty(value)) {
+				onClick?.({
+					entity,
+					value,
+					timeRange: [start, end],
+					widgetData,
+				});
+			}
+		},
+		[onClick],
+	);
+
+	const onAllStateClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data?: any,
+		): void => {
+			onGraphClick(
+				celerySlowestTasksTableWidgetData,
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				data,
+			);
+		},
+		[onGraphClick],
+	);
+
+	const onFailedStateClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data?: any,
+		): void => {
+			onGraphClick(
+				celeryFailedTasksTableWidgetData,
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				data,
+			);
+		},
+		[onGraphClick],
+	);
+
+	const onRetryStateClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data?: any,
+		): void => {
+			onGraphClick(
+				celeryRetryTasksTableWidgetData,
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				data,
+			);
+		},
+		[onGraphClick],
+	);
+
+	const onSuccessStateClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data?: any,
+		): void => {
+			onGraphClick(
+				celerySuccessTasksTableWidgetData,
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				data,
+			);
+		},
+		[onGraphClick],
+	);
 
 	const { getCustomSeries } = useGetGraphCustomSeries({
 		isDarkMode,
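The refactor above is the standard `useCallback` pattern: each chart gets a stable, pre-bound handler instead of a fresh inline closure on every render, so memoized children can skip re-rendering. A minimal self-contained sketch of the same shape (hypothetical names, not the SigNoz API):

import { useCallback } from 'react';

type GraphClick = (x: number, y: number, data?: Record<string, string>) => void;

// Binds a widget-specific argument onto a shared click handler while
// keeping the resulting function referentially stable across renders.
function useWidgetClick(
	onGraphClick: (
		widgetId: string,
		x: number,
		y: number,
		data?: Record<string, string>,
	) => void,
	widgetId: string,
): GraphClick {
	return useCallback(
		(x, y, data) => onGraphClick(widgetId, x, y, data),
		[onGraphClick, widgetId],
	);
}

Passing `useWidgetClick(onGraphClick, 'slowest')` as a prop keeps its identity until `onGraphClick` or the id changes, which is what lets a `React.memo` child bail out of re-rendering.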
@@ -185,16 +268,7 @@ function CeleryTaskBar({
 					headerMenuList={[...ViewMenuAction]}
 					onDragSelect={onDragSelect}
 					isQueryEnabled={queryEnabled}
-					onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
-						onGraphClick(
-							celerySlowestTasksTableWidgetData,
-							xValue,
-							yValue,
-							mouseX,
-							mouseY,
-							data,
-						)
-					}
+					onClickHandler={onAllStateClick}
 					customSeries={getCustomSeries}
 					dataAvailable={checkIfDataExists}
 				/>

@@ -205,16 +279,7 @@ function CeleryTaskBar({
 					headerMenuList={[...ViewMenuAction]}
 					onDragSelect={onDragSelect}
 					isQueryEnabled={queryEnabled}
-					onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
-						onGraphClick(
-							celeryFailedTasksTableWidgetData,
-							xValue,
-							yValue,
-							mouseX,
-							mouseY,
-							data,
-						)
-					}
+					onClickHandler={onFailedStateClick}
 					customSeries={getCustomSeries}
 				/>
 			)}

@@ -224,16 +289,7 @@ function CeleryTaskBar({
 					headerMenuList={[...ViewMenuAction]}
 					onDragSelect={onDragSelect}
 					isQueryEnabled={queryEnabled}
-					onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
-						onGraphClick(
-							celeryRetryTasksTableWidgetData,
-							xValue,
-							yValue,
-							mouseX,
-							mouseY,
-							data,
-						)
-					}
+					onClickHandler={onRetryStateClick}
 					customSeries={getCustomSeries}
 				/>
 			)}

@@ -243,16 +299,7 @@ function CeleryTaskBar({
 					headerMenuList={[...ViewMenuAction]}
 					onDragSelect={onDragSelect}
 					isQueryEnabled={queryEnabled}
-					onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
-						onGraphClick(
-							celerySuccessTasksTableWidgetData,
-							xValue,
-							yValue,
-							mouseX,
-							mouseY,
-							data,
-						)
-					}
+					onClickHandler={onSuccessStateClick}
 					customSeries={getCustomSeries}
 				/>
 			)}
@@ -393,21 +393,15 @@ function ExplorerOptions({
 		backwardCompatibleOptions = omit(options, 'version');
 	}
 
-	// Use the correct default columns based on the current data source
-	const defaultColumns =
-		sourcepage === DataSource.TRACES
-			? defaultTraceSelectedColumns
-			: defaultLogsSelectedColumns;
-
 	if (extraData.selectColumns?.length) {
 		handleOptionsChange({
 			...backwardCompatibleOptions,
 			selectColumns: extraData.selectColumns,
 		});
-	} else if (!isEqual(defaultColumns, options.selectColumns)) {
+	} else if (!isEqual(defaultTraceSelectedColumns, options.selectColumns)) {
 		handleOptionsChange({
 			...backwardCompatibleOptions,
-			selectColumns: defaultColumns,
+			selectColumns: defaultTraceSelectedColumns,
 		});
 	}
 };
@@ -26,6 +26,7 @@ import {
 	SetStateAction,
+	useCallback,
 	useEffect,
 	useMemo,
 	useRef,
 	useState,
 } from 'react';

@@ -76,6 +77,7 @@ function WidgetGraphComponent({
 	const isFullViewOpen = params.get(QueryParams.expandedWidgetId) === widget.id;
 
 	const lineChartRef = useRef<ToggleGraphProps>();
 
 	const [graphVisibility, setGraphVisibility] = useState<boolean[]>(
 		Array(queryResponse.data?.payload?.data?.result?.length || 0).fill(true),
 	);

@@ -110,7 +112,7 @@ function WidgetGraphComponent({
 
 	const updateDashboardMutation = useUpdateDashboard();
 
-	const onDeleteHandler = (): void => {
+	const onDeleteHandler = useCallback((): void => {
 		if (!selectedDashboard) return;
 
 		const updatedWidgets = selectedDashboard?.data?.widgets?.filter(

@@ -138,9 +140,15 @@ function WidgetGraphComponent({
 				setDeleteModal(false);
 			},
 		});
-	};
+	}, [
+		selectedDashboard,
+		widget.id,
+		updateDashboardMutation,
+		setLayouts,
+		setSelectedDashboard,
+	]);
 
-	const onCloneHandler = async (): Promise<void> => {
+	const onCloneHandler = useCallback(async (): Promise<void> => {
 		if (!selectedDashboard) return;
 
 		const uuid = v4();

@@ -204,9 +212,18 @@ function WidgetGraphComponent({
 			},
 		},
 	);
-	};
+	}, [
+		selectedDashboard,
+		widget,
+		updateDashboardMutation,
+		setLayouts,
+		setSelectedDashboard,
+		notifications,
+		safeNavigate,
+		pathname,
+	]);
 
-	const handleOnView = (): void => {
+	const handleOnView = useCallback((): void => {
 		const queryParams = {
 			[QueryParams.expandedWidgetId]: widget.id,
 		};

@@ -225,17 +242,17 @@ function WidgetGraphComponent({
 			pathname,
 			search: newSearch,
 		});
-	};
+	}, [widget.id, search, pathname, safeNavigate]);
 
-	const handleOnDelete = (): void => {
+	const handleOnDelete = useCallback((): void => {
 		onToggleModal(setDeleteModal);
-	};
+	}, [onToggleModal]);
 
-	const onDeleteModelHandler = (): void => {
+	const onDeleteModelHandler = useCallback((): void => {
 		onToggleModal(setDeleteModal);
-	};
+	}, [onToggleModal]);
 
-	const onToggleModelHandler = (): void => {
+	const onToggleModelHandler = useCallback((): void => {
 		const existingSearchParams = new URLSearchParams(search);
 		existingSearchParams.delete(QueryParams.expandedWidgetId);
 		existingSearchParams.delete(QueryParams.compositeQuery);

@@ -254,63 +271,84 @@ function WidgetGraphComponent({
 			pathname,
 			search: createQueryParams(updatedQueryParams),
 		});
-	};
+	}, [search, queryResponse.data?.payload, widget.id, pathname, safeNavigate]);
 
 	const [searchTerm, setSearchTerm] = useState<string>('');
 
+	// Memoize the isButtonEnabled value to prevent recalculation
+	const isGraphClickButtonEnabled = useMemo(
+		() =>
+			(widget?.query?.builder?.queryData &&
+			Array.isArray(widget.query.builder.queryData)
+				? widget.query.builder.queryData
+				: []
+			).some(
+				(q) =>
+					q.dataSource === DataSource.TRACES || q.dataSource === DataSource.LOGS,
+			),
+		[widget?.query?.builder?.queryData],
+	);
+
 	const graphClick = useGraphClickToShowButton({
 		graphRef: currentGraphRef?.current ? currentGraphRef : graphRef,
-		isButtonEnabled: (widget?.query?.builder?.queryData &&
-		Array.isArray(widget.query.builder.queryData)
-			? widget.query.builder.queryData
-			: []
-		).some(
-			(q) =>
-				q.dataSource === DataSource.TRACES || q.dataSource === DataSource.LOGS,
-		),
+		isButtonEnabled: isGraphClickButtonEnabled,
 		buttonClassName: 'view-onclick-show-button',
 	});
 
 	const navigateToExplorer = useNavigateToExplorer();
 
-	const graphClickHandler = (
-		xValue: number,
-		yValue: number,
-		mouseX: number,
-		mouseY: number,
-		metric?: { [key: string]: string },
-		queryData?: { queryName: string; inFocusOrNot: boolean },
-	): void => {
-		const customTracesTimeRange = getCustomTimeRangeWindowSweepInMS(
-			customTimeRangeWindowForCoRelation,
-		);
-		const { start, end } = getStartAndEndTimesInMilliseconds(
-			xValue,
-			customTracesTimeRange,
-		);
-		handleGraphClick({
-			xValue,
-			yValue,
-			mouseX,
-			mouseY,
-			metric,
-			queryData,
-			widget,
-			navigateToExplorerPages,
-			navigateToExplorer,
-			notifications,
-			graphClick,
-			...(customTimeRangeWindowForCoRelation
-				? { customTracesTimeRange: { start, end } }
-				: {}),
-		});
-	};
+	const graphClickHandler = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			metric?: { [key: string]: string },
+			queryData?: { queryName: string; inFocusOrNot: boolean },
+		): void => {
+			const customTracesTimeRange = getCustomTimeRangeWindowSweepInMS(
+				customTimeRangeWindowForCoRelation,
+			);
+			const { start, end } = getStartAndEndTimesInMilliseconds(
+				xValue,
+				customTracesTimeRange,
+			);
+			handleGraphClick({
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				metric,
+				queryData,
+				widget,
+				navigateToExplorerPages,
+				navigateToExplorer,
+				notifications,
+				graphClick,
+				...(customTimeRangeWindowForCoRelation
+					? { customTracesTimeRange: { start, end } }
+					: {}),
+			});
+		},
+		[
+			customTimeRangeWindowForCoRelation,
+			widget,
+			navigateToExplorerPages,
+			navigateToExplorer,
+			notifications,
+			graphClick,
+		],
+	);
 
 	const { truncatedText, fullText } = useGetResolvedText({
 		text: widget.title as string,
 		maxLength: 100,
 	});
 
+	// Use the provided onClickHandler if available, otherwise use the default graphClickHandler
+	// Both should be stable references due to useCallback
+	const clickHandler = onClickHandler ?? graphClickHandler;
+
 	return (
 		<div
 			style={{
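The `isGraphClickButtonEnabled` and `clickHandler` changes above follow one rule: derive values with `useMemo` and `useCallback` so the props handed to memoized children keep referential identity. A small sketch under assumed shapes (hypothetical hook, not the actual component):

import { useCallback, useMemo } from 'react';

type Query = { dataSource: 'traces' | 'logs' | 'metrics' };

function useClickProps(
	queryData: Query[] | undefined,
	onClickHandler?: (x: number) => void,
): { isButtonEnabled: boolean; clickHandler: (x: number) => void } {
	// Recomputed only when queryData changes identity
	const isButtonEnabled = useMemo(
		() =>
			(queryData ?? []).some(
				(q) => q.dataSource === 'traces' || q.dataSource === 'logs',
			),
		[queryData],
	);

	// Stable fallback handler
	const defaultHandler = useCallback((x: number): void => {
		console.log('graph click at', x);
	}, []);

	// Both branches are stable, so the chosen handler is stable too
	const clickHandler = onClickHandler ?? defaultHandler;
	return { isButtonEnabled, clickHandler };
}

Note the caveat the original comment encodes: `onClickHandler ?? graphClickHandler` only stays stable if the caller also wraps its handler in `useCallback`.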
@@ -366,7 +404,7 @@ function WidgetGraphComponent({
 					yAxisUnit={widget.yAxisUnit}
 					onToggleModelHandler={onToggleModelHandler}
 					tableProcessedDataRef={tableProcessedDataRef}
-					onClickHandler={onClickHandler ?? graphClickHandler}
+					onClickHandler={clickHandler}
 					customOnDragSelect={customOnDragSelect}
 					setCurrentGraphRef={setCurrentGraphRef}
 					enableDrillDown={

@@ -416,7 +454,7 @@ function WidgetGraphComponent({
 				setRequestData={setRequestData}
 				setGraphVisibility={setGraphVisibility}
 				graphVisibility={graphVisibility}
-				onClickHandler={onClickHandler ?? graphClickHandler}
+				onClickHandler={clickHandler}
 				onDragSelect={onDragSelect}
 				tableProcessedDataRef={tableProcessedDataRef}
 				customTooltipElement={customTooltipElement}
@@ -5,7 +5,7 @@ import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems
 import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
 import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
-import { useCallback, useMemo } from 'react';
+import { useCallback, useMemo, useRef } from 'react';
 import { useMutation } from 'react-query';
 import { useSelector } from 'react-redux';
 import { AppState } from 'store/reducers';

@@ -46,6 +46,11 @@ function useUpdatedQuery(): UseUpdatedQueryResult {
 		[selectedDashboard],
 	);
 
+	// Use ref to access latest mutateAsync without recreating the callback
+	// queryRangeMutation object recreates on every render, but mutateAsync is stable
+	const mutateAsyncRef = useRef(queryRangeMutation.mutateAsync);
+	mutateAsyncRef.current = queryRangeMutation.mutateAsync;
+
 	const getUpdatedQuery = useCallback(
 		async ({
 			widgetConfig,

@@ -63,12 +68,12 @@ function useUpdatedQuery(): UseUpdatedQueryResult {
 			});
 
 			// Execute query and process results
-			const queryResult = await queryRangeMutation.mutateAsync(queryPayload);
+			const queryResult = await mutateAsyncRef.current(queryPayload);
 
 			// Map query data from API response
 			return mapQueryDataFromApi(queryResult.data.compositeQuery);
 		},
-		[dynamicVariables, globalSelectedInterval, queryRangeMutation],
+		[dynamicVariables, globalSelectedInterval],
 	);
 
 	return {
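The `mutateAsyncRef` change above is the common "latest ref" pattern for react-query: the object returned by `useMutation` is a new reference on every render, but reading `mutateAsync` through a ref keeps it out of the `useCallback` dependency list without going stale. The same idea in isolation (hypothetical generic helper):

import { useCallback, useRef } from 'react';

function useStableAsyncCall<TArgs, TResult>(
	unstableFn: (args: TArgs) => Promise<TResult>,
): (args: TArgs) => Promise<TResult> {
	// Always points at the newest function; updated on every render
	const ref = useRef(unstableFn);
	ref.current = unstableFn;

	// Empty dependency list: the returned wrapper never changes identity
	return useCallback((args: TArgs) => ref.current(args), []);
}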
@@ -238,6 +238,86 @@ function External(): JSX.Element {
 		setSelectedData,
 	);
 
+	const onErrorPercentageClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data: any,
+		): void => {
+			onGraphClickHandler(
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				'external_call_error_percentage',
+				data,
+			);
+		},
+		[onGraphClickHandler],
+	);
+
+	const onDurationClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data: any,
+		): void => {
+			onGraphClickHandler(
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				'external_call_duration',
+				data,
+			);
+		},
+		[onGraphClickHandler],
+	);
+
+	const onRPSByAddressClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data: any,
+		): void => {
+			onGraphClickHandler(
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				'external_call_rps_by_address',
+				data,
+			);
+		},
+		[onGraphClickHandler],
+	);
+
+	const onDurationByAddressClick = useCallback(
+		(
+			xValue: number,
+			yValue: number,
+			mouseX: number,
+			mouseY: number,
+			data: any,
+		): void => {
+			onGraphClickHandler(
+				xValue,
+				yValue,
+				mouseX,
+				mouseY,
+				'external_call_duration_by_address',
+				data,
+			);
+		},
+		[onGraphClickHandler],
+	);
+
 	return (
 		<>
 			<Row gutter={24}>

@@ -266,16 +346,7 @@ function External(): JSX.Element {
 					<Graph
 						headerMenuList={MENU_ITEMS}
 						widget={externalCallErrorWidget}
-						onClickHandler={(xValue, yValue, mouseX, mouseY, data): void => {
-							onGraphClickHandler(
-								xValue,
-								yValue,
-								mouseX,
-								mouseY,
-								'external_call_error_percentage',
-								data,
-							);
-						}}
+						onClickHandler={onErrorPercentageClick}
 						onDragSelect={onDragSelect}
 						version={ENTITY_VERSION_V4}
 					/>

@@ -309,16 +380,7 @@ function External(): JSX.Element {
 					<Graph
 						headerMenuList={MENU_ITEMS}
 						widget={externalCallDurationWidget}
-						onClickHandler={(xValue, yValue, mouseX, mouseY, data): void => {
-							onGraphClickHandler(
-								xValue,
-								yValue,
-								mouseX,
-								mouseY,
-								'external_call_duration',
-								data,
-							);
-						}}
+						onClickHandler={onDurationClick}
 						onDragSelect={onDragSelect}
 						version={ENTITY_VERSION_V4}
 					/>

@@ -353,16 +415,7 @@ function External(): JSX.Element {
 					<Graph
 						widget={externalCallRPSWidget}
 						headerMenuList={MENU_ITEMS}
-						onClickHandler={(xValue, yValue, mouseX, mouseY, data): Promise<void> =>
-							onGraphClickHandler(
-								xValue,
-								yValue,
-								mouseX,
-								mouseY,
-								'external_call_rps_by_address',
-								data,
-							)
-						}
+						onClickHandler={onRPSByAddressClick}
 						onDragSelect={onDragSelect}
 						version={ENTITY_VERSION_V4}
 					/>

@@ -396,16 +449,7 @@ function External(): JSX.Element {
 					<Graph
 						widget={externalCallDurationAddressWidget}
 						headerMenuList={MENU_ITEMS}
-						onClickHandler={(xValue, yValue, mouseX, mouseY, data): void => {
-							onGraphClickHandler(
-								xValue,
-								yValue,
-								mouseX,
-								mouseY,
-								'external_call_duration_by_address',
-								data,
-							);
-						}}
+						onClickHandler={onDurationByAddressClick}
 						onDragSelect={onDragSelect}
 						version={ENTITY_VERSION_V4}
 					/>
@@ -516,8 +516,6 @@
   "falcon",
   "fastapi",
   "flask",
-  "celery",
-  "gunicorn",
   "monitor python application",
   "monitor python backend",
   "opentelemetry python",

@@ -542,33 +540,134 @@
 ],
 "id": "python",
 "question": {
-  "desc": "What is your Environment?",
+  "desc": "Which Python framework do you use?",
   "type": "select",
-  "entityID": "environment",
+  "entityID": "framework",
   "options": [
     {
-      "key": "vm",
-      "label": "VM",
-      "imgUrl": "/Logos/vm.svg",
-      "link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
+      "key": "flask",
+      "label": "Flask",
+      "imgUrl": "/Logos/flask.svg",
+      "link": "https://signoz.io/docs/instrumentation/opentelemetry-flask/",
+      "question": {
+        "desc": "What is your Environment?",
+        "type": "select",
+        "entityID": "environment",
+        "options": [
+          {
+            "key": "vm",
+            "label": "VM",
+            "imgUrl": "/Logos/vm.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-flask/"
+          },
+          {
+            "key": "k8s",
+            "label": "Kubernetes",
+            "imgUrl": "/Logos/kubernetes.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-flask/"
+          }
+        ]
+      }
     },
     {
-      "key": "k8s",
-      "label": "Kubernetes",
-      "imgUrl": "/Logos/kubernetes.svg",
-      "link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
+      "key": "django",
+      "label": "Django",
+      "imgUrl": "/Logos/django.svg",
+      "link": "https://signoz.io/docs/instrumentation/opentelemetry-django/",
+      "question": {
+        "desc": "What is your Environment?",
+        "type": "select",
+        "entityID": "environment",
+        "options": [
+          {
+            "key": "vm",
+            "label": "VM",
+            "imgUrl": "/Logos/vm.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-django/"
+          },
+          {
+            "key": "k8s",
+            "label": "Kubernetes",
+            "imgUrl": "/Logos/kubernetes.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-django/"
+          }
+        ]
+      }
     },
     {
-      "key": "windows",
-      "label": "Windows",
-      "imgUrl": "/Logos/windows.svg",
-      "link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
+      "key": "fastapi",
+      "label": "FastAPI",
+      "imgUrl": "/Logos/fastapi.svg",
+      "link": "https://signoz.io/docs/instrumentation/opentelemetry-fastapi/",
+      "question": {
+        "desc": "What is your Environment?",
+        "type": "select",
+        "entityID": "environment",
+        "options": [
+          {
+            "key": "vm",
+            "label": "VM",
+            "imgUrl": "/Logos/vm.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-fastapi/"
+          },
+          {
+            "key": "k8s",
+            "label": "Kubernetes",
+            "imgUrl": "/Logos/kubernetes.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-fastapi/"
+          }
+        ]
+      }
     },
     {
-      "key": "docker",
-      "label": "Docker",
-      "imgUrl": "/Logos/docker.svg",
-      "link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
+      "key": "falcon",
+      "label": "Falcon",
+      "imgUrl": "/Logos/falcon.svg",
+      "link": "https://signoz.io/docs/instrumentation/opentelemetry-falcon/",
+      "question": {
+        "desc": "What is your Environment?",
+        "type": "select",
+        "entityID": "environment",
+        "options": [
+          {
+            "key": "vm",
+            "label": "VM",
+            "imgUrl": "/Logos/vm.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-falcon/"
+          },
+          {
+            "key": "k8s",
+            "label": "Kubernetes",
+            "imgUrl": "/Logos/kubernetes.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-falcon/"
+          }
+        ]
+      }
+    },
+    {
+      "key": "others",
+      "label": "Others",
+      "imgUrl": "/Logos/python-others.svg",
+      "link": "https://signoz.io/docs/instrumentation/opentelemetry-python/",
+      "question": {
+        "desc": "What is your Environment?",
+        "type": "select",
+        "entityID": "environment",
+        "options": [
+          {
+            "key": "vm",
+            "label": "VM",
+            "imgUrl": "/Logos/vm.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
+          },
+          {
+            "key": "k8s",
+            "label": "Kubernetes",
+            "imgUrl": "/Logos/kubernetes.svg",
+            "link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
+          }
+        ]
+      }
     }
   ]
 }

@@ -5323,7 +5422,7 @@
 "imgUrl": "/Logos/logs.svg",
 "tags": [
-  "Frontend Monitoring",
-  "logs"
+  "logs"
 ],
 "module": "logs",
 "relatedSearchKeywords": [
@@ -1,4 +1,5 @@
-import { FC } from 'react';
+import isEqual from 'lodash-es/isEqual';
+import { FC, memo } from 'react';
 
 import { PanelTypeVsPanelWrapper } from './constants';
 import { PanelWrapperProps } from './panelWrapper.types';

@@ -55,4 +56,36 @@ function PanelWrapper({
 	);
 }
 
-export default PanelWrapper;
+function arePropsEqual(
+	prevProps: PanelWrapperProps,
+	nextProps: PanelWrapperProps,
+): boolean {
+	// Destructure to separate props that need deep comparison from the rest
+	const {
+		widget: prevWidget,
+		queryResponse: prevQueryResponse,
+		...prevRest
+	} = prevProps;
+	const {
+		widget: nextWidget,
+		queryResponse: nextQueryResponse,
+		...nextRest
+	} = nextProps;
+
+	// Shallow equality check for all other props (primitives, functions, refs, arrays)
+	const restKeys = Object.keys(prevRest) as Array<
+		keyof Omit<PanelWrapperProps, 'widget' | 'queryResponse'>
+	>;
+
+	if (restKeys.some((key) => prevRest[key] !== nextRest[key])) {
+		return false;
+	}
+
+	// Deep equality only for widget config and query response data payload
+	return (
+		isEqual(prevWidget, nextWidget) &&
+		isEqual(prevQueryResponse.data?.payload, nextQueryResponse.data?.payload)
+	);
+}
+
+export default memo(PanelWrapper, arePropsEqual);
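`arePropsEqual` above mixes a shallow pass over most props with a deep check on just the two expensive-to-diff objects. The same shape in a minimal standalone form, assuming lodash's `isEqual` (hypothetical component and prop names):

import isEqual from 'lodash-es/isEqual';
import { memo } from 'react';

type PanelProps = {
	widget: Record<string, unknown>;
	payload?: Record<string, unknown>;
	onClick: () => void;
};

function Panel({ widget }: PanelProps): JSX.Element {
	return <pre>{JSON.stringify(widget)}</pre>;
}

// Shallow-compare cheap props; deep-compare only config-like objects that
// are frequently recreated with identical contents.
function propsEqual(prev: PanelProps, next: PanelProps): boolean {
	return (
		prev.onClick === next.onClick &&
		isEqual(prev.widget, next.widget) &&
		isEqual(prev.payload, next.payload)
	);
}

export default memo(Panel, propsEqual);

The trade-off: `isEqual` on a large query payload is not free, so this wins only when renders are frequent and payload changes are rare.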
@@ -132,11 +132,21 @@ function UplotPanelWrapper({
 		[selectedGraph, widget?.panelTypes, widget?.stackedBarChart],
 	);
 
-	const chartData = getUPlotChartData(
-		queryResponse?.data?.payload,
-		widget.fillSpans,
-		stackedBarChart,
-		hiddenGraph,
-	);
+	// Memoize chartData to prevent unnecessary recalculations
+	const chartData = useMemo(
+		() =>
+			getUPlotChartData(
+				queryResponse?.data?.payload,
+				widget.fillSpans,
+				stackedBarChart,
+				hiddenGraph,
+			),
+		[
+			queryResponse?.data?.payload,
+			widget.fillSpans,
+			stackedBarChart,
+			hiddenGraph,
+		],
+	);
 
 	useEffect(() => {
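The memoization above only pays off when the inputs themselves are referentially stable between renders; otherwise `useMemo` recomputes every time anyway. A tiny sketch of the pattern (hypothetical transform, not the real `getUPlotChartData`):

import { useMemo } from 'react';

function useChartSeries(
	payload: number[] | undefined,
	fillSpans: boolean,
): number[] {
	// Recomputed only when payload identity or fillSpans value changes
	return useMemo(
		() => (payload ?? []).map((v) => (fillSpans && Number.isNaN(v) ? 0 : v)),
		[payload, fillSpans],
	);
}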
@@ -77,6 +77,20 @@ function MessagingQueuesGraph(): JSX.Element {
 			});
 		}
 	};
 
+	const onClickHandler = useCallback(
+		(
+			xValue: number,
+			_yValue: number,
+			_mouseX: number,
+			_mouseY: number,
+			data?: any,
+		): void => {
+			setSelectedTimelineQuery(urlQuery, xValue, location, history, data);
+		},
+		[urlQuery, location, history],
+	);
+
 	return (
 		<Card
 			isDarkMode={isDarkMode}

@@ -86,9 +100,7 @@ function MessagingQueuesGraph(): JSX.Element {
 			<GridCard
 				widget={widgetData}
 				headerMenuList={[...ViewMenuAction]}
-				onClickHandler={(xValue, _yValue, _mouseX, _mouseY, data): void => {
-					setSelectedTimelineQuery(urlQuery, xValue, location, history, data);
-				}}
+				onClickHandler={onClickHandler}
 				onDragSelect={onDragSelect}
 				customTooltipElement={messagingQueueCustomTooltipText()}
 				dataAvailable={checkIfDataExists}
@@ -18,16 +18,6 @@ jest.mock('api/browser/localstorage/get', () => ({
 	default: jest.fn((key: string) => mockLocalStorage[key] || null),
 }));
 
-const mockLogsColumns = [
-	{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
-	{ name: 'body', signal: 'logs', fieldContext: 'log' },
-];
-
-const mockTracesColumns = [
-	{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
-	{ name: 'name', signal: 'traces', fieldContext: 'span' },
-];
-
 describe('logsLoaderConfig', () => {
 	// Save original location object
 	const originalWindowLocation = window.location;

@@ -167,83 +157,4 @@ describe('logsLoaderConfig', () => {
 			} as FormattingOptions,
 		});
 	});
-
-	describe('Column validation - filtering Traces columns', () => {
-		it('should filter out Traces columns (name with traces signal) from URL', async () => {
-			const mixedColumns = [...mockLogsColumns, ...mockTracesColumns];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: mixedColumns,
-				}),
-			)}`;
-
-			const result = await logsLoaderConfig.url();
-
-			// Should only keep logs columns
-			expect(result.columns).toEqual(mockLogsColumns);
-		});
-
-		it('should filter out Traces columns from localStorage', async () => {
-			const tracesColumns = [...mockTracesColumns];
-
-			mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
-				selectColumns: tracesColumns,
-			});
-
-			const result = await logsLoaderConfig.local();
-
-			// Should filter out all Traces columns
-			expect(result.columns).toEqual([]);
-		});
-
-		it('should accept valid Logs columns from URL', async () => {
-			const logsColumns = [...mockLogsColumns];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: logsColumns,
-				}),
-			)}`;
-
-			const result = await logsLoaderConfig.url();
-
-			expect(result.columns).toEqual(logsColumns);
-		});
-
-		it('should fall back to defaults when all columns are filtered out from URL', async () => {
-			const tracesColumns = [...mockTracesColumns];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: tracesColumns,
-				}),
-			)}`;
-
-			const result = await logsLoaderConfig.url();
-
-			// Should return empty array, which triggers fallback to defaults in preferencesLoader
-			expect(result.columns).toEqual([]);
-		});
-
-		it('should handle columns without signal field (legacy data)', async () => {
-			const columnsWithoutSignal = [
-				{ name: 'body', fieldContext: 'log' },
-				{ name: 'service.name', fieldContext: 'resource' },
-			];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: columnsWithoutSignal,
-				}),
-			)}`;
-
-			const result = await logsLoaderConfig.url();
-
-			// Without signal field, columns pass through validation
-			// This matches the current implementation behavior where only columns
-			// with signal !== 'logs' are filtered out
-			expect(result.columns).toEqual(columnsWithoutSignal);
-		});
-	});
 });
@@ -1,4 +1,3 @@
-/* eslint-disable sonarjs/no-duplicate-string */
 import { LOCALSTORAGE } from 'constants/localStorage';
 import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
 import {

@@ -127,112 +126,4 @@ describe('tracesLoaderConfig', () => {
 			columns: defaultTraceSelectedColumns as TelemetryFieldKey[],
 		});
 	});
-
-	describe('Column validation - filtering Logs columns', () => {
-		it('should filter out Logs columns (body) from URL', async () => {
-			const logsColumns = [
-				{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
-				{ name: 'body', signal: 'logs', fieldContext: 'log' },
-			];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: logsColumns,
-				}),
-			)}`;
-
-			const result = await tracesLoaderConfig.url();
-
-			// Should filter out all Logs columns
-			expect(result.columns).toEqual([]);
-		});
-
-		it('should filter out Logs columns (timestamp with logs signal) from URL', async () => {
-			const mixedColumns = [
-				{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
-				{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
-			];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: mixedColumns,
-				}),
-			)}`;
-
-			const result = await tracesLoaderConfig.url();
-
-			// Should only keep trace columns
-			expect(result.columns).toEqual([
-				{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
-			]);
-		});
-
-		it('should filter out Logs columns from localStorage', async () => {
-			const logsColumns = [
-				{ name: 'body', signal: 'logs', fieldContext: 'log' },
-				{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
-			];
-
-			mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = JSON.stringify({
-				selectColumns: logsColumns,
-			});
-
-			const result = await tracesLoaderConfig.local();
-
-			// Should filter out all Logs columns
-			expect(result.columns).toEqual([]);
-		});
-
-		it('should accept valid Trace columns from URL', async () => {
-			const traceColumns = [
-				{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
-				{ name: 'name', signal: 'traces', fieldContext: 'span' },
-			];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: traceColumns,
-				}),
-			)}`;
-
-			const result = await tracesLoaderConfig.url();
-
-			expect(result.columns).toEqual(traceColumns);
-		});
-
-		it('should fall back to defaults when all columns are filtered out from URL', async () => {
-			const logsColumns = [{ name: 'body', signal: 'logs' }];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: logsColumns,
-				}),
-			)}`;
-
-			const result = await tracesLoaderConfig.url();
-
-			// Should return empty array, which triggers fallback to defaults in preferencesLoader
-			expect(result.columns).toEqual([]);
-		});
-
-		it('should handle columns without signal field (legacy data)', async () => {
-			const columnsWithoutSignal = [
-				{ name: 'service.name', fieldContext: 'resource' },
-				{ name: 'body', fieldContext: 'log' },
-			];
-
-			mockedLocation.search = `?options=${encodeURIComponent(
-				JSON.stringify({
-					selectColumns: columnsWithoutSignal,
-				}),
-			)}`;
-
-			const result = await tracesLoaderConfig.url();
-
-			// Without signal field, columns pass through validation
-			// This matches the current implementation behavior where only columns
-			// with signal !== 'traces' are filtered out
-			expect(result.columns).toEqual(columnsWithoutSignal);
-		});
-	});
 });
@@ -8,18 +8,6 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
 
 import { FormattingOptions } from '../types';
 
-/**
- * Validates if a column is valid for Logs Explorer
- * Filters out Traces-specific columns that would cause query failures
- */
-const isValidLogColumn = (col: {
-	name?: string;
-	signal?: string;
-	[key: string]: unknown;
-}): boolean =>
-	// If column has signal field, it must be 'logs'
-	!(col?.signal && col.signal !== 'logs');
-
 // --- LOGS preferences loader config ---
 const logsLoaders = {
 	local: (): {

@@ -30,14 +18,8 @@ const logsLoaders = {
 		if (local) {
 			try {
 				const parsed = JSON.parse(local);
-
-				const localColumns = parsed.selectColumns || [];
-
-				// Filter out invalid columns (e.g., Logs columns that might have been incorrectly stored)
-				const validLogColumns = localColumns.filter(isValidLogColumn);
-
 				return {
-					columns: validLogColumns.length > 0 ? validLogColumns : [],
+					columns: parsed.selectColumns || [],
 					formatting: {
 						maxLines: parsed.maxLines ?? 2,
 						format: parsed.format ?? 'table',

@@ -56,14 +38,8 @@ const logsLoaders = {
 		const urlParams = new URLSearchParams(window.location.search);
 		try {
 			const options = JSON.parse(urlParams.get('options') || '{}');
-
-			const urlColumns = options.selectColumns || [];
-
-			// Filter out invalid columns (e.g., Logs columns that might have been incorrectly stored)
-			const validLogColumns = urlColumns.filter(isValidLogColumn);
-
 			return {
-				columns: validLogColumns.length > 0 ? validLogColumns : [],
+				columns: options.selectColumns || [],
 				formatting: {
 					maxLines: options.maxLines ?? 2,
 					format: options.format ?? 'table',
@@ -5,18 +5,6 @@ import { LOCALSTORAGE } from 'constants/localStorage';
 import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
 import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
 
-/**
- * Validates if a column is valid for Traces Explorer
- * Filters out Logs-specific columns that would cause query failures
- */
-const isValidTraceColumn = (col: {
-	name?: string;
-	signal?: string;
-	[key: string]: unknown;
-}): boolean =>
-	// If column has signal field, it must be 'traces'
-	!(col?.signal && col.signal !== 'traces');
-
 // --- TRACES preferences loader config ---
 const tracesLoaders = {
 	local: (): {

@@ -26,13 +14,8 @@ const tracesLoaders = {
 		if (local) {
 			try {
 				const parsed = JSON.parse(local);
-				const localColumns = parsed.selectColumns || [];
-
-				// Filter out invalid columns (e.g., Logs columns that might have been incorrectly stored)
-				const validTraceColumns = localColumns.filter(isValidTraceColumn);
-
 				return {
-					columns: validTraceColumns.length > 0 ? validTraceColumns : [],
+					columns: parsed.selectColumns || [],
 				};
 			} catch {}
 		}

@@ -44,15 +27,8 @@ const tracesLoaders = {
 		const urlParams = new URLSearchParams(window.location.search);
 		try {
 			const options = JSON.parse(urlParams.get('options') || '{}');
-			const urlColumns = options.selectColumns || [];
-
-			// Filter out invalid columns (e.g., Logs columns)
-			// Only accept columns that are valid for Traces (signal='traces' or columns without signal that aren't logs-specific)
-			const validTraceColumns = urlColumns.filter(isValidTraceColumn);
-
-			// Only return columns if we have valid trace columns, otherwise return empty to fall back to defaults
 			return {
-				columns: validTraceColumns.length > 0 ? validTraceColumns : [],
+				columns: options.selectColumns || [],
 			};
 		} catch {}
 		return { columns: [] };
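The validation that the left-hand side of these loader hunks carries (and the right-hand side drops) reduces to a one-line predicate per signal. A generic sketch of that idea (hypothetical helper, not the repo's exact export):

type Column = { name?: string; signal?: string };

// Columns without a signal pass through (legacy data); otherwise the
// signal must match the explorer the column is being loaded into.
const isValidForSignal = (signal: 'logs' | 'traces') => (col: Column): boolean =>
	!(col?.signal && col.signal !== signal);

// Example: keep only trace-safe columns; an empty result lets the caller
// fall back to default columns.
const validTraceColumns = (cols: Column[]): Column[] =>
	cols.filter(isValidForSignal('traces'));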
@@ -96,11 +96,11 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
 		return err
 	}
 
-	if err := provider.addPreferenceRoutes(router); err != nil {
+	if err := provider.addUserRoutes(router); err != nil {
 		return err
 	}
 
-	if err := provider.addUserRoutes(router); err != nil {
+	if err := provider.addPreferenceRoutes(router); err != nil {
 		return err
 	}
 
@@ -3,7 +3,7 @@ package impldashboard
 import (
 	"context"
 	"maps"
-	"slices"
+	"strings"
 
 	"github.com/SigNoz/signoz/pkg/analytics"
 	"github.com/SigNoz/signoz/pkg/errors"

@@ -11,34 +11,30 @@ import (
 	"github.com/SigNoz/signoz/pkg/modules/dashboard"
 	"github.com/SigNoz/signoz/pkg/modules/organization"
 	"github.com/SigNoz/signoz/pkg/modules/role"
-	"github.com/SigNoz/signoz/pkg/queryparser"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
-	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/roletypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 )
 
 type module struct {
-	store       dashboardtypes.Store
-	settings    factory.ScopedProviderSettings
-	analytics   analytics.Analytics
-	orgGetter   organization.Getter
-	role        role.Module
-	queryParser queryparser.QueryParser
+	store     dashboardtypes.Store
+	settings  factory.ScopedProviderSettings
+	analytics analytics.Analytics
+	orgGetter organization.Getter
+	role      role.Module
 }
 
-func NewModule(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module, queryParser queryparser.QueryParser) dashboard.Module {
+func NewModule(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module) dashboard.Module {
 	scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/modules/impldashboard")
 	return &module{
-		store:       NewStore(sqlstore),
-		settings:    scopedProviderSettings,
-		analytics:   analytics,
-		orgGetter:   orgGetter,
-		role:        role,
-		queryParser: queryParser,
+		store:     NewStore(sqlstore),
+		settings:  scopedProviderSettings,
+		analytics: analytics,
+		orgGetter: orgGetter,
+		role:      role,
 	}
 }

@@ -273,10 +269,13 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
 		return nil, err
 	}
 
+	// Initialize result map for each metric
 	result := make(map[string][]map[string]string)
 
+	// Process the JSON data in Go
 	for _, dashboard := range dashboards {
-		dashData := dashboard.Data
+		var dashData = dashboard.Data
+
 		dashTitle, _ := dashData["title"].(string)
 		widgets, ok := dashData["widgets"].([]interface{})
 		if !ok {

@@ -297,22 +296,44 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
 			continue
 		}
 
-		// Track which metrics were found in this widget
-		foundMetrics := make(map[string]bool)
+		builder, ok := query["builder"].(map[string]interface{})
+		if !ok {
+			continue
+		}
 
-		// Check all three query types
-		module.checkBuilderQueriesForMetricNames(query, metricNames, foundMetrics)
-		module.checkClickHouseQueriesForMetricNames(ctx, query, metricNames, foundMetrics)
-		module.checkPromQLQueriesForMetricNames(ctx, query, metricNames, foundMetrics)
+		queryData, ok := builder["queryData"].([]interface{})
+		if !ok {
+			continue
+		}
 
-		// Add widget to results for all found metrics
-		for metricName := range foundMetrics {
-			result[metricName] = append(result[metricName], map[string]string{
-				"dashboard_id":   dashboard.ID,
-				"widget_name":    widgetTitle,
-				"widget_id":      widgetID,
-				"dashboard_name": dashTitle,
-			})
+		for _, qd := range queryData {
+			data, ok := qd.(map[string]interface{})
+			if !ok {
+				continue
+			}
+
+			if dataSource, ok := data["dataSource"].(string); !ok || dataSource != "metrics" {
+				continue
+			}
+
+			aggregateAttr, ok := data["aggregateAttribute"].(map[string]interface{})
+			if !ok {
+				continue
+			}
+
+			if key, ok := aggregateAttr["key"].(string); ok {
+				// Check if this metric is in our list of interest
+				for _, metricName := range metricNames {
+					if strings.TrimSpace(key) == metricName {
+						result[metricName] = append(result[metricName], map[string]string{
+							"dashboard_id":   dashboard.ID,
+							"widget_name":    widgetTitle,
+							"widget_id":      widgetID,
+							"dashboard_name": dashTitle,
+						})
+					}
+				}
+			}
 		}
 	}
 }

@@ -340,120 +361,3 @@ func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
 func (module *module) MustGetTypeables() []authtypes.Typeable {
 	return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
 }
-
-// checkBuilderQueriesForMetricNames checks builder.queryData[] for aggregations[].metricName
-func (module *module) checkBuilderQueriesForMetricNames(query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
-	builder, ok := query["builder"].(map[string]interface{})
-	if !ok {
-		return
-	}
-
-	queryData, ok := builder["queryData"].([]interface{})
-	if !ok {
-		return
-	}
-
-	for _, qd := range queryData {
-		data, ok := qd.(map[string]interface{})
-		if !ok {
-			continue
-		}
-
-		// Check dataSource is metrics
-		if dataSource, ok := data["dataSource"].(string); !ok || dataSource != "metrics" {
-			continue
-		}
-
-		// Check aggregations[].metricName
-		aggregations, ok := data["aggregations"].([]interface{})
-		if !ok {
-			continue
-		}
-
-		for _, agg := range aggregations {
-			aggMap, ok := agg.(map[string]interface{})
-			if !ok {
-				continue
-			}
-
-			metricName, ok := aggMap["metricName"].(string)
-			if !ok || metricName == "" {
-				continue
-			}
-
-			if slices.Contains(metricNames, metricName) {
-				foundMetrics[metricName] = true
-			}
-		}
-	}
-}
-
-// checkClickHouseQueriesForMetricNames checks clickhouse_sql[] array for metric names in query strings
-func (module *module) checkClickHouseQueriesForMetricNames(ctx context.Context, query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
-	clickhouseSQL, ok := query["clickhouse_sql"].([]interface{})
-	if !ok {
-		return
-	}
-
-	for _, chQuery := range clickhouseSQL {
-		chQueryMap, ok := chQuery.(map[string]interface{})
-		if !ok {
-			continue
-		}
-
-		queryStr, ok := chQueryMap["query"].(string)
-		if !ok || queryStr == "" {
-			continue
-		}
-
-		// Parse query to extract metric names
-		result, err := module.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypeClickHouseSQL, queryStr)
-		if err != nil {
-			// Log warning and continue - parsing errors shouldn't break the search
-			module.settings.Logger().WarnContext(ctx, "failed to parse ClickHouse query", "query", queryStr, "error", err)
-			continue
-		}
-
-		// Check if any of the search metric names are in the extracted metric names
-		for _, metricName := range metricNames {
-			if slices.Contains(result.MetricNames, metricName) {
-				foundMetrics[metricName] = true
-			}
-		}
-	}
-}
-
-// checkPromQLQueriesForMetricNames checks promql[] array for metric names in query strings
-func (module *module) checkPromQLQueriesForMetricNames(ctx context.Context, query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
-	promQL, ok := query["promql"].([]interface{})
-	if !ok {
-		return
-	}
-
-	for _, promQuery := range promQL {
-		promQueryMap, ok := promQuery.(map[string]interface{})
-		if !ok {
-			continue
-		}
-
-		queryStr, ok := promQueryMap["query"].(string)
-		if !ok || queryStr == "" {
-			continue
-		}
-
-		// Parse query to extract metric names
-		result, err := module.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypePromQL, queryStr)
-		if err != nil {
-			// Log warning and continue - parsing errors shouldn't break the search
-			module.settings.Logger().WarnContext(ctx, "failed to parse PromQL query", "query", queryStr, "error", err)
-			continue
-		}
-
-		// Check if any of the search metric names are in the extracted metric names
-		for _, metricName := range metricNames {
-			if slices.Contains(result.MetricNames, metricName) {
-				foundMetrics[metricName] = true
-			}
-		}
-	}
-}
@@ -137,28 +137,6 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
 	render.Success(rw, http.StatusOK, metadata)
 }
 
-func (h *handler) GetMetricDashboards(rw http.ResponseWriter, req *http.Request) {
-	claims, err := authtypes.ClaimsFromContext(req.Context())
-	if err != nil {
-		render.Error(rw, err)
-		return
-	}
-
-	metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
-	if metricName == "" {
-		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
-		return
-	}
-
-	orgID := valuer.MustNewUUID(claims.OrgID)
-	out, err := h.module.GetMetricDashboards(req.Context(), orgID, metricName)
-	if err != nil {
-		render.Error(rw, err)
-		return
-	}
-	render.Success(rw, http.StatusOK, out)
-}
-
 func (h *handler) GetMetricHighlights(rw http.ResponseWriter, req *http.Request) {
 	claims, err := authtypes.ClaimsFromContext(req.Context())
 	if err != nil {

@@ -187,6 +165,7 @@ func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request)
 		render.Error(rw, err)
 		return
 	}
 
 	var in metricsexplorertypes.MetricAttributesRequest
 	if err := binding.JSON.BindBody(req.Body, &in); err != nil {
 		render.Error(rw, err)
@@ -11,7 +11,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/cache"
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/factory"
-	"github.com/SigNoz/signoz/pkg/modules/dashboard"
 	"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/telemetrymetrics"

@@ -33,12 +32,11 @@ type module struct {
 	condBuilder qbtypes.ConditionBuilder
 	logger      *slog.Logger
 	cache       cache.Cache
-	dashboardModule dashboard.Module
 	config      metricsexplorer.Config
 }
 
 // NewModule constructs the metrics module with the provided dependencies.
-func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, dashboardModule dashboard.Module, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
+func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
 	fieldMapper := telemetrymetrics.NewFieldMapper()
 	condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
 	return &module{

@@ -48,7 +46,6 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
 		logger:                 providerSettings.Logger,
 		telemetryMetadataStore: telemetryMetadataStore,
 		cache:                  cache,
-		dashboardModule:        dashboardModule,
 		config:                 cfg,
 	}
 }

@@ -197,34 +194,6 @@ func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, re
 	return nil
 }
 
-func (m *module) GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error) {
-	if metricName == "" {
-		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName is required")
-	}
-
-	data, err := m.dashboardModule.GetByMetricNames(ctx, orgID, []string{metricName})
-	if err != nil {
-		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get dashboards for metric")
-	}
-
-	dashboards := make([]metricsexplorertypes.MetricDashboard, 0)
-	if dashboardList, ok := data[metricName]; ok {
-		dashboards = make([]metricsexplorertypes.MetricDashboard, 0, len(dashboardList))
-		for _, item := range dashboardList {
-			dashboards = append(dashboards, metricsexplorertypes.MetricDashboard{
-				DashboardName: item["dashboard_name"],
-				DashboardID:   item["dashboard_id"],
-				WidgetID:      item["widget_id"],
-				WidgetName:    item["widget_name"],
-			})
-		}
-	}
-
-	return &metricsexplorertypes.MetricDashboardsResponse{
-		Dashboards: dashboards,
-	}, nil
-}
-
 // GetMetricHighlights returns highlights for a metric including data points, last received, total time series, and active time series.
 func (m *module) GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error) {
 	if metricName == "" {
@@ -15,7 +15,6 @@ type Handler interface {
 	GetMetricMetadata(http.ResponseWriter, *http.Request)
 	GetMetricAttributes(http.ResponseWriter, *http.Request)
 	UpdateMetricMetadata(http.ResponseWriter, *http.Request)
-	GetMetricDashboards(http.ResponseWriter, *http.Request)
 	GetMetricHighlights(http.ResponseWriter, *http.Request)
 }

@@ -25,7 +24,6 @@ type Module interface {
 	GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
 	GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
 	UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
-	GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error)
 	GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error)
 	GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error)
 }
@@ -630,7 +630,6 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
    router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricMetadata)).Methods(http.MethodGet)
    router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.EditAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
    router.HandleFunc("/api/v2/metric/highlights", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricHighlights)).Methods(http.MethodGet)
    router.HandleFunc("/api/v2/metric/dashboards", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricDashboards)).Methods(http.MethodGet)
}

func Intersection(a, b []int) (c []int) {
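For orientation, a minimal sketch of how a client would have called the dashboards endpoint removed in this hunk. The base URL, the auth header, and the metricName query parameter are illustrative assumptions; only the route path comes from the diff above:

package main

import (
    "fmt"
    "io"
    "net/http"
    "net/url"
)

// fetchMetricDashboards calls the (removed) dashboards endpoint.
// Base URL, header name, and the "metricName" query parameter are
// assumptions for illustration; only the path is taken from the routes.
func fetchMetricDashboards(baseURL, apiKey, metric string) (string, error) {
    u := baseURL + "/api/v2/metric/dashboards?metricName=" + url.QueryEscape(metric)
    req, err := http.NewRequest(http.MethodGet, u, nil)
    if err != nil {
        return "", err
    }
    req.Header.Set("SIGNOZ-API-KEY", apiKey) // assumed auth header
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()
    body, err := io.ReadAll(resp.Body)
    return string(body), err
}

func main() {
    out, err := fetchMetricDashboards("http://localhost:8080", "<api-key>", "http_server_duration")
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    fmt.Println(out)
}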
@@ -1,224 +0,0 @@
package rules

import (
    "strings"
    "time"

    "github.com/SigNoz/signoz/pkg/types/metrictypes"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
)

// CalculateEvalDelay determines if the default evaluation delay can be removed (set to 0)
// based on the rule's match type, compare operator, and aggregation type.
// If the combination ensures that new data will not invalidate the alert condition
// (e.g. values only increase for a "Greater Than" check), the delay is removed.
//
// A combination is considered "safe" if new data arriving late cannot invalidate
// a previously triggered alert condition. This happens when:
// - The aggregation function is monotonic (only increases or only decreases)
// - The comparison operator aligns with the monotonic direction
// - The match type allows the safety property to hold
//
// Safe combinations include:
// - Min aggregation + Below/BelowOrEq operators (Min can only decrease)
// - Max aggregation + Above/AboveOrEq operators (Max can only increase)
// - Count/CountDistinct + Above/AboveOrEq operators (Count can only increase)
//
// Returns 0 if all queries are safe, otherwise returns defaultDelay.
func CalculateEvalDelay(rule *ruletypes.PostableRule, defaultDelay time.Duration) time.Duration {
    // Phase 1: Validate rule condition
    if !isRuleConditionValid(rule) {
        return defaultDelay
    }

    // Phase 2: Get match type and compare operator from thresholds
    matchType, compareOp, ok := getThresholdMatchTypeAndCompareOp(rule)
    if !ok {
        return defaultDelay
    }

    // Phase 3: Check if all queries are safe
    for _, query := range rule.RuleCondition.CompositeQuery.Queries {
        if !isQuerySafe(query, matchType, compareOp) {
            return defaultDelay
        }
    }

    // Phase 4: All queries are safe, delay can be removed
    return 0
}
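The single call site, removed elsewhere in this compare, passed the manager's configured default so that unsafe rules simply kept their existing delay:

    evalDelay := CalculateEvalDelay(opts.Rule, opts.ManagerOpts.EvalDelay)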
// isRuleConditionValid checks if the rule condition is valid for delay calculation.
// Returns false if the rule condition is nil, has no queries, or has invalid thresholds.
func isRuleConditionValid(rule *ruletypes.PostableRule) bool {
    if rule.RuleCondition == nil || rule.RuleCondition.CompositeQuery == nil {
        return false
    }

    // BuilderQueries, PromQL Queries, ClickHouse SQL Queries attributes of CompositeQuery
    // are not supported for now, only Queries attribute is supported
    if len(rule.RuleCondition.CompositeQuery.Queries) == 0 {
        return false
    }

    // Validate that thresholds exist and contain valid match type and compare operator
    matchType, compareOp, ok := getThresholdMatchTypeAndCompareOp(rule)
    if !ok {
        return false
    }

    if matchType == ruletypes.MatchTypeNone || compareOp == ruletypes.CompareOpNone {
        return false
    }

    return true
}

// getThresholdMatchTypeAndCompareOp extracts match type and compare operator from the rule's thresholds.
// Returns the match type, compare operator, and a boolean indicating success.
// All thresholds share the same match type and compare operator, so we use the first threshold's values.
func getThresholdMatchTypeAndCompareOp(rule *ruletypes.PostableRule) (ruletypes.MatchType, ruletypes.CompareOp, bool) {
    if rule.RuleCondition == nil || rule.RuleCondition.Thresholds == nil {
        return ruletypes.MatchTypeNone, ruletypes.CompareOpNone, false
    }

    // Get the threshold interface
    threshold, err := rule.RuleCondition.Thresholds.GetRuleThreshold()
    if err != nil {
        return ruletypes.MatchTypeNone, ruletypes.CompareOpNone, false
    }

    // Cast to BasicRuleThresholds (only supported kind)
    basicThresholds, ok := threshold.(ruletypes.BasicRuleThresholds)
    if !ok || len(basicThresholds) == 0 {
        return ruletypes.MatchTypeNone, ruletypes.CompareOpNone, false
    }

    // Use first threshold's MatchType and CompareOp (all thresholds share the same values)
    matchType := basicThresholds[0].MatchType
    compareOp := basicThresholds[0].CompareOp

    return matchType, compareOp, true
}

// aggregationExpressionToTimeAggregation converts the aggregation expression to the corresponding time aggregation
// based on the expression
// if the expression is not a valid aggregation expression, it returns the unspecified time aggregation
// Note: Longer/more specific prefixes (e.g., "count_distinct") must be checked before shorter ones (e.g., "count")
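// For example, "count_distinct(user.id)" maps to TimeAggregationCountDistinct,
// while "count(span_id)" maps to TimeAggregationCount (illustrative expressions).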
func aggregationExpressionToTimeAggregation(expression string) metrictypes.TimeAggregation {
    expression = strings.TrimSpace(strings.ToLower(expression))
    switch {
    case strings.HasPrefix(expression, "count_distinct"):
        return metrictypes.TimeAggregationCountDistinct
    case strings.HasPrefix(expression, "count"):
        return metrictypes.TimeAggregationCount
    case strings.HasPrefix(expression, "min"):
        return metrictypes.TimeAggregationMin
    case strings.HasPrefix(expression, "max"):
        return metrictypes.TimeAggregationMax
    case strings.HasPrefix(expression, "avg"):
        return metrictypes.TimeAggregationAvg
    case strings.HasPrefix(expression, "sum"):
        return metrictypes.TimeAggregationSum
    case strings.HasPrefix(expression, "rate"):
        return metrictypes.TimeAggregationRate
    case strings.HasPrefix(expression, "increase"):
        return metrictypes.TimeAggregationIncrease
    case strings.HasPrefix(expression, "latest"):
        return metrictypes.TimeAggregationLatest
    default:
        return metrictypes.TimeAggregationUnspecified
    }
}
// extractTimeAggFromQuerySpec extracts the time aggregation from the query spec of the QueryEnvelope
func extractTimeAggFromQuerySpec(spec any) []metrictypes.TimeAggregation {
    timeAggs := []metrictypes.TimeAggregation{}

    // Extract the time aggregation from the query spec
    // based on different types of query spec
    switch spec := spec.(type) {
    case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
        for _, agg := range spec.Aggregations {
            timeAggs = append(timeAggs, agg.TimeAggregation)
        }
    // the log and trace aggregations don't store the time aggregation directly but an expression for the aggregation,
    // so we need to convert the expression to the corresponding time aggregation
    case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
        for _, agg := range spec.Aggregations {
            timeAggs = append(timeAggs, aggregationExpressionToTimeAggregation(agg.Expression))
        }
    case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
        for _, agg := range spec.Aggregations {
            timeAggs = append(timeAggs, aggregationExpressionToTimeAggregation(agg.Expression))
        }
    }
    return timeAggs
}

// isQuerySafe determines if a single query is safe to remove the eval delay.
// A query is safe only if it's a Builder query (with MetricAggregation, LogAggregation, or TraceAggregation type)
// and all its aggregations are safe.
func isQuerySafe(query qbtypes.QueryEnvelope, matchType ruletypes.MatchType, compareOp ruletypes.CompareOp) bool {
    // We only handle Builder Queries for now
    if query.Type != qbtypes.QueryTypeBuilder {
        return false
    }

    // extract time aggregations from the query spec
    timeAggs := extractTimeAggFromQuerySpec(query.Spec)

    // A query must have at least one aggregation
    if len(timeAggs) == 0 {
        return false
    }

    // All aggregations in the query must be safe
    for _, timeAgg := range timeAggs {
        if !isAggregationSafe(timeAgg, matchType, compareOp) {
            return false
        }
    }

    return true
}

// isAggregationSafe checks if the aggregation is safe to remove the eval delay
func isAggregationSafe(timeAgg metrictypes.TimeAggregation, matchType ruletypes.MatchType, compareOp ruletypes.CompareOp) bool {
    switch timeAgg {
    case metrictypes.TimeAggregationMin:
        // Group: Min, MinIf
        // Value can only go down or remain the same.
        if matchType == ruletypes.AtleastOnce || matchType == ruletypes.AllTheTimes {
            if compareOp == ruletypes.ValueIsBelow || compareOp == ruletypes.ValueBelowOrEq {
                return true
            }
        }

    case metrictypes.TimeAggregationMax:
        // Group: Max, MaxIf
        // Value can only go up or remain the same.
        if matchType == ruletypes.AtleastOnce || matchType == ruletypes.AllTheTimes {
            if compareOp == ruletypes.ValueIsAbove || compareOp == ruletypes.ValueAboveOrEq {
                return true
            }
        }

    case metrictypes.TimeAggregationCount, metrictypes.TimeAggregationCountDistinct:
        // Group: Count
        // Value can only go up or remain the same.
        if matchType == ruletypes.AtleastOnce || matchType == ruletypes.AllTheTimes {
            if compareOp == ruletypes.ValueIsAbove || compareOp == ruletypes.ValueAboveOrEq {
                return true
            }
        }

        // Other aggregations (Sum, Avg, Rate, etc.) are not safe.
    }

    return false
}
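Taken together, the removed file encodes a small monotonicity decision table. A self-contained sketch of the same reasoning, with the SigNoz types replaced by local stand-ins so it runs on its own; names and example values are illustrative, not from the repo:

package main

import "fmt"

type timeAgg string
type compareOp string

const (
    aggMin   timeAgg = "min"
    aggMax   timeAgg = "max"
    aggCount timeAgg = "count"
    aggAvg   timeAgg = "avg"

    opAbove compareOp = ">"
    opBelow compareOp = "<"
)

// safe mirrors the monotonicity argument: late-arriving data cannot flip the
// alert when the aggregate can only move further past the threshold.
func safe(agg timeAgg, op compareOp) bool {
    switch agg {
    case aggMin:
        return op == opBelow // min only decreases as data arrives
    case aggMax, aggCount:
        return op == opAbove // max and count only increase
    }
    return false // sum, avg, rate, ... can move either way
}

func main() {
    fmt.Println(safe(aggMax, opAbove)) // true: delay can be dropped
    fmt.Println(safe(aggMin, opAbove)) // false: a late low point matters
    fmt.Println(safe(aggAvg, opBelow)) // false: avg is not monotonic
}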
File diff suppressed because it is too large
@@ -154,8 +154,6 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
    if err != nil {
        return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err)
    }
    // calculate eval delay based on rule config
    evalDelay := CalculateEvalDelay(opts.Rule, opts.ManagerOpts.EvalDelay)

    if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
        // create a threshold rule
@@ -166,7 +164,7 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
            opts.Reader,
            opts.Querier,
            opts.SLogger,
            WithEvalDelay(evalDelay),
            WithEvalDelay(opts.ManagerOpts.EvalDelay),
            WithSQLStore(opts.SQLStore),
        )
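WithEvalDelay follows Go's functional-options pattern. A minimal self-contained sketch of how such an option is typically wired; the ruleOpts, RuleOption, and newRule names and the default value are illustrative stand-ins, not SigNoz's actual definitions:

package main

import (
    "fmt"
    "time"
)

// ruleOpts collects optional settings; a RuleOption mutates it.
// Both names are illustrative, not the repo's real types.
type ruleOpts struct {
    evalDelay time.Duration
}

type RuleOption func(*ruleOpts)

// WithEvalDelay overrides the evaluation delay.
func WithEvalDelay(d time.Duration) RuleOption {
    return func(o *ruleOpts) { o.evalDelay = d }
}

func newRule(opts ...RuleOption) *ruleOpts {
    o := &ruleOpts{evalDelay: 2 * time.Minute} // assumed default
    for _, opt := range opts {
        opt(o)
    }
    return o
}

func main() {
    r := newRule(WithEvalDelay(0)) // a "safe" rule could drop the delay
    fmt.Println(r.evalDelay)       // 0s
}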
@@ -12,7 +12,6 @@ import (
    "github.com/SigNoz/signoz/pkg/emailing/emailingtest"
    "github.com/SigNoz/signoz/pkg/factory/factorytest"
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/queryparser"
    "github.com/SigNoz/signoz/pkg/sharder"
    "github.com/SigNoz/signoz/pkg/sharder/noopsharder"
    "github.com/SigNoz/signoz/pkg/sqlstore"
@@ -36,9 +35,8 @@ func TestNewHandlers(t *testing.T) {
    require.NoError(t, err)
    tokenizer := tokenizertest.New()
    emailing := emailingtest.New()
    queryParser := queryparser.New(providerSettings)
    require.NoError(t, err)
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})

    handlers := NewHandlers(modules, providerSettings, nil, nil)
@@ -38,7 +38,6 @@ import (
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/modules/user/impluser"
    "github.com/SigNoz/signoz/pkg/querier"
    "github.com/SigNoz/signoz/pkg/queryparser"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/tokenizer"
@@ -80,14 +79,12 @@ func NewModules(
    authNs map[authtypes.AuthNProvider]authn.AuthN,
    authz authz.AuthZ,
    cache cache.Cache,
    queryParser queryparser.QueryParser,
    config Config,
) Modules {
    quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
    orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
    user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, analytics)
    userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
    dashboard := impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil), queryParser)

    return Modules{
        OrgGetter: orgGetter,
@@ -95,7 +92,7 @@ func NewModules(
        Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
        SavedView:  implsavedview.NewModule(sqlstore),
        Apdex:      implapdex.NewModule(sqlstore),
        Dashboard:  dashboard,
        Dashboard:  impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil)),
        User:       user,
        UserGetter: userGetter,
        QuickFilter: quickfilter,
@@ -105,6 +102,6 @@ func NewModules(
        Session:         implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
        SpanPercentile:  implspanpercentile.NewModule(querier, providerSettings),
        Services:        implservices.NewModule(querier, telemetryStore),
        MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, dashboard, providerSettings, config.MetricsExplorer),
        MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings, config.MetricsExplorer),
    }
}
@@ -12,7 +12,6 @@ import (
    "github.com/SigNoz/signoz/pkg/emailing/emailingtest"
    "github.com/SigNoz/signoz/pkg/factory/factorytest"
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/queryparser"
    "github.com/SigNoz/signoz/pkg/sharder"
    "github.com/SigNoz/signoz/pkg/sharder/noopsharder"
    "github.com/SigNoz/signoz/pkg/sqlstore"
@@ -36,9 +35,8 @@ func TestNewModules(t *testing.T) {
    require.NoError(t, err)
    tokenizer := tokenizertest.New()
    emailing := emailingtest.New()
    queryParser := queryparser.New(providerSettings)
    require.NoError(t, err)
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})

    reflectVal := reflect.ValueOf(modules)
    for i := 0; i < reflectVal.NumField(); i++ {
@@ -346,7 +346,7 @@ func New(
    )

    // Initialize all modules
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config)
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, config)

    // Initialize all handlers for the modules
    handlers := NewHandlers(modules, providerSettings, querier, licensing)
@@ -221,19 +221,6 @@ type TreemapResponse struct {
    Samples []TreemapEntry `json:"samples"`
}

// MetricDashboard represents a dashboard/widget referencing a metric.
type MetricDashboard struct {
    DashboardName string `json:"dashboardName"`
    DashboardID   string `json:"dashboardId"`
    WidgetID      string `json:"widgetId"`
    WidgetName    string `json:"widgetName"`
}

// MetricDashboardsResponse represents the response for metric dashboards endpoint.
type MetricDashboardsResponse struct {
    Dashboards []MetricDashboard `json:"dashboards"`
}
// MetricHighlightsResponse is the output structure for the metric highlights endpoint.
type MetricHighlightsResponse struct {
    DataPoints uint64 `json:"dataPoints"`
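To make the removed wire format concrete, a standalone sketch that reproduces the two structs from the diff above and marshals an example response; the field values are made up:

package main

import (
    "encoding/json"
    "fmt"
)

// Local copies of the removed types, reproduced from the diff above.
type MetricDashboard struct {
    DashboardName string `json:"dashboardName"`
    DashboardID   string `json:"dashboardId"`
    WidgetID      string `json:"widgetId"`
    WidgetName    string `json:"widgetName"`
}

type MetricDashboardsResponse struct {
    Dashboards []MetricDashboard `json:"dashboards"`
}

func main() {
    resp := MetricDashboardsResponse{Dashboards: []MetricDashboard{{
        DashboardName: "API Latency", // made-up example values
        DashboardID:   "dash-123",
        WidgetID:      "widget-456",
        WidgetName:    "p99 latency",
    }}}
    out, _ := json.MarshalIndent(resp, "", "  ")
    fmt.Println(string(out)) // prints the indented JSON object with a "dashboards" array
}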