Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-29 17:24:16 +00:00)

Compare commits: main...chore/buil (1 commit, 26256ffd20)
@@ -1,8 +1,52 @@
 {
   "files": [
     {
-      "path": "./build/**.js",
-      "maxSize": "1.2MB"
+      "path": "./build/runtime~*.js",
+      "maxSize": "50KB"
+    },
+    {
+      "path": "./build/vendors-react.*.js",
+      "maxSize": "300KB"
+    },
+    {
+      "path": "./build/vendors-antd.*.js",
+      "maxSize": "1MB"
+    },
+    {
+      "path": "./build/vendors-antd-icons.*.js",
+      "maxSize": "2.5MB"
+    },
+    {
+      "path": "./build/vendors-charts.*.js",
+      "maxSize": "400KB"
+    },
+    {
+      "path": "./build/vendors-react-query.*.js",
+      "maxSize": "100KB"
+    },
+    {
+      "path": "./build/vendors-utilities.*.js",
+      "maxSize": "600KB"
+    },
+    {
+      "path": "./build/vendors-monaco.*.js",
+      "maxSize": "3MB"
+    },
+    {
+      "path": "./build/vendors-common.*.js",
+      "maxSize": "800KB"
+    },
+    {
+      "path": "./build/main.*.js",
+      "maxSize": "500KB"
+    },
+    {
+      "path": "./build/Home.*.js",
+      "maxSize": "300KB"
+    },
+    {
+      "path": "./build/*.js",
+      "maxSize": "1MB"
     }
   ]
 }
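Each entry above pairs an output glob with a hard size ceiling. Real bundlesize-style checkers expand the glob and usually compare the gzipped size; as a minimal sketch of the mechanism only, the hypothetical checker below parses the human-readable ceilings and compares a single resolved file's raw size:

import { statSync } from 'fs';

// Parse '1.2MB' / '50KB' style ceilings into bytes.
function parseSize(size: string): number {
	const match = /^([\d.]+)\s*(KB|MB)$/i.exec(size);
	if (!match) throw new Error(`Unparseable size: ${size}`);
	const value = parseFloat(match[1]);
	return match[2].toUpperCase() === 'MB' ? value * 1024 * 1024 : value * 1024;
}

// Compare one resolved file against its budget; null means the budget holds.
function checkFile(file: string, maxSize: string): string | null {
	const actual = statSync(file).size;
	return actual > parseSize(maxSize)
		? `${file}: ${(actual / 1024).toFixed(1)}KB exceeds ${maxSize}`
		: null;
}

// e.g. checkFile('./build/main.abc123.js', '500KB')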
@@ -132,20 +132,11 @@ function UplotPanelWrapper({
 		[selectedGraph, widget?.panelTypes, widget?.stackedBarChart],
 	);

-	const chartData = useMemo(
-		() =>
-			getUPlotChartData(
-				queryResponse?.data?.payload,
-				widget.fillSpans,
-				stackedBarChart,
-				hiddenGraph,
-			),
-		[
-			queryResponse?.data?.payload,
-			widget.fillSpans,
-			stackedBarChart,
-			hiddenGraph,
-		],
+	const chartData = getUPlotChartData(
+		queryResponse?.data?.payload,
+		widget.fillSpans,
+		stackedBarChart,
+		hiddenGraph,
 	);

 	useEffect(() => {
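Dropping the useMemo means getUPlotChartData now runs on every render of the wrapper. That trade is reasonable when the dependency list changes on most renders anyway, since the memo then only adds comparison and caching overhead. A sketch of the two shapes, with a stub transform standing in for the real one (names borrowed from the diff, the stub is an assumption):

import { useMemo } from 'react';

// Stub with the same shape as the real transform (illustration only).
function getUPlotChartData(payload: unknown, fillSpans?: boolean): number[][] {
	return [];
}

// Memoized: recomputes only when a dependency changes, but the dependencies
// are compared on every render and a stale list silently caches wrong data.
function useChartDataMemoized(payload: unknown, fillSpans?: boolean): number[][] {
	return useMemo(() => getUPlotChartData(payload, fillSpans), [payload, fillSpans]);
}

// Direct: recomputes on every render; worthwhile when the transform is cheap
// relative to render frequency.
function useChartDataDirect(payload: unknown, fillSpans?: boolean): number[][] {
	return getUPlotChartData(payload, fillSpans);
}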
@@ -302,7 +293,7 @@ function UplotPanelWrapper({
 			)}
 			{isFullViewMode && setGraphVisibility && !stackedBarChart && (
 				<GraphManager
-					data={chartData}
+					data={getUPlotChartData(queryResponse?.data?.payload, widget.fillSpans)}
 					name={widget.id}
 					options={options}
 					yAxisUnit={widget.yAxisUnit}
@@ -1,6 +1,8 @@
 import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
 import { AxiosError, AxiosResponse } from 'axios';
 import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
+import { ErrorResponse } from 'react-router-dom-v5-compat';
+import { SuccessResponse } from 'types/api';
 import { QueryKeyValueSuggestionsResponseProps } from 'types/api/querySuggestions/types';

 export const useGetQueryKeyValueSuggestions = ({
@@ -9,15 +11,13 @@ export const useGetQueryKeyValueSuggestions = ({
 	searchText,
 	signalSource,
 	metricName,
 	options,
 }: {
 	key: string;
 	signal: 'traces' | 'logs' | 'metrics';
 	searchText?: string;
 	signalSource?: 'meter' | '';
 	options?: UseQueryOptions<
-		AxiosResponse<QueryKeyValueSuggestionsResponseProps>,
-		AxiosError
+		SuccessResponse<QueryKeyValueSuggestionsResponseProps> | ErrorResponse
 	>;
 	metricName?: string;
 }): UseQueryResult<
@@ -41,5 +41,4 @@ export const useGetQueryKeyValueSuggestions = ({
 			signalSource: signalSource as 'meter' | '',
 			metricName: metricName || '',
 		}),
-	...options,
 });
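The hook's options generic switches from the raw AxiosResponse/AxiosError pair to the app's response envelope union. A sketch of what that typing looks like for callers, with simplified stand-ins for SuccessResponse and ErrorResponse (assumptions; the real types live behind the imports shown above):

import { AxiosError } from 'axios';
import { UseQueryOptions } from 'react-query';

// Simplified stand-ins for the app's response envelope types.
interface SuccessResponse<T> { statusCode: number; payload: T; }
interface ErrorResponse { error: string; }
interface Suggestions { values: string[]; }

// Callers now type react-query options against the envelope union rather
// than the raw Axios response.
type SuggestionQueryOptions = UseQueryOptions<
	SuccessResponse<Suggestions> | ErrorResponse,
	AxiosError
>;

const opts: SuggestionQueryOptions = {
	enabled: true,
	staleTime: 30_000,
};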
@@ -1,6 +1,6 @@
 import { themeColors } from 'constants/theme';
 import getLabelName from 'lib/getLabelName';
-import { isUndefined } from 'lodash-es';
+import { cloneDeep, isUndefined } from 'lodash-es';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
 import { QueryData } from 'types/api/widgets/getQuery';
@@ -8,7 +8,7 @@ import { normalizePlotValue } from './dataUtils';
 import { generateColor } from './generateColor';

 function getXAxisTimestamps(seriesList: QueryData[]): number[] {
-	const timestamps = new Set<number>();
+	const timestamps = new Set();

 	seriesList.forEach((series: { values?: [number, string][] }) => {
 		if (series?.values) {
@@ -18,71 +18,54 @@ function getXAxisTimestamps(seriesList: QueryData[]): number[] {
 		}
 	});

-	const timestampsArr = Array.from(timestamps);
-	timestampsArr.sort((a, b) => a - b);
-
-	return timestampsArr;
+	const timestampsArr: number[] | unknown[] = Array.from(timestamps) || [];
+	// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+	// @ts-ignore
+	return timestampsArr.sort((a, b) => a - b);
 }

 // eslint-disable-next-line sonarjs/cognitive-complexity
-function fillMissingXAxisTimestamps(
-	timestampArr: number[],
-	data: Array<{ values?: [number, string][] }>,
-): (number | null)[][] {
+function fillMissingXAxisTimestamps(timestampArr: number[], data: any[]): any {
 	// Generate a set of all timestamps in the range
 	const allTimestampsSet = new Set(timestampArr);
-	const result: (number | null)[][] = [];
+	const processedData = cloneDeep(data);

-	// Process each series entry
-	for (let i = 0; i < data.length; i++) {
-		const entry = data[i];
-		if (!entry?.values) {
-			result.push([]);
-		} else {
-			// Build Set of existing timestamps directly (avoid intermediate array)
-			const existingTimestamps = new Set<number>();
-			const valuesMap = new Map<number, number | null>();
+	// Fill missing timestamps with null values
+	processedData.forEach((entry: { values: (number | null)[][] }) => {
+		const existingTimestamps = new Set(
+			(entry?.values ?? []).map((value) => value[0]),
+		);

-			for (let j = 0; j < entry.values.length; j++) {
-				const [timestamp, value] = entry.values[j];
-				existingTimestamps.add(timestamp);
-				valuesMap.set(timestamp, normalizePlotValue(value));
-			}
+		const missingTimestamps = Array.from(allTimestampsSet).filter(
+			(timestamp) => !existingTimestamps.has(timestamp),
+		);

-			// Find missing timestamps by iterating Set directly (avoid Array.from + filter)
-			const missingTimestamps: number[] = [];
-			const allTimestampsArray = Array.from(allTimestampsSet);
-			for (let k = 0; k < allTimestampsArray.length; k++) {
-				const timestamp = allTimestampsArray[k];
-				if (!existingTimestamps.has(timestamp)) {
-					missingTimestamps.push(timestamp);
-				}
-			}
+		missingTimestamps.forEach((timestamp) => {
+			const value = null;

-			// Add missing timestamps to map
-			for (let j = 0; j < missingTimestamps.length; j++) {
-				valuesMap.set(missingTimestamps[j], null);
-			}
+			entry?.values?.push([timestamp, value]);
+		});

-			// Build sorted array of values
-			const sortedTimestamps = Array.from(valuesMap.keys()).sort((a, b) => a - b);
-			const yValues = sortedTimestamps.map((timestamp) => {
-				const value = valuesMap.get(timestamp);
-				return value !== undefined ? value : null;
-			});
-			result.push(yValues);
-		}
-	}
+		entry?.values?.forEach((v) => {
+			// eslint-disable-next-line no-param-reassign
+			v[1] = normalizePlotValue(v[1]);
+		});

-	return result;
+		// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+		// @ts-ignore
+		entry?.values?.sort((a, b) => a[0] - b[0]);
+	});
+
+	return processedData.map((entry: { values: [number, string][] }) =>
+		entry?.values?.map((value) => value[1]),
+	);
 }

-function getStackedSeries(val: (number | null)[][]): (number | null)[][] {
-	const series = val ? val.map((row: (number | null)[]) => [...row]) : [];
+function getStackedSeries(val: any): any {
+	const series = cloneDeep(val) || [];

 	for (let i = series.length - 2; i >= 0; i--) {
 		for (let j = 0; j < series[i].length; j++) {
-			series[i][j] = (series[i][j] || 0) + (series[i + 1][j] || 0);
+			series[i][j] += series[i + 1][j];
 		}
 	}
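Both the deleted and the restored implementations run the same two-step pipeline: align every series onto the union of x-axis timestamps, filling gaps with null, then (for stacked charts) accumulate each row onto the already-accumulated row after it. A self-contained restatement with a worked example (editor's sketch; the real code also normalizes values via normalizePlotValue, and the restored getStackedSeries does not null-guard the addition as this sketch does):

type Row = (number | null)[];

// Align each series onto the shared, sorted timestamp axis; gaps become null.
function alignSeries(
	axis: number[],
	seriesList: { values?: [number, number][] }[],
): Row[] {
	return seriesList.map((series) => {
		const byTimestamp = new Map<number, number>();
		for (const [ts, value] of series.values ?? []) byTimestamp.set(ts, value);
		return axis.map((ts) => byTimestamp.get(ts) ?? null);
	});
}

// Stack from the last row upward, as getStackedSeries does: after the loop,
// row i holds the sum of rows i..n-1, so row 0 carries the grand total.
function stack(rows: Row[]): Row[] {
	const out = rows.map((row) => [...row]);
	for (let i = out.length - 2; i >= 0; i--) {
		for (let j = 0; j < out[i].length; j++) {
			out[i][j] = (out[i][j] ?? 0) + (out[i + 1][j] ?? 0);
		}
	}
	return out;
}

// Worked example: axis is the union {1, 2, 3}; the second series misses t=2.
const seriesList: { values?: [number, number][] }[] = [
	{ values: [[1, 10], [2, 20], [3, 30]] },
	{ values: [[1, 1], [3, 3]] },
];
const aligned = alignSeries([1, 2, 3], seriesList);
console.log(aligned); // [[10, 20, 30], [1, null, 3]]
console.log(stack(aligned)); // [[11, 20, 33], [1, null, 3]]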
@@ -127,7 +110,6 @@ const processAnomalyDetectionData = (
 		queryIndex < anomalyDetectionData.length;
 		queryIndex++
 	) {
-		const queryData = anomalyDetectionData[queryIndex];
 		const {
 			series,
 			predictedSeries,
@@ -135,7 +117,7 @@ const processAnomalyDetectionData = (
 			lowerBoundSeries,
 			queryName,
 			legend,
-		} = queryData;
+		} = anomalyDetectionData[queryIndex];

 		for (let index = 0; index < series?.length; index++) {
 			const label = getLabelName(
@@ -147,30 +129,14 @@ const processAnomalyDetectionData = (
 			const objKey =
 				anomalyDetectionData.length > 1 ? `${queryName}-${label}` : label;

-			// Single iteration instead of 5 separate map operations
-			const { values: seriesValues } = series[index];
-			const { values: predictedValues } = predictedSeries[index];
-			const { values: upperBoundValues } = upperBoundSeries[index];
-			const { values: lowerBoundValues } = lowerBoundSeries[index];
-			// eslint-disable-next-line prefer-destructuring
-			const length = seriesValues.length;
-
-			const timestamps: number[] = new Array(length);
-			const values: number[] = new Array(length);
-			const predicted: number[] = new Array(length);
-			const upperBound: number[] = new Array(length);
-			const lowerBound: number[] = new Array(length);
-
-			for (let i = 0; i < length; i++) {
-				timestamps[i] = seriesValues[i].timestamp / 1000;
-				values[i] = seriesValues[i].value;
-				predicted[i] = predictedValues[i].value;
-				upperBound[i] = upperBoundValues[i].value;
-				lowerBound[i] = lowerBoundValues[i].value;
-			}
-
 			processedData[objKey] = {
-				data: [timestamps, values, predicted, upperBound, lowerBound],
+				data: [
+					series[index].values.map((v: { timestamp: number }) => v.timestamp / 1000),
+					series[index].values.map((v: { value: number }) => v.value),
+					predictedSeries[index].values.map((v: { value: number }) => v.value),
+					upperBoundSeries[index].values.map((v: { value: number }) => v.value),
+					lowerBoundSeries[index].values.map((v: { value: number }) => v.value),
+				],
 				color: generateColor(
 					objKey,
 					isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
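The removed block built all five uPlot columns in one pass over preallocated arrays; the restored code takes five separate .map() passes. Both produce identical columns; the difference is allocations and traversals. A side-by-side sketch, trimmed to three of the five columns (hypothetical Point type):

interface Point {
	timestamp: number;
	value: number;
}

// Five-pass style (the restored code): one array allocation per .map().
function buildColumnsWithMap(series: Point[], predicted: Point[]): number[][] {
	return [
		series.map((p) => p.timestamp / 1000),
		series.map((p) => p.value),
		predicted.map((p) => p.value),
	];
}

// Single-pass style (the deleted code): preallocate once, fill in one loop.
function buildColumnsSinglePass(series: Point[], predicted: Point[]): number[][] {
	const n = series.length;
	const ts = new Array<number>(n);
	const vals = new Array<number>(n);
	const pred = new Array<number>(n);
	for (let i = 0; i < n; i++) {
		ts[i] = series[i].timestamp / 1000;
		vals[i] = series[i].value;
		pred[i] = predicted[i].value;
	}
	return [ts, vals, pred];
}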
@@ -186,7 +152,14 @@ const processAnomalyDetectionData = (
 export const getUplotChartDataForAnomalyDetection = (
 	apiResponse: MetricRangePayloadProps,
 	isDarkMode: boolean,
-): Record<string, { [x: string]: any; data: number[][]; color: string }> => {
+): Record<
+	string,
+	{
+		[x: string]: any;
+		data: number[][];
+		color: string;
+	}
+> => {
 	const anomalyDetectionData = apiResponse?.data?.newResult?.data?.result;
 	return processAnomalyDetectionData(anomalyDetectionData, isDarkMode);
 };
@@ -20,7 +20,7 @@ const store = createStore(

 export type AppDispatch = typeof store.dispatch;

-if (window !== undefined) {
+if (window !== undefined && process.env.NODE_ENV === 'development') {
 	window.store = store;
 }
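Gating on NODE_ENV keeps the store off window in production bundles. A hedged sketch of the global declaration such an assignment typically needs to type-check (an assumption, not shown in the diff); note that `typeof window !== 'undefined'` is the conventional guard, while the file's `window !== undefined` relies on this bundle always running in a browser:

import { Store } from 'redux';

declare global {
	interface Window {
		// Dev-only escape hatch for inspecting state from the browser console.
		store?: Store;
	}
}

export function exposeStoreForDebugging(store: Store): void {
	if (typeof window !== 'undefined' && process.env.NODE_ENV === 'development') {
		window.store = store;
	}
}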
@@ -170,7 +170,7 @@ const config = {
 	plugins,
 	optimization: {
 		chunkIds: 'named',
-		concatenateModules: false,
+		concatenateModules: true, // Enable module concatenation for better tree-shaking and smaller bundles
 		emitOnErrors: true,
 		flagIncludedChunks: true,
 		innerGraph: true, // tells webpack whether to conduct inner graph analysis for unused exports.
@@ -181,6 +181,85 @@ const config = {
 		runtimeChunk: {
 			name: (entrypoint) => `runtime~${entrypoint.name}`,
 		},
+		splitChunks: {
+			chunks: 'all',
+			maxInitialRequests: 30,
+			minSize: 20000,
+			cacheGroups: {
+				// Vendor libraries - React, React-DOM, Redux, Router
+				vendor: {
+					test: /[\\/]node_modules[\\/](react|react-dom|react-router|react-router-dom|react-redux|redux|@reduxjs)[\\/]/,
+					name: 'vendors-react',
+					priority: 30,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// Ant Design icons (separate from core - icons are huge)
+				antdIcons: {
+					test: /[\\/]node_modules[\\/](@ant-design\/icons)[\\/]/,
+					name: 'vendors-antd-icons',
+					priority: 25,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// Ant Design core (without icons) - matches antd and @ant-design but not @ant-design/icons
+				antd: {
+					test: /[\\/]node_modules[\\/](antd|@ant-design(?!\/icons))[\\/]/,
+					name: 'vendors-antd',
+					priority: 20,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// SigNoz UI components
+				signozhq: {
+					test: /[\\/]node_modules[\\/](@signozhq)[\\/]/,
+					name: 'vendors-signozhq',
+					priority: 19,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// Chart libraries
+				charts: {
+					test: /[\\/]node_modules[\\/](uplot|chart\.js|@visx|@tanstack\/react-table|@tanstack\/react-virtual)[\\/]/,
+					name: 'vendors-charts',
+					priority: 18,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// React Query
+				reactQuery: {
+					test: /[\\/]node_modules[\\/](react-query|@tanstack\/react-query)[\\/]/,
+					name: 'vendors-react-query',
+					priority: 17,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// Large utility libraries
+				utilities: {
+					test: /[\\/]node_modules[\\/](lodash-es|@dnd-kit|dayjs|axios|i18next)[\\/]/,
+					name: 'vendors-utilities',
+					priority: 15,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// Monaco editor (very large)
+				monaco: {
+					test: /[\\/]node_modules[\\/](@monaco-editor|monaco-editor)[\\/]/,
+					name: 'vendors-monaco',
+					priority: 16,
+					reuseExistingChunk: true,
+					enforce: true,
+				},
+				// Other vendor libraries
+				common: {
+					test: /[\\/]node_modules[\\/]/,
+					name: 'vendors-common',
+					priority: 10,
+					minChunks: 2,
+					reuseExistingChunk: true,
+				},
+			},
+		},
 		minimizer: [
 			new TerserPlugin({
 				parallel: true,
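Each cacheGroup's test regex is matched against a module's resolved path, and priority breaks ties when several groups match. The antd group's negative lookahead is the subtle part: it keeps @ant-design/icons out of the core chunk without depending on the priority race. A small runnable check of the two patterns (the sample paths are illustrative):

// The same patterns as the cacheGroups above.
const antdIcons = /[\\/]node_modules[\\/](@ant-design\/icons)[\\/]/;
const antdCore = /[\\/]node_modules[\\/](antd|@ant-design(?!\/icons))[\\/]/;

const samples = [
	'/repo/node_modules/@ant-design/icons/es/index.js',
	'/repo/node_modules/@ant-design/colors/es/index.js',
	'/repo/node_modules/antd/es/button/index.js',
];

for (const path of samples) {
	const chunk = antdIcons.test(path)
		? 'vendors-antd-icons'
		: antdCore.test(path)
		? 'vendors-antd'
		: 'vendors-common';
	console.log(path, '->', chunk);
}
// @ant-design/icons -> vendors-antd-icons
// @ant-design/colors -> vendors-antd
// antd               -> vendors-antd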
@@ -80,17 +80,6 @@ func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, er
 	name := r.URL.Query().Get("searchText")

-	if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
-		parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
-		if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
-			// Only apply inferred context if it is valid for the current signal
-			if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
-				name = parsedFieldKey.Name
-				fieldContext = parsedFieldKey.FieldContext
-			}
-		}
-	}
-
 	req = telemetrytypes.FieldKeySelector{
 		StartUnixMilli: startUnixMilli,
 		EndUnixMilli:   endUnixMilli,
@@ -113,16 +102,6 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
 	}

 	name := r.URL.Query().Get("name")
-	if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
-		parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
-		if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
-			// Only apply inferred context if it is valid for the current signal
-			if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
-				name = parsedFieldKey.Name
-				keySelector.FieldContext = parsedFieldKey.FieldContext
-			}
-		}
-	}
 	keySelector.Name = name
 	existingQuery := r.URL.Query().Get("existingQuery")
 	value := r.URL.Query().Get("searchText")
@@ -142,21 +121,3 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector

 	return &req, nil
 }
-
-func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
-	if ctx == telemetrytypes.FieldContextResource ||
-		ctx == telemetrytypes.FieldContextAttribute ||
-		ctx == telemetrytypes.FieldContextScope {
-		return true
-	}
-
-	switch signal.StringValue() {
-	case telemetrytypes.SignalLogs.StringValue():
-		return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
-	case telemetrytypes.SignalTraces.StringValue():
-		return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
-	case telemetrytypes.SignalMetrics.StringValue():
-		return ctx == telemetrytypes.FieldContextMetric
-	}
-	return true
-}
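The deleted helper gated context inference: resource, attribute, and scope prefixes are valid for every signal, while signal-specific contexts are honored only for their own signal. A compact TypeScript restatement of the removed rule (editor's paraphrase of the Go above):

type Signal = 'logs' | 'traces' | 'metrics';
type FieldContext =
	| 'resource' | 'attribute' | 'scope'
	| 'log' | 'body'
	| 'span' | 'event' | 'trace'
	| 'metric' | 'unspecified';

// Shared contexts always pass; signal-specific contexts pass only for the
// matching signal; anything else is allowed through, as in the Go version.
function isContextValidForSignal(ctx: FieldContext, signal: Signal): boolean {
	if (ctx === 'resource' || ctx === 'attribute' || ctx === 'scope') return true;
	switch (signal) {
		case 'logs':
			return ctx === 'log' || ctx === 'body';
		case 'traces':
			return ctx === 'span' || ctx === 'event' || ctx === 'trace';
		case 'metrics':
			return ctx === 'metric';
		default:
			return true;
	}
}

// Example: for logs, "metric.domain_id" keeps its literal name, because the
// inferred 'metric' context is invalid for the logs signal.
console.log(isContextValidForSignal('metric', 'logs')); // false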
@@ -153,28 +153,10 @@ func NewFormulaEvaluator(expressionStr string, canDefaultZero map[string]bool) (
 		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse expression")
 	}

-	// Normalize canDefaultZero keys to match variable casing from expression
-	normalizedCanDefaultZero := make(map[string]bool)
-	vars := expression.Vars()
-	for _, variable := range vars {
-		// If exact match exists, use it
-		if val, ok := canDefaultZero[variable]; ok {
-			normalizedCanDefaultZero[variable] = val
-			continue
-		}
-		// Otherwise try case-insensitive lookup
-		for k, v := range canDefaultZero {
-			if strings.EqualFold(k, variable) {
-				normalizedCanDefaultZero[variable] = v
-				break
-			}
-		}
-	}
-
 	evaluator := &FormulaEvaluator{
 		expression:     expression,
-		variables:      vars,
-		canDefaultZero: normalizedCanDefaultZero,
+		variables:      expression.Vars(),
+		canDefaultZero: canDefaultZero,
 		aggRefs:        make(map[string]aggregationRef),
 	}
@@ -299,16 +281,6 @@ func (fe *FormulaEvaluator) buildSeriesLookup(timeSeriesData map[string]*TimeSer
 	// We are only interested in the time series data for the queries that are
 	// involved in the formula expression.
 	data, exists := timeSeriesData[aggRef.QueryName]
-	if !exists {
-		// try case-insensitive lookup
-		for k, v := range timeSeriesData {
-			if strings.EqualFold(k, aggRef.QueryName) {
-				data = v
-				exists = true
-				break
-			}
-		}
-	}
 	if !exists {
 		continue
 	}
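The removed fallback did a linear scan with strings.EqualFold whenever the exact query name missed. The same idea in TypeScript (editor's sketch; note the O(n)-per-miss cost that normalizing keys once up front would avoid):

// Exact lookup first, then a linear case-insensitive scan, as the removed
// Go fallback did.
function lookupCaseInsensitive<T>(
	table: Map<string, T>,
	key: string,
): T | undefined {
	const exact = table.get(key);
	if (exact !== undefined) return exact;
	for (const [k, v] of table) {
		if (k.toLowerCase() === key.toLowerCase()) return v;
	}
	return undefined;
}

const data = new Map([['A', 10], ['B', 2]]);
console.log(lookupCaseInsensitive(data, 'a')); // 10, matched against "A"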
@@ -864,158 +864,6 @@ func TestComplexExpression(t *testing.T) {
 	}
 }

-func TestCaseInsensitiveQueryNames(t *testing.T) {
-	tests := []struct {
-		name           string
-		expression     string
-		tsData         map[string]*TimeSeriesData
-		expectedValues []float64
-	}{
-		{
-			name:       "lowercase query names",
-			expression: "a / b",
-			tsData: map[string]*TimeSeriesData{
-				"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 10}),
-					},
-				}),
-				"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 2}),
-					},
-				}),
-			},
-			expectedValues: []float64{5.0},
-		},
-		{
-			name:       "mixed case query names",
-			expression: "A / b",
-			tsData: map[string]*TimeSeriesData{
-				"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 10}),
-					},
-				}),
-				"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 2}),
-					},
-				}),
-			},
-			expectedValues: []float64{5.0},
-		},
-		{
-			name:       "uppercase query names with lowercase data keys",
-			expression: "A / B",
-			tsData: map[string]*TimeSeriesData{
-				"a": createFormulaTestTimeSeriesData("a", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 10}),
-					},
-				}),
-				"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 2}),
-					},
-				}),
-			},
-			expectedValues: []float64{5.0},
-		},
-		{
-			name:       "all lowercase",
-			expression: "a/b",
-			tsData: map[string]*TimeSeriesData{
-				"a": createFormulaTestTimeSeriesData("a", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 100}),
-					},
-				}),
-				"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 10}),
-					},
-				}),
-			},
-			expectedValues: []float64{10.0},
-		},
-		{
-			name:       "complex expression with mixed case",
-			expression: "a + B * c",
-			tsData: map[string]*TimeSeriesData{
-				"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 5}),
-					},
-				}),
-				"b": createFormulaTestTimeSeriesData("b", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 3}),
-					},
-				}),
-				"C": createFormulaTestTimeSeriesData("C", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{1: 2}),
-					},
-				}),
-			},
-			expectedValues: []float64{11.0}, // 5 + 3 * 2 = 11
-		},
-		{
-			name:       "lowercase variables with default zero missing point",
-			expression: "a + b",
-			tsData: map[string]*TimeSeriesData{
-				"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{
-							1: 10,
-							2: 20,
-						}),
-					},
-				}),
-				"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
-					{
-						Labels: createLabels(map[string]string{}),
-						Values: createValues(map[int64]float64{
-							1: 5,
-						}),
-					},
-				}),
-			},
-			expectedValues: []float64{15.0, 20.0}, // t1: 10+5, t2: 20+0
-		},
-	}
-
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			evaluator, err := NewFormulaEvaluator(tt.expression, map[string]bool{"a": true, "A": true, "b": true, "B": true, "c": true, "C": true})
-			require.NoError(t, err)
-
-			result, err := evaluator.EvaluateFormula(tt.tsData)
-			require.NoError(t, err)
-			require.NotNil(t, result)
-
-			assert.Equal(t, 1, len(result), "should have exactly one result series")
-			assert.Equal(t, len(tt.expectedValues), len(result[0].Values), "should match expected number of values")
-			for i, v := range tt.expectedValues {
-				assert.InDelta(t, v, result[0].Values[i].Value, 0.0001, "value at index %d should match", i)
-			}
-		})
-	}
-}

 func TestAbsValueExpression(t *testing.T) {
 	tsData := map[string]*TimeSeriesData{
 		"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
@@ -67,7 +67,6 @@ def test_logs_list(
             "code.file": "/opt/integration.go",
             "code.function": "com.example.Integration.process",
             "code.line": 120,
-            "metric.domain_id": "d-001",
             "telemetry.sdk.language": "go",
         },
         body="This is a log message, coming from a go application",
@@ -142,7 +141,6 @@ def test_logs_list(
         "code.function": "com.example.Integration.process",
         "log.iostream": "stdout",
         "logtag": "F",
-        "metric.domain_id": "d-001",
         "telemetry.sdk.language": "go",
     }
     assert rows[0]["data"]["attributes_number"] == {"code.line": 120}
@@ -310,86 +308,6 @@ def test_logs_list(
     assert len(values) == 1
     assert 120 in values

-    # Query keys from the fields API with context specified in the key
-    response = requests.get(
-        signoz.self.host_configs["8080"].get("/api/v1/fields/keys"),
-        timeout=2,
-        headers={
-            "authorization": f"Bearer {token}",
-        },
-        params={
-            "signal": "logs",
-            "searchText": "resource.servic",
-        },
-    )
-
-    assert response.status_code == HTTPStatus.OK
-    assert response.json()["status"] == "success"
-
-    keys = response.json()["data"]["keys"]
-    assert "service.name" in keys
-    assert any(k["fieldContext"] == "resource" for k in keys["service.name"])
-
-    # Do not treat `metric.` as a context prefix for logs
-    response = requests.get(
-        signoz.self.host_configs["8080"].get("/api/v1/fields/keys"),
-        timeout=2,
-        headers={
-            "authorization": f"Bearer {token}",
-        },
-        params={
-            "signal": "logs",
-            "searchText": "metric.do",
-        },
-    )
-
-    assert response.status_code == HTTPStatus.OK
-    assert response.json()["status"] == "success"
-
-    keys = response.json()["data"]["keys"]
-    assert "metric.domain_id" in keys
-
-    # Query values of service.name resource attribute using context-prefixed key
-    response = requests.get(
-        signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
-        timeout=2,
-        headers={
-            "authorization": f"Bearer {token}",
-        },
-        params={
-            "signal": "logs",
-            "name": "resource.service.name",
-            "searchText": "",
-        },
-    )
-
-    assert response.status_code == HTTPStatus.OK
-    assert response.json()["status"] == "success"
-
-    values = response.json()["data"]["values"]["stringValues"]
-    assert "go" in values
-    assert "java" in values
-
-    # Query values of metric.domain_id (string attribute) and ensure context collision doesn't break it
-    response = requests.get(
-        signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
-        timeout=2,
-        headers={
-            "authorization": f"Bearer {token}",
-        },
-        params={
-            "signal": "logs",
-            "name": "metric.domain_id",
-            "searchText": "",
-        },
-    )
-
-    assert response.status_code == HTTPStatus.OK
-    assert response.json()["status"] == "success"
-
-    values = response.json()["data"]["values"]["stringValues"]
-    assert "d-001" in values
-

 def test_logs_time_series_count(
     signoz: types.SigNoz,
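The removed assertions exercised the fields API with context-prefixed keys. For reference, the request shape they used, restated in TypeScript against the same endpoints (the base URL and token are placeholders):

// Placeholders: the base URL and token depend on the deployment under test.
const baseUrl = 'http://localhost:8080';
const token = '<bearer token>';

async function getFieldKeys(signal: string, searchText: string): Promise<unknown> {
	const params = new URLSearchParams({ signal, searchText });
	const res = await fetch(`${baseUrl}/api/v1/fields/keys?${params}`, {
		headers: { authorization: `Bearer ${token}` },
	});
	return res.json();
}

// e.g. getFieldKeys('logs', 'resource.servic'): the removed test expected
// "service.name" among data.keys, including an entry with fieldContext "resource".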
@@ -373,43 +373,3 @@ def test_traces_list(
     assert len(values) == 2

     assert set(values) == set(["POST", "PATCH"])
-
-    # Query keys from the fields API with context specified in the key
-    response = requests.get(
-        signoz.self.host_configs["8080"].get("/api/v1/fields/keys"),
-        timeout=2,
-        headers={
-            "authorization": f"Bearer {token}",
-        },
-        params={
-            "signal": "traces",
-            "searchText": "resource.servic",
-        },
-    )
-
-    assert response.status_code == HTTPStatus.OK
-    assert response.json()["status"] == "success"
-
-    keys = response.json()["data"]["keys"]
-    assert "service.name" in keys
-    assert any(k["fieldContext"] == "resource" for k in keys["service.name"])
-
-    # Query values of service.name resource attribute using context-prefixed key
-    response = requests.get(
-        signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
-        timeout=2,
-        headers={
-            "authorization": f"Bearer {token}",
-        },
-        params={
-            "signal": "traces",
-            "name": "resource.service.name",
-            "searchText": "",
-        },
-    )
-
-    assert response.status_code == HTTPStatus.OK
-    assert response.json()["status"] == "success"
-
-    values = response.json()["data"]["values"]["stringValues"]
-    assert set(values) == set(["topic-service", "http-service"])