Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-28 04:22:12 +00:00)

Compare commits: json...feat/servi — 18 commits
| Author | SHA1 | Date |
|---|---|---|
| | 634e8923c7 | |
| | 69855a1136 | |
| | cc1513c7e3 | |
| | ba60d9ee50 | |
| | 261622616f | |
| | 592f5a7e11 | |
| | dc390c813d | |
| | ba6690cddb | |
| | 3a45ae12d1 | |
| | 5aef65fc11 | |
| | efd2d961d0 | |
| | ec618a00ce | |
| | c896753d3a | |
| | cc616602eb | |
| | 255847ac61 | |
| | a1e4461865 | |
| | 146fd9892b | |
| | b9ecdcf210 | |
Makefile (1 line changed)
@@ -101,6 +101,7 @@ go-test: ## Runs go unit tests
 .PHONY: go-run-community
 go-run-community: ## Runs the community go backend server
 	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
+	USE_SPAN_METRICS=true \
 	SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
 	SIGNOZ_WEB_ENABLED=false \
 	SIGNOZ_JWT_SECRET=secret \
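The Makefile only exports `USE_SPAN_METRICS=true` for the dev server, while a TODO later in this PR notes that `constants.PreferSpanMetrics` should eventually come from the environment. A minimal Go sketch of that wiring — the function name and placement are illustrative, not from the PR:

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// preferSpanMetrics reads the toggle the Makefile sets; unset or
// unparsable values default to false. Hypothetical helper — the PR
// itself still reads the package-level constants.PreferSpanMetrics.
func preferSpanMetrics() bool {
	v, err := strconv.ParseBool(os.Getenv("USE_SPAN_METRICS"))
	return err == nil && v
}

func main() {
	os.Setenv("USE_SPAN_METRICS", "true")
	fmt.Println(preferSpanMetrics()) // true
}
```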

@@ -1,155 +0,0 @@
import { OPERATORS } from 'constants/queryBuilder';
import {
  BaseAutocompleteData,
  DataTypes,
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import {
  DataSource,
  MetricAggregateOperator,
  QueryBuilderData,
} from 'types/common/queryBuilder';

import {
  GraphTitle,
  KeyOperationTableHeader,
  MetricsType,
  WidgetKeys,
} from '../constant';
import { TopOperationQueryFactoryProps } from '../Tabs/types';
import { getQueryBuilderQuerieswithFormula } from './MetricsPageQueriesFactory';

export const topOperationQueries = ({
  servicename,
  dotMetricsEnabled,
}: TopOperationQueryFactoryProps): QueryBuilderData => {
  const latencyAutoCompleteData: BaseAutocompleteData = {
    key: dotMetricsEnabled
      ? WidgetKeys.Signoz_latency_bucket
      : WidgetKeys.Signoz_latency_bucket_norm,
    dataType: DataTypes.Float64,
    type: '',
  };

  const errorRateAutoCompleteData: BaseAutocompleteData = {
    key: WidgetKeys.SignozCallsTotal,
    dataType: DataTypes.Float64,
    type: '',
  };

  const numOfCallAutoCompleteData: BaseAutocompleteData = {
    key: dotMetricsEnabled
      ? WidgetKeys.SignozLatencyCount
      : WidgetKeys.SignozLatencyCountNorm,
    dataType: DataTypes.Float64,
    type: '',
  };

  const latencyAndNumberOfCallAdditionalItems: TagFilterItem[] = [
    {
      id: '',
      key: {
        key: dotMetricsEnabled
          ? WidgetKeys.Service_name
          : WidgetKeys.Service_name_norm,
        dataType: DataTypes.String,
        type: MetricsType.Resource,
      },
      value: [servicename],
      op: OPERATORS.IN,
    },
  ];

  const errorRateAdditionalItemsA: TagFilterItem[] = [
    {
      id: '',
      key: {
        dataType: DataTypes.String,
        key: dotMetricsEnabled
          ? WidgetKeys.Service_name
          : WidgetKeys.Service_name_norm,
        type: MetricsType.Resource,
      },
      op: OPERATORS.IN,
      value: [servicename],
    },
    {
      id: '',
      key: {
        dataType: DataTypes.Int64,
        key: dotMetricsEnabled ? WidgetKeys.StatusCode : WidgetKeys.StatusCodeNorm,
        type: MetricsType.Tag,
      },
      op: OPERATORS.IN,
      value: ['STATUS_CODE_ERROR'],
    },
  ];

  const errorRateAdditionalItemsB = latencyAndNumberOfCallAdditionalItems;

  const groupBy: BaseAutocompleteData[] = [
    {
      dataType: DataTypes.String,
      key: WidgetKeys.Operation,
      type: MetricsType.Tag,
    },
  ];

  const autocompleteData = [
    latencyAutoCompleteData,
    latencyAutoCompleteData,
    latencyAutoCompleteData,
    errorRateAutoCompleteData,
    errorRateAutoCompleteData,
    numOfCallAutoCompleteData,
  ];
  const additionalItems = [
    latencyAndNumberOfCallAdditionalItems,
    latencyAndNumberOfCallAdditionalItems,
    latencyAndNumberOfCallAdditionalItems,
    errorRateAdditionalItemsA,
    errorRateAdditionalItemsB,
    latencyAndNumberOfCallAdditionalItems,
  ];
  const disabled = [false, false, false, true, true, false];
  const legends = [
    KeyOperationTableHeader.P50,
    KeyOperationTableHeader.P90,
    KeyOperationTableHeader.P99,
    KeyOperationTableHeader.ERROR_RATE,
    KeyOperationTableHeader.ERROR_RATE,
    KeyOperationTableHeader.NUM_OF_CALLS,
  ];
  const timeAggregateOperators = [
    MetricAggregateOperator.EMPTY,
    MetricAggregateOperator.EMPTY,
    MetricAggregateOperator.EMPTY,
    MetricAggregateOperator.RATE,
    MetricAggregateOperator.RATE,
    MetricAggregateOperator.RATE,
  ];
  const spaceAggregateOperators = [
    MetricAggregateOperator.P50,
    MetricAggregateOperator.P90,
    MetricAggregateOperator.P99,
    MetricAggregateOperator.SUM,
    MetricAggregateOperator.SUM,
    MetricAggregateOperator.SUM,
  ];
  const expressions = ['D*100/E'];
  const legendFormulas = [GraphTitle.ERROR_PERCENTAGE];
  const dataSource = DataSource.METRICS;

  return getQueryBuilderQuerieswithFormula({
    autocompleteData,
    additionalItems,
    disabled,
    legends,
    timeAggregateOperators,
    spaceAggregateOperators,
    expressions,
    legendFormulas,
    dataSource,
    groupBy,
  });
};

@@ -46,7 +46,6 @@ import GraphControlsPanel from './Overview/GraphControlsPanel/GraphControlsPanel
 import ServiceOverview from './Overview/ServiceOverview';
 import TopLevelOperation from './Overview/TopLevelOperations';
 import TopOperation from './Overview/TopOperation';
-import TopOperationMetrics from './Overview/TopOperationMetrics';
 import { Button, Card } from './styles';
 import { IServiceName } from './types';
 import {
@@ -72,10 +71,6 @@ function Application(): JSX.Element {
 	const urlQuery = useUrlQuery();

-	const { featureFlags } = useAppContext();
-	const isSpanMetricEnabled =
-		featureFlags?.find((flag) => flag.name === FeatureKeys.USE_SPAN_METRICS)
-			?.active || false;

 	const handleSetTimeStamp = useCallback((selectTime: number) => {
 		setSelectedTimeStamp(selectTime);
 	}, []);
@@ -396,7 +391,7 @@ function Application(): JSX.Element {

 				<Col span={12}>
 					<Card>
-						{isSpanMetricEnabled ? <TopOperationMetrics /> : <TopOperation />}{' '}
+						<TopOperation />
 					</Card>
 				</Col>
 			</Row>

@@ -1,130 +0,0 @@
import { ENTITY_VERSION_V4 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { topOperationMetricsDownloadOptions } from 'container/MetricsApplication/constant';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import { topOperationQueries } from 'container/MetricsApplication/MetricsPageQueries/TopOperationQueries';
import { QueryTable } from 'container/QueryTable';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { useNotifications } from 'hooks/useNotifications';
import useResourceAttribute from 'hooks/useResourceAttribute';
import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { ReactNode, useMemo } from 'react';
import { useSelector } from 'react-redux';
import { useParams } from 'react-router-dom';
import { AppState } from 'store/reducers';
import { EQueryType } from 'types/common/dashboard';
import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as uuid } from 'uuid';

import { FeatureKeys } from '../../../../constants/features';
import { useAppContext } from '../../../../providers/App/App';
import { IServiceName } from '../types';
import { title } from './config';
import ColumnWithLink from './TableRenderer/ColumnWithLink';
import { getTableColumnRenderer } from './TableRenderer/TableColumnRenderer';

function TopOperationMetrics(): JSX.Element {
  const { servicename: encodedServiceName } = useParams<IServiceName>();
  const servicename = decodeURIComponent(encodedServiceName);

  const { notifications } = useNotifications();

  const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
    AppState,
    GlobalReducer
  >((state) => state.globalTime);

  const { queries } = useResourceAttribute();

  const selectedTraceTags = JSON.stringify(
    convertRawQueriesToTraceSelectedTags(queries) || [],
  );

  const { featureFlags } = useAppContext();
  const dotMetricsEnabled =
    featureFlags?.find((flag) => flag.name === FeatureKeys.DOT_METRICS_ENABLED)
      ?.active || false;

  const keyOperationWidget = useMemo(
    () =>
      getWidgetQueryBuilder({
        query: {
          queryType: EQueryType.QUERY_BUILDER,
          promql: [],
          builder: topOperationQueries({
            servicename,
            dotMetricsEnabled,
          }),
          clickhouse_sql: [],
          id: uuid(),
        },
        panelTypes: PANEL_TYPES.TABLE,
      }),
    [servicename, dotMetricsEnabled],
  );

  const updatedQuery = updateStepInterval(keyOperationWidget.query);

  const isEmptyWidget = keyOperationWidget.id === PANEL_TYPES.EMPTY_WIDGET;

  const { data, isLoading } = useGetQueryRange(
    {
      selectedTime: keyOperationWidget?.timePreferance,
      graphType: keyOperationWidget?.panelTypes,
      query: updatedQuery,
      globalSelectedInterval,
      variables: {},
    },
    ENTITY_VERSION_V4,
    {
      queryKey: [
        `GetMetricsQueryRange-${keyOperationWidget?.timePreferance}-${globalSelectedInterval}-${keyOperationWidget?.id}`,
        keyOperationWidget,
        maxTime,
        minTime,
        globalSelectedInterval,
      ],
      keepPreviousData: true,
      enabled: !isEmptyWidget,
      refetchOnMount: false,
      onError: (error) => {
        notifications.error({ message: error.message });
      },
    },
  );

  const queryTableData = data?.payload?.data?.newResult?.data?.result || [];

  const renderColumnCell = useMemo(
    () =>
      getTableColumnRenderer({
        columnName: 'operation',
        renderFunction: (record: RowData): ReactNode => (
          <ColumnWithLink
            servicename={servicename}
            minTime={minTime}
            maxTime={maxTime}
            selectedTraceTags={selectedTraceTags}
            record={record}
          />
        ),
      }),
    [servicename, minTime, maxTime, selectedTraceTags],
  );

  return (
    <QueryTable
      title={title}
      query={updatedQuery}
      queryTableData={queryTableData}
      loading={isLoading}
      renderColumnCell={renderColumnCell}
      downloadOption={topOperationMetricsDownloadOptions}
      sticky
    />
  );
}

export default TopOperationMetrics;

@@ -1,6 +1,5 @@
 /* eslint-disable sonarjs/no-duplicate-string */

-import { DownloadOptions } from 'container/Download/Download.types';
 import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
 import {
 	MetricAggregateOperator,
@@ -107,11 +106,6 @@ export enum WidgetKeys {
 	Db_system_norm = 'db_system',
 }

-export const topOperationMetricsDownloadOptions: DownloadOptions = {
-	isDownloadEnabled: true,
-	fileName: 'top-operation',
-} as const;
-
 export const SERVICE_CHART_ID = {
 	latency: 'SERVICE_OVERVIEW_LATENCY',
 	error: 'SERVICE_OVERVIEW_ERROR',

@@ -1,22 +1,14 @@
 import * as Sentry from '@sentry/react';
-import { FeatureKeys } from 'constants/features';
 import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
-import { useAppContext } from 'providers/App/App';

-import ServiceMetrics from './ServiceMetrics';
 import ServiceTraces from './ServiceTraces';
 import { Container } from './styles';

 function Services(): JSX.Element {
-	const { featureFlags } = useAppContext();
-	const isSpanMetricEnabled =
-		featureFlags?.find((flag) => flag.name === FeatureKeys.USE_SPAN_METRICS)
-			?.active || false;
-
 	return (
 		<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
 			<Container style={{ marginTop: 0 }}>
-				{isSpanMetricEnabled ? <ServiceMetrics /> : <ServiceTraces />}
+				<ServiceTraces />
 			</Container>
 		</Sentry.ErrorBoundary>
 	);

go.mod (2 lines changed)
@@ -11,7 +11,6 @@ require (
 	github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
 	github.com/antlr4-go/antlr/v4 v4.13.1
 	github.com/antonmedv/expr v1.15.3
-	github.com/bytedance/sonic v1.14.1
 	github.com/cespare/xxhash/v2 v2.3.0
 	github.com/coreos/go-oidc/v3 v3.14.1
 	github.com/dgraph-io/ristretto/v2 v2.3.0
@@ -90,6 +89,7 @@ require (

 require (
 	github.com/bytedance/gopkg v0.1.3 // indirect
+	github.com/bytedance/sonic v1.14.1 // indirect
 	github.com/bytedance/sonic/loader v0.3.0 // indirect
 	github.com/cloudwego/base64x v0.1.6 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect

@@ -61,7 +61,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
 	response := []promotetypes.PromotePath{}
 	for _, path := range promotedPaths {
 		fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
-		path = telemetrytypes.BodyJSONStringSearchPrefix + path
+		path = telemetrylogs.BodyJSONStringSearchPrefix + path
 		item := promotetypes.PromotePath{
 			Path:    path,
 			Promote: true,
@@ -77,7 +77,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
 	// add the paths that are not promoted but have indexes
 	for path, indexes := range aggr {
 		path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
-		path = telemetrytypes.BodyJSONStringSearchPrefix + path
+		path = telemetrylogs.BodyJSONStringSearchPrefix + path
 		response = append(response, promotetypes.PromotePath{
 			Path:    path,
 			Indexes: indexes,

@@ -3,16 +3,19 @@ package implservices
 import (
 	"context"
 	"fmt"
-	"time"

+	"math"
+	"sort"
+	"strconv"
+	"time"

 	"github.com/ClickHouse/clickhouse-go/v2"
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/modules/services"
 	"github.com/SigNoz/signoz/pkg/querier"
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/telemetrystore"
 	"github.com/SigNoz/signoz/pkg/telemetrytraces"
 	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/servicetypes/servicetypesv1"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -74,10 +77,25 @@ func (m *module) Get(ctx context.Context, orgUUID valuer.UUID, req *servicetypes
 		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
 	}

 	// Prepare phase
-	queryRangeReq, startMs, endMs, err := m.buildQueryRangeRequest(req)
-	if err != nil {
-		return nil, err
+	var (
+		startMs       uint64
+		endMs         uint64
+		err           error
+		queryRangeReq *qbtypes.QueryRangeRequest
+	)
+	// Prefer span metrics path when enabled via flag or explicit override
+	// TODO(nikhilmantri0902): the following constant should be read from the env variable in this module itself.
+	useSpanMetrics := constants.PreferSpanMetrics
+	if useSpanMetrics {
+		queryRangeReq, startMs, endMs, err = m.buildSpanMetricsQueryRangeRequest(req)
+		if err != nil {
+			return nil, err
+		}
+	} else {
+		queryRangeReq, startMs, endMs, err = m.buildQueryRangeRequest(req)
+		if err != nil {
+			return nil, err
+		}
+	}

 	// Fetch phase
@@ -87,7 +105,14 @@ func (m *module) Get(ctx context.Context, orgUUID valuer.UUID, req *servicetypes
 	}

 	// Process phase
-	items, serviceNames := m.mapQueryRangeRespToServices(resp, startMs, endMs)
+	var items []*servicetypesv1.ResponseItem
+	var serviceNames []string
+	if useSpanMetrics {
+		items, serviceNames = m.mapSpanMetricsRespToServices(resp, startMs, endMs)
+	} else {
+		items, serviceNames = m.mapQueryRangeRespToServices(resp, startMs, endMs)
+	}

 	if len(items) == 0 {
 		return []*servicetypesv1.ResponseItem{}, nil
 	}
@@ -108,9 +133,22 @@ func (m *module) GetTopOperations(ctx context.Context, orgUUID valuer.UUID, req
 		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
 	}

-	qr, err := m.buildTopOpsQueryRangeRequest(req)
-	if err != nil {
-		return nil, err
+	var (
+		qr  *qbtypes.QueryRangeRequest
+		err error
+	)
+	// Prefer span metrics path when enabled via flag
+	useSpanMetrics := constants.PreferSpanMetrics
+	if useSpanMetrics {
+		qr, err = m.buildSpanMetricsTopOpsQueryRangeRequest(req)
+		if err != nil {
+			return nil, err
+		}
+	} else {
+		qr, err = m.buildTopOpsQueryRangeRequest(req)
+		if err != nil {
+			return nil, err
+		}
+	}

 	resp, err := m.executeQuery(ctx, orgUUID, qr)
@@ -118,7 +156,17 @@ func (m *module) GetTopOperations(ctx context.Context, orgUUID valuer.UUID, req
 		return nil, err
 	}

-	items := m.mapTopOpsQueryRangeResp(resp)
+	var items []servicetypesv1.OperationItem
+	if useSpanMetrics {
+		items = m.mapSpanMetricsTopOpsResp(resp)
+		// Apply limit after merging multiple queries
+		if req.Limit > 0 && len(items) > req.Limit {
+			items = items[:req.Limit]
+		}
+	} else {
+		items = m.mapTopOpsQueryRangeResp(resp)
+	}

 	return items, nil
 }

@@ -128,9 +176,22 @@ func (m *module) GetEntryPointOperations(ctx context.Context, orgUUID valuer.UUI
 		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
 	}

-	qr, err := m.buildEntryPointOpsQueryRangeRequest(req)
-	if err != nil {
-		return nil, err
+	var (
+		qr  *qbtypes.QueryRangeRequest
+		err error
+	)
+	// Prefer span metrics path when enabled via flag
+	useSpanMetrics := constants.PreferSpanMetrics
+	if useSpanMetrics {
+		qr, err = m.buildSpanMetricsEntryPointOpsQueryRangeRequest(req)
+		if err != nil {
+			return nil, err
+		}
+	} else {
+		qr, err = m.buildEntryPointOpsQueryRangeRequest(req)
+		if err != nil {
+			return nil, err
+		}
+	}

 	resp, err := m.executeQuery(ctx, orgUUID, qr)
@@ -138,7 +199,17 @@ func (m *module) GetEntryPointOperations(ctx context.Context, orgUUID valuer.UUI
 		return nil, err
 	}

-	items := m.mapEntryPointOpsQueryRangeResp(resp)
+	var items []servicetypesv1.OperationItem
+	if useSpanMetrics {
+		items = m.mapSpanMetricsEntryPointOpsResp(resp)
+		// Apply limit after merging multiple queries
+		if req.Limit > 0 && len(items) > req.Limit {
+			items = items[:req.Limit]
+		}
+	} else {
+		items = m.mapEntryPointOpsQueryRangeResp(resp)
+	}

 	return items, nil
 }
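All three getters repeat the same "branch on useSpanMetrics, pick a builder, pick a mapper" shape. A possible refactor, sketched here with stand-in types rather than the module's real signatures, would select both steps once so the flag is consulted in one place:

```go
package main

import "fmt"

// path bundles the two flag-dependent steps so the flag is read once.
// The string/[]string types are stand-ins for the real request/response types.
type path struct {
	build func(service string) string   // stand-in for the query builder
	remap func(raw string) []string     // stand-in for the response mapper
}

func pickPath(useSpanMetrics bool) path {
	if useSpanMetrics {
		return path{
			build: func(s string) string { return "span-metrics query for " + s },
			remap: func(r string) []string { return []string{r, "merged by queryName"} },
		}
	}
	return path{
		build: func(s string) string { return "traces query for " + s },
		remap: func(r string) []string { return []string{r} },
	}
}

func main() {
	p := pickPath(true)
	fmt.Println(p.remap(p.build("frontend")))
}
```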

@@ -211,6 +282,162 @@ func (m *module) buildQueryRangeRequest(req *servicetypesv1.Request) (*qbtypes.Q
	return &reqV5, startMs, endMs, nil
}

// buildSpanMetricsQueryRangeRequest constructs span-metrics queries for services.
func (m *module) buildSpanMetricsQueryRangeRequest(req *servicetypesv1.Request) (*qbtypes.QueryRangeRequest, uint64, uint64, error) {
	// base filters from request
	// Parse start/end (nanoseconds) from strings and convert to milliseconds for QBv5
	startNs, err := strconv.ParseUint(req.Start, 10, 64)
	if err != nil {
		return nil, 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time: %v", err)
	}
	endNs, err := strconv.ParseUint(req.End, 10, 64)
	if err != nil {
		return nil, 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time: %v", err)
	}
	if startNs >= endNs {
		return nil, 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "start must be before end")
	}
	if err := validateTagFilterItems(req.Tags); err != nil {
		return nil, 0, 0, err
	}

	startMs := startNs / 1_000_000
	endMs := endNs / 1_000_000

	filterExpr, variables := buildFilterExpression(req.Tags)

	// enforce top-level scope via synthetic field
	scopeExpr := "isTopLevelOperation = 'true'"
	if filterExpr != "" {
		filterExpr = "(" + filterExpr + ") AND (" + scopeExpr + ")"
	} else {
		filterExpr = scopeExpr
	}

	// Build error filter for num_errors query
	var errorFilterExpr string
	if filterExpr != "" {
		errorFilterExpr = "(" + filterExpr + ") AND (status.code = 'STATUS_CODE_ERROR')"
	} else {
		errorFilterExpr = "status.code = 'STATUS_CODE_ERROR'"
	}

	// common groupBy on service.name
	groupByService := []qbtypes.GroupByKey{
		{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
			Name:          "service.name",
			FieldContext:  telemetrytypes.FieldContextAttribute, // aligns with working payload
			FieldDataType: telemetrytypes.FieldDataTypeString,
			Materialized:  true,
		}},
	}

	queries := []qbtypes.QueryEnvelope{
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "p99_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByService,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.bucket",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationPercentile99,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "avg_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByService,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.sum",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationAvg,
						SpaceAggregation: metrictypes.SpaceAggregationAvg,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "num_calls",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByService,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_calls_total",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "num_errors",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: errorFilterExpr},
				GroupBy: groupByService,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_calls_total",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:   "num_4xx",
				Signal: telemetrytypes.SignalMetrics,
				// TODO: fix this; below we should add a filter for 4xx http status codes
				Filter:  &qbtypes.Filter{Expression: errorFilterExpr},
				GroupBy: groupByService,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_calls_total",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
	}

	reqV5 := qbtypes.QueryRangeRequest{
		Start:       startMs,
		End:         endMs,
		RequestType: qbtypes.RequestTypeScalar,
		Variables:   variables,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: queries,
		},
		FormatOptions: &qbtypes.FormatOptions{
			FormatTableResultForUI: true,
			FillGaps:               false,
		},
	}

	return &reqV5, startMs, endMs, nil
}
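The prepare phase above accepts start/end as nanosecond strings and converts them to the milliseconds QBv5 expects. A self-contained sketch of just that conversion and validation:

```go
package main

import (
	"errors"
	"fmt"
	"strconv"
)

// parseRangeNsToMs mirrors the prepare phase above: start/end arrive as
// nanosecond strings, are validated, and are scaled down to milliseconds.
func parseRangeNsToMs(start, end string) (uint64, uint64, error) {
	startNs, err := strconv.ParseUint(start, 10, 64)
	if err != nil {
		return 0, 0, fmt.Errorf("invalid start time: %w", err)
	}
	endNs, err := strconv.ParseUint(end, 10, 64)
	if err != nil {
		return 0, 0, fmt.Errorf("invalid end time: %w", err)
	}
	if startNs >= endNs {
		return 0, 0, errors.New("start must be before end")
	}
	return startNs / 1_000_000, endNs / 1_000_000, nil
}

func main() {
	s, e, err := parseRangeNsToMs("1700000000000000000", "1700000060000000000")
	fmt.Println(s, e, err) // 1700000000000 1700000060000 <nil>
}
```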

// executeQuery calls the underlying Querier with the provided request.
func (m *module) executeQuery(ctx context.Context, orgUUID valuer.UUID, qr *qbtypes.QueryRangeRequest) (*qbtypes.QueryRangeResponse, error) {
	return m.Querier.QueryRange(ctx, orgUUID, qr)
@@ -227,6 +454,7 @@ func (m *module) mapQueryRangeRespToServices(resp *qbtypes.QueryRangeResponse, s
 		return []*servicetypesv1.ResponseItem{}, []string{}
 	}

+	// traces path (original behavior)
 	// this stores the index at which service name is found in the response
 	serviceNameRespIndex := -1
 	aggIndexMappings := map[int]int{}
@@ -285,6 +513,97 @@ func (m *module) mapQueryRangeRespToServices(resp *qbtypes.QueryRangeResponse, s
	return out, serviceNames
}

// TODO(nikhilmantri0902): add test cases for the functions in this PR
// mapSpanMetricsRespToServices merges span-metrics scalar results keyed by service.name using queryName for aggregation mapping.
func (m *module) mapSpanMetricsRespToServices(resp *qbtypes.QueryRangeResponse, startMs, endMs uint64) ([]*servicetypesv1.ResponseItem, []string) {
	// TODO(nikhilmantri0902): in case of a nil response, should we return nil directly from here for both values?
	if resp == nil || len(resp.Data.Results) == 0 {
		return []*servicetypesv1.ResponseItem{}, []string{}
	}
	sd, ok := resp.Data.Results[0].(*qbtypes.ScalarData)
	if !ok || sd == nil {
		return []*servicetypesv1.ResponseItem{}, []string{}
	}
	// locate service.name column and aggregation columns by queryName
	serviceNameRespIndex := -1
	aggCols := make(map[string]int)
	for i, c := range sd.Columns {
		switch c.Type {
		case qbtypes.ColumnTypeGroup:
			if c.Name == "service.name" {
				serviceNameRespIndex = i
			}
		case qbtypes.ColumnTypeAggregation:
			if c.QueryName != "" {
				aggCols[c.QueryName] = i
			}
		}
	}
	if serviceNameRespIndex == -1 {
		return []*servicetypesv1.ResponseItem{}, []string{}
	}

	type agg struct {
		p99        float64
		avg        float64
		callRate   float64
		errorRate  float64
		fourxxRate float64
	}
	perSvc := make(map[string]*agg)

	for _, row := range sd.Data {
		svcName := fmt.Sprintf("%v", row[serviceNameRespIndex])
		a := perSvc[svcName]
		if a == nil {
			a = &agg{}
			perSvc[svcName] = a
		}
		for qn, idx := range aggCols {
			val := toFloat(row, idx)
			switch qn {
			case "p99_latency":
				a.p99 = val * math.Pow(10, 6)
			case "avg_latency":
				a.avg = val * math.Pow(10, 6)
			case "num_calls":
				a.callRate = val
			case "num_errors":
				a.errorRate = val
			case "num_4xx":
				a.fourxxRate = val
			}
		}
	}

	out := make([]*servicetypesv1.ResponseItem, 0, len(perSvc))
	serviceNames := make([]string, 0, len(perSvc))
	for svcName, a := range perSvc {
		// a.callRate is already a rate (calls/second) from TimeAggregationRate, no need to divide by periodSeconds
		errorRate := 0.0
		if a.callRate > 0 {
			errorRate = a.errorRate * 100 / a.callRate
		}
		fourXXRate := 0.0
		if a.callRate > 0 {
			fourXXRate = a.fourxxRate * 100 / a.callRate
		}

		out = append(out, &servicetypesv1.ResponseItem{
			ServiceName:  svcName,
			Percentile99: a.p99,
			AvgDuration:  a.avg,
			CallRate:     a.callRate,
			ErrorRate:    errorRate,
			FourXXRate:   fourXXRate,
			DataWarning:  servicetypesv1.DataWarning{TopLevelOps: []string{}},
		})
		serviceNames = append(serviceNames, svcName)
	}

	return out, serviceNames
}
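Because num_calls and num_errors both use TimeAggregationRate here, the error and 4xx percentages reduce to a simple ratio of two per-second rates, with a zero guard. A standalone sketch of that computation:

```go
package main

import "fmt"

// percentOfCalls mirrors the error/4xx computation above: both inputs are
// per-second rates, so the time window cancels out of the ratio; a zero
// call rate yields 0 instead of NaN or +Inf.
func percentOfCalls(partRate, callRate float64) float64 {
	if callRate <= 0 {
		return 0
	}
	return partRate * 100 / callRate
}

func main() {
	fmt.Println(percentOfCalls(0.5, 10)) // 5: 5% of calls errored
	fmt.Println(percentOfCalls(0.5, 0))  // 0: guarded divide-by-zero
}
```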

// attachTopLevelOps fetches top-level ops from TelemetryStore and attaches them to items.
func (m *module) attachTopLevelOps(ctx context.Context, serviceNames []string, startMs uint64, items []*servicetypesv1.ResponseItem) error {
	startTime := time.UnixMilli(int64(startMs)).UTC()
@@ -404,6 +723,546 @@ func (m *module) mapTopOpsQueryRangeResp(resp *qbtypes.QueryRangeResponse) []ser
	return out
}

// buildSpanMetricsTopOpsQueryRangeRequest constructs span-metrics queries for top operations.
func (m *module) buildSpanMetricsTopOpsQueryRangeRequest(req *servicetypesv1.OperationsRequest) (*qbtypes.QueryRangeRequest, error) {
	if req.Service == "" {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "service is required")
	}
	startNs, err := strconv.ParseUint(req.Start, 10, 64)
	if err != nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time: %v", err)
	}
	endNs, err := strconv.ParseUint(req.End, 10, 64)
	if err != nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time: %v", err)
	}
	if startNs >= endNs {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "start must be before end")
	}
	if req.Limit < 1 || req.Limit > 5000 {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000")
	}
	if err := validateTagFilterItems(req.Tags); err != nil {
		return nil, err
	}

	startMs := startNs / 1_000_000
	endMs := endNs / 1_000_000

	// Build service filter
	serviceTag := servicetypesv1.TagFilterItem{
		Key:          "service.name",
		Operator:     "in",
		StringValues: []string{req.Service},
	}
	tags := append([]servicetypesv1.TagFilterItem{serviceTag}, req.Tags...)
	filterExpr, variables := buildFilterExpression(tags)

	// Build error filter for num_errors query
	var errorFilterExpr string
	if filterExpr != "" {
		errorFilterExpr = "(" + filterExpr + ") AND (status.code = 'STATUS_CODE_ERROR')"
	} else {
		errorFilterExpr = "status.code = 'STATUS_CODE_ERROR'"
	}

	// Common groupBy on operation
	groupByOperation := []qbtypes.GroupByKey{
		{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
			Name:          "operation",
			FieldContext:  telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeString,
			Materialized:  true,
		}},
	}

	queries := []qbtypes.QueryEnvelope{
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "p50_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.bucket",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationPercentile50,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "p95_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.bucket",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "p99_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.bucket",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationPercentile99,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "num_calls",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_calls_total",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationIncrease,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "num_errors",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: errorFilterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_calls_total",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationIncrease,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
	}

	reqV5 := qbtypes.QueryRangeRequest{
		Start:       startMs,
		End:         endMs,
		RequestType: qbtypes.RequestTypeScalar,
		Variables:   variables,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: queries,
		},
	}

	return &reqV5, nil
}
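Both builders compose filter expressions the same way: parenthesize each side before AND-ing so the downstream expression parser cannot regroup the filter. A tiny sketch of that pattern:

```go
package main

import "fmt"

// andExpr reproduces the composition used above: wrap both operands in
// parentheses before joining with AND, and treat an empty base as absent.
func andExpr(base, extra string) string {
	if base == "" {
		return extra
	}
	return "(" + base + ") AND (" + extra + ")"
}

func main() {
	f := andExpr(`service.name IN ('frontend')`, "isTopLevelOperation = 'true'")
	fmt.Println(andExpr(f, "status.code = 'STATUS_CODE_ERROR'"))
}
```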

// mapSpanMetricsTopOpsResp maps span-metrics scalar results to OperationItem array using queryName for aggregation mapping.
func (m *module) mapSpanMetricsTopOpsResp(resp *qbtypes.QueryRangeResponse) []servicetypesv1.OperationItem {
	if resp == nil || len(resp.Data.Results) == 0 {
		return []servicetypesv1.OperationItem{}
	}

	// Group data by operation name and merge aggregations from all results
	type agg struct {
		p50    float64
		p95    float64
		p99    float64
		calls  uint64
		errors uint64
	}
	perOp := make(map[string]*agg)

	// Iterate through all results (each query returns a separate ScalarData)
	for _, result := range resp.Data.Results {
		sd, ok := result.(*qbtypes.ScalarData)
		if !ok || sd == nil {
			continue
		}

		// Skip empty results
		if len(sd.Columns) == 0 || len(sd.Data) == 0 {
			continue
		}

		// Find operation column index (should be consistent across all results)
		operationIdx := -1
		for i, c := range sd.Columns {
			if c.Type == qbtypes.ColumnTypeGroup && c.Name == "operation" {
				operationIdx = i
				break
			}
		}

		if operationIdx == -1 {
			continue
		}

		// Find aggregation column index
		aggIdx := -1
		for i, c := range sd.Columns {
			if c.Type == qbtypes.ColumnTypeAggregation {
				aggIdx = i
				break
			}
		}

		if aggIdx == -1 {
			continue
		}

		// Process each row in this result and merge by operation name
		queryName := sd.QueryName
		for _, row := range sd.Data {
			if len(row) <= operationIdx || len(row) <= aggIdx {
				continue
			}

			opName := fmt.Sprintf("%v", row[operationIdx])
			val := toFloat(row, aggIdx)

			a := perOp[opName]
			if a == nil {
				a = &agg{}
				perOp[opName] = a
			}

			// Map values based on queryName
			switch queryName {
			case "p50_latency":
				a.p50 = val * math.Pow(10, 6) // convert milliseconds to nanoseconds
			case "p95_latency":
				a.p95 = val * math.Pow(10, 6)
			case "p99_latency":
				a.p99 = val * math.Pow(10, 6)
			case "num_calls":
				a.calls = uint64(val)
			case "num_errors":
				a.errors = uint64(val)
			}
		}
	}

	if len(perOp) == 0 {
		return []servicetypesv1.OperationItem{}
	}

	// Convert to OperationItem array and sort by P99 desc
	out := make([]servicetypesv1.OperationItem, 0, len(perOp))
	for opName, a := range perOp {
		out = append(out, servicetypesv1.OperationItem{
			Name:       opName,
			P50:        a.p50,
			P95:        a.p95,
			P99:        a.p99,
			NumCalls:   a.calls,
			ErrorCount: a.errors,
		})
	}

	// Sort by P99 descending (matching traces behavior)
	sort.Slice(out, func(i, j int) bool {
		return out[i].P99 > out[j].P99
	})

	return out
}
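The merge above folds one ScalarData per query into a single per-operation struct, then sorts descending by P99. A compact, self-contained model of that shape:

```go
package main

import (
	"fmt"
	"sort"
)

// op is a pared-down stand-in for servicetypesv1.OperationItem.
type op struct {
	name string
	p99  float64
}

// mergeAndSort folds rows from several named queries into one record per
// operation, keyed by the group value, then sorts by P99 descending —
// the same two steps the mapper above performs.
func mergeAndSort(results map[string]map[string]float64) []op {
	perOp := map[string]*op{}
	for queryName, rows := range results {
		for name, val := range rows {
			a := perOp[name]
			if a == nil {
				a = &op{name: name}
				perOp[name] = a
			}
			if queryName == "p99_latency" {
				a.p99 = val
			}
		}
	}
	out := make([]op, 0, len(perOp))
	for _, a := range perOp {
		out = append(out, *a)
	}
	sort.Slice(out, func(i, j int) bool { return out[i].p99 > out[j].p99 })
	return out
}

func main() {
	fmt.Println(mergeAndSort(map[string]map[string]float64{
		"p99_latency": {"GET /a": 120, "GET /b": 300},
	})) // [{GET /b 300} {GET /a 120}]
}
```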

// mapSpanMetricsEntryPointOpsResp maps span-metrics scalar results to OperationItem array for entry point operations.
// Uses queryName for aggregation mapping.
func (m *module) mapSpanMetricsEntryPointOpsResp(resp *qbtypes.QueryRangeResponse) []servicetypesv1.OperationItem {
	if resp == nil || len(resp.Data.Results) == 0 {
		return []servicetypesv1.OperationItem{}
	}

	// Group data by operation name and merge aggregations from all results
	type agg struct {
		p50    float64
		p95    float64
		p99    float64
		calls  uint64
		errors uint64
	}
	perOp := make(map[string]*agg)

	// Iterate through all results (each query returns a separate ScalarData)
	for _, result := range resp.Data.Results {
		sd, ok := result.(*qbtypes.ScalarData)
		if !ok || sd == nil {
			continue
		}

		// Skip empty results
		if len(sd.Columns) == 0 || len(sd.Data) == 0 {
			continue
		}

		// Find operation column index (should be consistent across all results)
		operationIdx := -1
		for i, c := range sd.Columns {
			if c.Type == qbtypes.ColumnTypeGroup && c.Name == "operation" {
				operationIdx = i
				break
			}
		}

		if operationIdx == -1 {
			continue
		}

		// Find aggregation column index
		aggIdx := -1
		for i, c := range sd.Columns {
			if c.Type == qbtypes.ColumnTypeAggregation {
				aggIdx = i
				break
			}
		}

		if aggIdx == -1 {
			continue
		}

		// Process each row in this result and merge by operation name
		queryName := sd.QueryName
		for _, row := range sd.Data {
			if len(row) <= operationIdx || len(row) <= aggIdx {
				continue
			}

			opName := fmt.Sprintf("%v", row[operationIdx])
			val := toFloat(row, aggIdx)

			a := perOp[opName]
			if a == nil {
				a = &agg{}
				perOp[opName] = a
			}

			// Map values based on queryName
			switch queryName {
			case "p50_latency":
				a.p50 = val * math.Pow(10, 6) // convert milliseconds to nanoseconds
			case "p95_latency":
				a.p95 = val * math.Pow(10, 6)
			case "p99_latency":
				a.p99 = val * math.Pow(10, 6)
			case "num_calls":
				a.calls = uint64(val)
			case "num_errors":
				a.errors = uint64(val)
			}
		}
	}

	if len(perOp) == 0 {
		return []servicetypesv1.OperationItem{}
	}

	// Convert to OperationItem array and sort by P99 desc
	out := make([]servicetypesv1.OperationItem, 0, len(perOp))
	for opName, a := range perOp {
		out = append(out, servicetypesv1.OperationItem{
			Name:       opName,
			P50:        a.p50,
			P95:        a.p95,
			P99:        a.p99,
			NumCalls:   a.calls,
			ErrorCount: a.errors,
		})
	}

	// Sort by P99 descending (matching traces behavior)
	sort.Slice(out, func(i, j int) bool {
		return out[i].P99 > out[j].P99
	})

	return out
}
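mapSpanMetricsTopOpsResp and mapSpanMetricsEntryPointOpsResp are line-for-line identical; the most duplicated fragment is the column-index lookup. A sketch, with a stand-in column type, of a shared helper both could call:

```go
package main

import "fmt"

// column is a pared-down stand-in for the qbtypes column descriptor.
type column struct {
	typ  string // "group" or "aggregation"
	name string
}

// findColumn captures the lookup both mappers repeat: return the index of
// the first column matching typ (and name, when name is non-empty), or the
// -1 sentinel both mappers check before reading rows.
func findColumn(cols []column, typ, name string) int {
	for i, c := range cols {
		if c.typ == typ && (name == "" || c.name == name) {
			return i
		}
	}
	return -1
}

func main() {
	cols := []column{{"group", "operation"}, {"aggregation", "p99_latency"}}
	fmt.Println(findColumn(cols, "group", "operation"))    // 0
	fmt.Println(findColumn(cols, "aggregation", ""))       // 1
	fmt.Println(findColumn(cols, "group", "service.name")) // -1
}
```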

// buildSpanMetricsEntryPointOpsQueryRangeRequest constructs span-metrics queries for entry point operations.
// Similar to buildSpanMetricsTopOpsQueryRangeRequest but includes isTopLevelOperation filter.
func (m *module) buildSpanMetricsEntryPointOpsQueryRangeRequest(req *servicetypesv1.OperationsRequest) (*qbtypes.QueryRangeRequest, error) {
	if req.Service == "" {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "service is required")
	}
	startNs, err := strconv.ParseUint(req.Start, 10, 64)
	if err != nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time: %v", err)
	}
	endNs, err := strconv.ParseUint(req.End, 10, 64)
	if err != nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time: %v", err)
	}
	if startNs >= endNs {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "start must be before end")
	}
	if req.Limit < 1 || req.Limit > 5000 {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000")
	}
	if err := validateTagFilterItems(req.Tags); err != nil {
		return nil, err
	}

	startMs := startNs / 1_000_000
	endMs := endNs / 1_000_000

	// Build service filter
	serviceTag := servicetypesv1.TagFilterItem{
		Key:          "service.name",
		Operator:     "in",
		StringValues: []string{req.Service},
	}
	tags := append([]servicetypesv1.TagFilterItem{serviceTag}, req.Tags...)
	filterExpr, variables := buildFilterExpression(tags)

	// Enforce top-level scope via synthetic field (entry point operations only)
	scopeExpr := "isTopLevelOperation = 'true'"
	if filterExpr != "" {
		filterExpr = "(" + filterExpr + ") AND (" + scopeExpr + ")"
	} else {
		filterExpr = scopeExpr
	}

	// Build error filter for num_errors query
	var errorFilterExpr string
	if filterExpr != "" {
		errorFilterExpr = "(" + filterExpr + ") AND (status.code = 'STATUS_CODE_ERROR')"
	} else {
		errorFilterExpr = "status.code = 'STATUS_CODE_ERROR'"
	}

	// Common groupBy on operation
	groupByOperation := []qbtypes.GroupByKey{
		{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
			Name:          "operation",
			FieldContext:  telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeString,
			Materialized:  true,
		}},
	}

	queries := []qbtypes.QueryEnvelope{
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "p50_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.bucket",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationPercentile50,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "p95_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.bucket",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "p99_latency",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_latency.bucket",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationPercentile99,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "num_calls",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: filterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_calls_total",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationIncrease,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
		{Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:    "num_errors",
				Signal:  telemetrytypes.SignalMetrics,
				Filter:  &qbtypes.Filter{Expression: errorFilterExpr},
				GroupBy: groupByOperation,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       "signoz_calls_total",
						Temporality:      metrictypes.Delta,
						TimeAggregation:  metrictypes.TimeAggregationIncrease,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
			},
		},
	}

	reqV5 := qbtypes.QueryRangeRequest{
		Start:       startMs,
		End:         endMs,
		RequestType: qbtypes.RequestTypeScalar,
		Variables:   variables,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: queries,
		},
	}

	return &reqV5, nil
}

func (m *module) buildEntryPointOpsQueryRangeRequest(req *servicetypesv1.OperationsRequest) (*qbtypes.QueryRangeRequest, error) {
	if req.Service == "" {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "service is required")

@@ -10,11 +10,9 @@ import (
 	"github.com/ClickHouse/clickhouse-go/v2"
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/telemetrylogs"
 	"github.com/SigNoz/signoz/pkg/telemetrystore"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 	"github.com/bytedance/sonic"
 )

 type builderQuery[T any] struct {
@@ -250,40 +248,6 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
 		return nil, err
 	}

-	// merge body_json and promoted into body
-	if q.spec.Signal == telemetrytypes.SignalLogs {
-		switch typedPayload := payload.(type) {
-		case *qbtypes.RawData:
-			for _, rr := range typedPayload.Rows {
-				seeder := func() error {
-					body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
-					if !ok {
-						return nil
-					}
-					promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
-					if !ok {
-						return nil
-					}
-					seed(promoted, body)
-					str, err := sonic.MarshalString(body)
-					if err != nil {
-						return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
-					}
-					rr.Data["body"] = str
-					return nil
-				}
-				err := seeder()
-				if err != nil {
-					return nil, err
-				}
-
-				delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
-				delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
-			}
-			payload = typedPayload
-		}
-	}

 	return &qbtypes.Result{
 		Type:  q.kind,
 		Value: payload,
@@ -411,18 +375,3 @@ func decodeCursor(cur string) (int64, error) {
 	}
 	return strconv.ParseInt(string(b), 10, 64)
 }
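Only the tail of decodeCursor is visible here. A sketch of the matching pair, assuming the cursor is a base64-encoded decimal value — an assumption consistent with the ParseInt tail above, not something the PR states:

```go
package main

import (
	"encoding/base64"
	"fmt"
	"strconv"
)

// encodeCursor/decodeCursor: hypothetical round-trip, assuming base64 over
// a decimal string. Only the ParseInt line is confirmed by the diff above.
func encodeCursor(ts int64) string {
	return base64.StdEncoding.EncodeToString([]byte(strconv.FormatInt(ts, 10)))
}

func decodeCursor(cur string) (int64, error) {
	b, err := base64.StdEncoding.DecodeString(cur)
	if err != nil {
		return 0, err
	}
	return strconv.ParseInt(string(b), 10, 64)
}

func main() {
	c := encodeCursor(1700000000000)
	ts, err := decodeCursor(c)
	fmt.Println(c, ts, err)
}
```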

-func seed(promoted map[string]any, body map[string]any) {
-	for key, fromValue := range promoted {
-		if toValue, ok := body[key]; !ok {
-			body[key] = fromValue
-		} else {
-			if fromValue, ok := fromValue.(map[string]any); ok {
-				if toValue, ok := toValue.(map[string]any); ok {
-					seed(fromValue, toValue)
-					body[key] = toValue
-				}
-			}
-		}
-	}
-}
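For reference, the removed seed helper is small enough to run standalone: promoted keys are folded into body without overwriting existing scalars, and nested maps are merged recursively.

```go
package main

import "fmt"

// seed is a runnable copy of the helper removed above: keys missing from
// body are copied in; keys present in both and map-valued on both sides
// are merged recursively; conflicting scalars keep the body value.
func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

func main() {
	body := map[string]any{"a": 1, "nested": map[string]any{"x": 1}}
	promoted := map[string]any{"b": 2, "nested": map[string]any{"y": 2}}
	seed(promoted, body)
	fmt.Println(body) // map[a:1 b:2 nested:map[x:1 y:2]]
}
```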

@@ -14,7 +14,6 @@ import (
 	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
-	"github.com/bytedance/sonic"
 )

 var (
@@ -52,6 +51,7 @@ func consume(rows driver.Rows, kind qbtypes.RequestType, queryWindow *qbtypes.Ti
 }

 func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbtypes.Step, queryName string) (*qbtypes.TimeSeriesData, error) {

 	colTypes := rows.ColumnTypes()
 	colNames := rows.Columns()

@@ -354,22 +354,10 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
 	colTypes := rows.ColumnTypes()
 	colCnt := len(colNames)

-	// Helper that decides scan target per column based on DB type
-	makeScanTarget := func(i int) any {
-		dbt := strings.ToUpper(colTypes[i].DatabaseTypeName())
-		if strings.HasPrefix(dbt, "JSON") {
-			// Since the driver fails to decode JSON/Dynamic into native Go values, we read it as raw bytes
-			// TODO: check in future if fixed in the driver
-			var v []byte
-			return &v
-		}
-		return reflect.New(colTypes[i].ScanType()).Interface()
-	}
-
 	// Build a template slice of correctly-typed pointers once
 	scanTpl := make([]any, colCnt)
-	for i := range colTypes {
-		scanTpl[i] = makeScanTarget(i)
+	for i, ct := range colTypes {
+		scanTpl[i] = reflect.New(ct.ScanType()).Interface()
 	}

 	var outRows []*qbtypes.RawRow
@@ -378,7 +366,7 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
 	// fresh copy of the scan slice (otherwise the driver reuses pointers)
 	scan := make([]any, colCnt)
 	for i := range scanTpl {
-		scan[i] = makeScanTarget(i)
+		scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
 	}

 	if err := rows.Scan(scan...); err != nil {
@@ -395,28 +383,6 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
 	// de-reference the typed pointer to any
 	val := reflect.ValueOf(cellPtr).Elem().Interface()

-	// Post-process JSON columns: normalize into structured values
-	if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
-		switch x := val.(type) {
-		case []byte:
-			if len(x) > 0 {
-				var v any
-				if err := sonic.Unmarshal(x, &v); err == nil {
-					val = v
-				}
-			}
-		case string:
-			if x != "" {
-				var v any
-				if err := sonic.Unmarshal([]byte(x), &v); err == nil {
-					val = v
-				}
-			}
-		default:
-			// already a structured type (map[string]any, []any, etc.)
-		}
-	}

 	// special-case: timestamp column
 	if name == "timestamp" || name == "timestamp_datetime" {
 		switch t := val.(type) {

@@ -78,7 +78,7 @@ func newProvider(
 		telemetryMetadataStore,
 	)

-	traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
+	traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
 	traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
 		settings,
 		telemetryMetadataStore,
@@ -102,13 +102,14 @@ func newProvider(

 	// Create log statement builder
 	logFieldMapper := telemetrylogs.NewFieldMapper()
-	logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, telemetryMetadataStore)
+	logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
 	logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
 		settings,
 		resourceFilterFieldMapper,
 		resourceFilterConditionBuilder,
 		telemetryMetadataStore,
 		telemetrylogs.DefaultFullTextColumn,
 		telemetrylogs.BodyJSONStringSearchPrefix,
 		telemetrylogs.GetBodyJSONKey,
 	)
 	logAggExprRewriter := querybuilder.NewAggExprRewriter(
@@ -116,6 +117,7 @@ func newProvider(
 		telemetrylogs.DefaultFullTextColumn,
 		logFieldMapper,
 		logConditionBuilder,
+		telemetrylogs.BodyJSONStringSearchPrefix,
 		telemetrylogs.GetBodyJSONKey,
 	)
 	logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder(
@@ -126,6 +128,7 @@ func newProvider(
 		logResourceFilterStmtBuilder,
 		logAggExprRewriter,
 		telemetrylogs.DefaultFullTextColumn,
+		telemetrylogs.BodyJSONStringSearchPrefix,
 		telemetrylogs.GetBodyJSONKey,
 	)

@@ -11,16 +11,13 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/agentConf"
 	"github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
-	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
-	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/opamptypes"
 	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
-	"github.com/google/uuid"

 	"go.uber.org/zap"
 )

@@ -131,40 +128,6 @@ func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
 	return err
 }

-func (ic *LogParsingPipelineController) getDefaultPipelines() ([]pipelinetypes.GettablePipeline, error) {
-	defaultPipelines := []pipelinetypes.GettablePipeline{}
-	if querybuilder.BodyJSONQueryEnabled {
-		preprocessingPipeline := pipelinetypes.GettablePipeline{
-			StoreablePipeline: pipelinetypes.StoreablePipeline{
-				Name:    "Default Pipeline - PreProcessing Body",
-				Alias:   "NormalizeBodyDefault",
-				Enabled: true,
-			},
-			Filter: &v3.FilterSet{
-				Items: []v3.FilterItem{
-					{
-						Key: v3.AttributeKey{
-							Key: "body",
-						},
-						Operator: v3.FilterOperatorExists,
-					},
-				},
-			},
-			Config: []pipelinetypes.PipelineOperator{
-				{
-					ID:      uuid.NewString(),
-					Type:    "normalize",
-					Enabled: true,
-					If:      "body != nil",
-				},
-			},
-		}
-
-		defaultPipelines = append(defaultPipelines, preprocessingPipeline)
-	}
-	return defaultPipelines, nil
-}

 // Returns effective list of pipelines including user created
 // pipelines and pipelines for installed integrations
 func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
@@ -295,13 +258,6 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
 		return nil, "", err
 	}

-	// recommend default pipelines along with user created pipelines
-	defaultPipelines, err := pc.getDefaultPipelines()
-	if err != nil {
-		return nil, "", model.InternalError(fmt.Errorf("failed to get default pipelines: %w", err))
-	}
-	pipelinesResp.Pipelines = append(pipelinesResp.Pipelines, defaultPipelines...)
-
 	updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
 	if err != nil {
 		return nil, "", err
|
||||
|
||||
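The removal above takes out the feature-flag-gated default pipeline. The pattern itself reduces to a conditional slice merge before config generation; a minimal sketch with hypothetical names:

package main

import "fmt"

type pipeline struct {
	Name    string
	Enabled bool
}

// defaultPipelines mirrors getDefaultPipelines above: built-in pipelines are
// returned only when the body-JSON feature flag is on. Names are hypothetical.
func defaultPipelines(bodyJSONEnabled bool) []pipeline {
	if !bodyJSONEnabled {
		return nil
	}
	return []pipeline{{Name: "Default Pipeline - PreProcessing Body", Enabled: true}}
}

func main() {
	user := []pipeline{{Name: "user-defined", Enabled: true}}
	// defaults are appended after user pipelines, as RecommendAgentConfig did
	effective := append(user, defaultPipelines(true)...)
	fmt.Println(effective)
}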
@@ -132,7 +132,7 @@ func SignozLogsToPLogs(logs []model.SignozLog) []plog.Logs {
	slRecord.SetSeverityText(log.SeverityText)
	slRecord.SetSeverityNumber(plog.SeverityNumber(log.SeverityNumber))

	slRecord.Body().FromRaw(log.Body)
	slRecord.Body().SetStr(log.Body)

	slAttribs := slRecord.Attributes()
	for k, v := range log.Attributes_int64 {
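The FromRaw to SetStr change above is safe because log.Body is already a string here, so SetStr simply skips the error-returning generic path. The behavioral difference only shows up for structured inputs, as in this sketch (assuming the collector's pdata module is importable):

package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/plog"
)

func main() {
	lr := plog.NewLogRecord()

	// SetStr: the body is always stored as a string value.
	lr.Body().SetStr(`{"user":"alice"}`)
	fmt.Println(lr.Body().Type()) // Str

	// FromRaw: structured Go values become structured pdata values.
	_ = lr.Body().FromRaw(map[string]any{"user": "alice"})
	fmt.Println(lr.Body().Type()) // Map
}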
@@ -20,6 +20,7 @@ type aggExprRewriter struct {
	fullTextColumn   *telemetrytypes.TelemetryFieldKey
	fieldMapper      qbtypes.FieldMapper
	conditionBuilder qbtypes.ConditionBuilder
	jsonBodyPrefix   string
	jsonKeyToKey     qbtypes.JsonKeyToFieldFunc
}

@@ -30,6 +31,7 @@ func NewAggExprRewriter(
	fullTextColumn *telemetrytypes.TelemetryFieldKey,
	fieldMapper qbtypes.FieldMapper,
	conditionBuilder qbtypes.ConditionBuilder,
	jsonBodyPrefix string,
	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *aggExprRewriter {
	set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
@@ -39,6 +41,7 @@ func NewAggExprRewriter(
		fullTextColumn:   fullTextColumn,
		fieldMapper:      fieldMapper,
		conditionBuilder: conditionBuilder,
		jsonBodyPrefix:   jsonBodyPrefix,
		jsonKeyToKey:     jsonKeyToKey,
	}
}
@@ -78,6 +81,7 @@ func (r *aggExprRewriter) Rewrite(
		r.fullTextColumn,
		r.fieldMapper,
		r.conditionBuilder,
		r.jsonBodyPrefix,
		r.jsonKeyToKey,
	)
	// Rewrite the first select item (our expression)
@@ -125,6 +129,7 @@ type exprVisitor struct {
	fullTextColumn   *telemetrytypes.TelemetryFieldKey
	fieldMapper      qbtypes.FieldMapper
	conditionBuilder qbtypes.ConditionBuilder
	jsonBodyPrefix   string
	jsonKeyToKey     qbtypes.JsonKeyToFieldFunc
	Modified         bool
	chArgs           []any
@@ -137,6 +142,7 @@ func newExprVisitor(
	fullTextColumn *telemetrytypes.TelemetryFieldKey,
	fieldMapper qbtypes.FieldMapper,
	conditionBuilder qbtypes.ConditionBuilder,
	jsonBodyPrefix string,
	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
	return &exprVisitor{
@@ -145,6 +151,7 @@ func newExprVisitor(
		fullTextColumn:   fullTextColumn,
		fieldMapper:      fieldMapper,
		conditionBuilder: conditionBuilder,
		jsonBodyPrefix:   jsonBodyPrefix,
		jsonKeyToKey:     jsonKeyToKey,
	}
}
@@ -183,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
	if aggFunc.FuncCombinator {
		// Map the predicate (last argument)
		origPred := args[len(args)-1].String()
		whereClause, err := PrepareWhereClause(
		whereClause, err := PrepareWhereClause(
			origPred,
			FilterExprVisitorOpts{
				Logger: v.logger,
@@ -192,7 +199,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
				ConditionBuilder: v.conditionBuilder,
				FullTextColumn:   v.fullTextColumn,
				JsonKeyToKey:     v.jsonKeyToKey,
			}, 0, 0,
			}, 0, 0,
		)
		if err != nil {
			return err
@@ -212,7 +219,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
		for i := 0; i < len(args)-1; i++ {
			origVal := args[i].String()
			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
			if err != nil {
				return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
			}
@@ -230,7 +237,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
		for i, arg := range args {
			orig := arg.String()
			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
			if err != nil {
				return err
			}
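These hunks thread two extra collaborators, jsonBodyPrefix and jsonKeyToKey, from the constructor down to the visitor. A reduced sketch of the routing they enable, with hypothetical types standing in for the qbtypes interfaces:

package main

import (
	"fmt"
	"strings"
)

// jsonKeyToFieldFunc stands in for qbtypes.JsonKeyToFieldFunc; the real
// signature also takes a context, a filter operator, and the value.
type jsonKeyToFieldFunc func(key string) (string, any)

type rewriter struct {
	jsonBodyPrefix string
	jsonKeyToKey   jsonKeyToFieldFunc
}

// columnFor routes body-prefixed fields through the JSON hook, mirroring the
// strings.HasPrefix(field.Name, jsonBodyPrefix) guard added above.
func (r *rewriter) columnFor(field string) string {
	if r.jsonBodyPrefix != "" && r.jsonKeyToKey != nil && strings.HasPrefix(field, r.jsonBodyPrefix) {
		col, _ := r.jsonKeyToKey(field)
		return col
	}
	return field
}

func main() {
	r := &rewriter{
		jsonBodyPrefix: "body.",
		jsonKeyToKey: func(k string) (string, any) {
			return "body_json.`" + strings.TrimPrefix(k, "body.") + "`", nil
		},
	}
	fmt.Println(r.columnFor("body.user.id")) // routed through the JSON hook
	fmt.Println(r.columnFor("service.name")) // left untouched
}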
@@ -24,6 +24,7 @@ func CollisionHandledFinalExpr(
	cb qbtypes.ConditionBuilder,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	requiredDataType telemetrytypes.FieldDataType,
	jsonBodyPrefix string,
	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) (string, []any, error) {

@@ -44,7 +45,7 @@ func CollisionHandledFinalExpr(

	addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
		sb := sqlbuilder.NewSelectBuilder()
		condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
		condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
		if err != nil {
			return err
		}
@@ -57,8 +58,8 @@ func CollisionHandledFinalExpr(
		return nil
	}

	colName, fieldForErr := fm.FieldFor(ctx, field)
	if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
	colName, err := fm.FieldFor(ctx, field)
	if errors.Is(err, qbtypes.ErrColumnNotFound) {
		// the key didn't have the right context to be added to the query
		// we try to use the context we know of
		keysForField := keys[field.Name]
@@ -81,10 +82,10 @@ func CollisionHandledFinalExpr(
			correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
			if found {
				// we found a close match, in the error message send the suggestion
				return "", nil, errors.WithAdditionalf(fieldForErr, "%s", correction)
				return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
			} else {
				// not even a close match, return an error
				return "", nil, errors.WithAdditionalf(fieldForErr, "field `%s` not found", field.Name)
				return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
			}
		} else {
			for _, key := range keysForField {
@@ -103,11 +104,10 @@ func CollisionHandledFinalExpr(
		return "", nil, err
	}

	// the first condition covers the older tests; the second covers the array conditions
	if !BodyJSONQueryEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil {
	if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
		// TODO(nitya): enable group by on body column?
		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
	} else if strings.Contains(field.Name, telemetrytypes.ArraySep) || strings.Contains(field.Name, telemetrytypes.ArrayAnyIndex) {
		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", field.Name)
		// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
	} else {
		colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
	}
@@ -204,7 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
	// While we expect the user not to send mixed data types, it inevitably happens
	// So we handle the data type collisions here
	switch key.FieldDataType {
	case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
	case telemetrytypes.FieldDataTypeString:
		switch v := value.(type) {
		case float64:
			// try to convert the string value to a number
@@ -219,36 +219,8 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
			// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
			value = fmt.Sprintf("%t", v)
		}
	case telemetrytypes.FieldDataTypeFloat64,
		telemetrytypes.FieldDataTypeArrayFloat64:
		switch v := value.(type) {
		case float32, float64:
			tblFieldName = castFloatHack(tblFieldName)
		case string:
			// check if it's a number inside a string
			isNumber := false
			if _, err := strconv.ParseFloat(v, 64); err == nil {
				isNumber = true
			}

			if !operator.IsComparisonOperator() || !isNumber {
				// try to convert the number attribute to string
				tblFieldName = castString(tblFieldName) // numeric col vs string literal
			} else {
				tblFieldName = castFloatHack(tblFieldName)
			}
		case []any:
			if allFloats(v) {
				tblFieldName = castFloatHack(tblFieldName)
			} else if hasString(v) {
				tblFieldName, value = castString(tblFieldName), toStrings(v)
			}
		}

	case telemetrytypes.FieldDataTypeInt64,
		telemetrytypes.FieldDataTypeArrayInt64,
		telemetrytypes.FieldDataTypeNumber,
		telemetrytypes.FieldDataTypeArrayNumber:
	case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
		switch v := value.(type) {
		// why? CH returns an error for a simple check
		// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
@@ -286,8 +258,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
		}
	}

	case telemetrytypes.FieldDataTypeBool,
		telemetrytypes.FieldDataTypeArrayBool:
	case telemetrytypes.FieldDataTypeBool:
		switch v := value.(type) {
		case string:
			tblFieldName = castString(tblFieldName)
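The Float64 branch above encodes one rule worth spelling out: a numeric column compared against a string literal is cast to String unless the literal parses as a number and the operator is a comparison. A compact sketch of that rule (castString and castFloat are stand-ins for the real cast helpers):

package main

import (
	"fmt"
	"strconv"
)

func castString(col string) string { return "toString(" + col + ")" }
func castFloat(col string) string  { return "toFloat64OrNull(" + col + ")" }

// resolveNumericVsString mirrors the string case of the Float64 branch above.
func resolveNumericVsString(col, literal string, isComparison bool) string {
	_, err := strconv.ParseFloat(literal, 64)
	isNumber := err == nil
	if !isComparison || !isNumber {
		return castString(col) // numeric column vs string literal
	}
	return castFloat(col)
}

func main() {
	fmt.Println(resolveNumericVsString("attributes_number['code']", "200", true)) // keeps numeric cast
	fmt.Println(resolveNumericVsString("attributes_number['code']", "OK", true))  // falls back to string
}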
@@ -43,6 +43,7 @@ type resourceFilterStatementBuilder[T any] struct {
	signal telemetrytypes.Signal

	fullTextColumn *telemetrytypes.TelemetryFieldKey
	jsonBodyPrefix string
	jsonKeyToKey   qbtypes.JsonKeyToFieldFunc
}

@@ -75,6 +76,7 @@ func NewLogResourceFilterStatementBuilder(
	conditionBuilder qbtypes.ConditionBuilder,
	metadataStore telemetrytypes.MetadataStore,
	fullTextColumn *telemetrytypes.TelemetryFieldKey,
	jsonBodyPrefix string,
	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
	set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
@@ -85,6 +87,7 @@ func NewLogResourceFilterStatementBuilder(
		metadataStore:  metadataStore,
		signal:         telemetrytypes.SignalLogs,
		fullTextColumn: fullTextColumn,
		jsonBodyPrefix: jsonBodyPrefix,
		jsonKeyToKey:   jsonKeyToKey,
	}
}
@@ -97,18 +100,12 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
		keySelectors = append(keySelectors, whereClauseSelectors...)
	}

	// exclude the body-related key selectors
	filteredKeySelectors := []*telemetrytypes.FieldKeySelector{}
	for idx := range keySelectors {
		if keySelectors[idx].FieldContext == telemetrytypes.FieldContextBody {
			continue
		}
		keySelectors[idx].Signal = b.signal
		keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
		filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
	}

	return filteredKeySelectors
	return keySelectors
}

// Build builds a SQL query based on the given parameters
@@ -171,7 +168,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
		// there is no need for "key" not found error for resource filtering
		IgnoreNotFoundKeys: true,
		Variables:          variables,
	}, start, end)
	}, start, end)

	if err != nil {
		return err
@@ -16,12 +16,11 @@ import (
)

type conditionBuilder struct {
	fm            qbtypes.FieldMapper
	metadataStore telemetrytypes.MetadataStore
	fm qbtypes.FieldMapper
}

func NewConditionBuilder(fm qbtypes.FieldMapper, metadataStore telemetrytypes.MetadataStore) *conditionBuilder {
	return &conditionBuilder{fm: fm, metadataStore: metadataStore}
func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
	return &conditionBuilder{fm: fm}
}

func (c *conditionBuilder) conditionFor(
@@ -31,34 +30,22 @@ func (c *conditionBuilder) conditionFor(
	value any,
	sb *sqlbuilder.SelectBuilder,
) (string, error) {

	switch operator {
	case qbtypes.FilterOperatorContains,
		qbtypes.FilterOperatorNotContains,
		qbtypes.FilterOperatorILike,
		qbtypes.FilterOperatorNotILike,
		qbtypes.FilterOperatorLike,
		qbtypes.FilterOperatorNotLike:
		value = querybuilder.FormatValueForContains(value)
	}

	column, err := c.fm.ColumnFor(ctx, key)
	if err != nil {
		return "", err
	}

	// For JSON columns, preserve the original value type (numeric, bool, etc.)
	// Only format to string for non-JSON columns that need string formatting
	isJSONColumn := column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled && key.FieldContext == telemetrytypes.FieldContextBody
	if !isJSONColumn {
		switch operator {
		case qbtypes.FilterOperatorContains,
			qbtypes.FilterOperatorNotContains,
			qbtypes.FilterOperatorILike,
			qbtypes.FilterOperatorNotILike,
			qbtypes.FilterOperatorLike,
			qbtypes.FilterOperatorNotLike:
			value = querybuilder.FormatValueForContains(value)
		}
	}

	if isJSONColumn {
		cond, err := c.buildJSONCondition(ctx, key, operator, value, sb)
		if err != nil {
			return "", err
		}
		return cond, nil
	}

	tblFieldName, err := c.fm.FieldFor(ctx, key)
	if err != nil {
		return "", err
@@ -176,7 +163,9 @@ func (c *conditionBuilder) conditionFor(
	// in the UI based query builder, `exists` and `not exists` are used for
	// key membership checks, so depending on the column type, the condition changes
	case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
		if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled {

		// Check if this is a body JSON search - by FieldContext
		if key.FieldContext == telemetrytypes.FieldContextBody {
			if operator == qbtypes.FilterOperatorExists {
				return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
			} else {
@@ -258,7 +247,7 @@ func (c *conditionBuilder) ConditionFor(
		return "", err
	}

	if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
	if operator.AddDefaultExistsFilter() {
		// skip adding exists filter for intrinsic fields
		// with an exception for body json search
		field, _ := c.fm.FieldFor(ctx, key)

@@ -373,8 +373,7 @@ func TestConditionFor(t *testing.T) {
	}

	fm := NewFieldMapper()
	mockMetadataStore := buildTestTelemetryMetadataStore()
	conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
	conditionBuilder := NewConditionBuilder(fm)

	for _, tc := range testCases {
		sb := sqlbuilder.NewSelectBuilder()
@@ -427,8 +426,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
	}

	fm := NewFieldMapper()
	mockMetadataStore := buildTestTelemetryMetadataStore()
	conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
	conditionBuilder := NewConditionBuilder(fm)

	for _, tc := range testCases {
		sb := sqlbuilder.NewSelectBuilder()
@@ -687,8 +685,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
	}

	fm := NewFieldMapper()
	mockMetadataStore := buildTestTelemetryMetadataStore()
	conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
	conditionBuilder := NewConditionBuilder(fm)

	for _, tc := range testCases {
		sb := sqlbuilder.NewSelectBuilder()
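The reshuffled conditionFor makes the same decision in two places: string-matching operators coerce the value to a string, except for JSON columns, where the original type must survive. A reduced sketch of that gate (the real FormatValueForContains lives in the querybuilder package; its exact behavior is assumed here):

package main

import "fmt"

// formatValueForContains is an assumed stand-in: render non-strings as
// strings so LIKE-style matching has something to match against.
func formatValueForContains(v any) any {
	return fmt.Sprintf("%v", v)
}

func prepareValue(op string, v any, isJSONColumn bool) any {
	switch op {
	case "contains", "ncontains", "like", "nlike", "ilike", "nilike":
		if !isJSONColumn {
			return formatValueForContains(v)
		}
	}
	return v
}

func main() {
	fmt.Println(prepareValue("contains", 404, false)) // "404" as a string
	fmt.Println(prepareValue("contains", 404, true))  // 404 preserved for JSON
}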
@@ -2,6 +2,7 @@ package telemetrylogs

import (
	"github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -36,6 +37,8 @@ const (

	BodyJSONColumnPrefix     = constants.BodyJSONColumnPrefix
	BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
	ArraySep                 = jsontypeexporter.ArraySeparator
	ArrayAnyIndex            = "[*]."
)

var (
@@ -45,7 +48,8 @@ var (
		FieldContext:  telemetrytypes.FieldContextLog,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	}
	IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
	BodyJSONStringSearchPrefix = `body.`
	IntrinsicFields            = map[string]telemetrytypes.TelemetryFieldKey{
		"body": {
			Name:   "body",
			Signal: telemetrytypes.SignalLogs,
@@ -6,9 +6,7 @@ import (
	"strings"

	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz-otel-collector/utils"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
@@ -30,11 +28,6 @@ var (
	"severity_text":   {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
	"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
	"body":            {Name: "body", Type: schema.ColumnTypeString},
	LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
		MaxDynamicTypes: utils.ToPointer(uint(32)),
		MaxDynamicPaths: utils.ToPointer(uint(0)),
	}},
	LogsV2BodyPromotedColumn: {Name: LogsV2BodyPromotedColumn, Type: schema.JSONColumnType{}},
	"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{
		KeyType:   schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
		ValueType: schema.ColumnTypeString,
@@ -90,23 +83,13 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
			return logsV2Columns["attributes_bool"], nil
		}
	case telemetrytypes.FieldContextBody:
		// Body context is for JSON body fields
		// Use body_json if feature flag is enabled
		if querybuilder.BodyJSONQueryEnabled {
			return logsV2Columns[LogsV2BodyJSONColumn], nil
		}
		// Fall back to legacy body column
		// body context fields are stored in the body column
		return logsV2Columns["body"], nil
	case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
		col, ok := logsV2Columns[key.Name]
		if !ok {
			// check if the key has body JSON search
			if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
				// Use body_json if feature flag is enabled and we have a body condition builder
				if querybuilder.BodyJSONQueryEnabled {
					return logsV2Columns[LogsV2BodyJSONColumn], nil
				}
				// Fall back to legacy body column
			// check if the key has body JSON search (backward compatibility)
			if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
				return logsV2Columns["body"], nil
			}
			return nil, qbtypes.ErrColumnNotFound
@@ -126,34 +109,21 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
	switch column.Type.GetType() {
	case schema.ColumnTypeEnumJSON:
		// json is only supported for resource context as of now
		switch key.FieldContext {
		case telemetrytypes.FieldContextResource:
			oldColumn := logsV2Columns["resources_string"]
			oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)

			// have to add ::String as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
			// once the ClickHouse dependency is updated, we need to check if we can remove it.
			if key.Materialized {
				oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
				oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
				return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
			}
			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
		case telemetrytypes.FieldContextBody:
			if strings.Contains(key.Name, telemetrytypes.ArraySep) || strings.Contains(key.Name, telemetrytypes.ArrayAnyIndex) {
				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", key.Name)
			}
			fieldExpr := BodyJSONColumnPrefix + fmt.Sprintf("`%s`", key.Name)
			expr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldExpr, key.JSONDataType.StringValue())
			if key.Materialized {
				promotedFieldExpr := BodyPromotedColumnPrefix + fmt.Sprintf("`%s`", key.Name)
				expr = fmt.Sprintf("coalesce(%s, %s)", expr, fmt.Sprintf("dynamicElement(%s, '%s')", promotedFieldExpr, key.JSONDataType.StringValue()))
			}
			// returning qbtypes.ErrColumnNotFound is a hack that will trigger the fallback expr logic to include all the types for the key
			return expr, qbtypes.ErrColumnNotFound
		default:
		if key.FieldContext != telemetrytypes.FieldContextResource {
			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
		}
		oldColumn := logsV2Columns["resources_string"]
		oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)

		// have to add ::String as ClickHouse throws an error: data types Variant/Dynamic are not allowed in GROUP BY
		// once the ClickHouse dependency is updated, we need to check if we can remove it.
		if key.Materialized {
			oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
			oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
		} else {
			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
		}
	case schema.ColumnTypeEnumLowCardinality:
		switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
		case schema.ColumnTypeEnumString:
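The multiIf expressions above implement a read-both fallback: prefer the typed JSON subcolumn, otherwise fall back to the legacy map (or materialized) column. A sketch of the non-materialized expression builder, under the same column layout:

package main

import "fmt"

// resourceFieldExpr mirrors the non-materialized branch above: read the JSON
// subcolumn when present, else fall back to the legacy map column.
func resourceFieldExpr(jsonCol, mapCol, key string) string {
	return fmt.Sprintf(
		"multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s['%s'], NULL)",
		jsonCol, key, jsonCol, key, mapCol, key, mapCol, key)
}

func main() {
	// column names follow the logs_v2 schema seen in the diff
	fmt.Println(resourceFieldExpr("resources_json", "resources_string", "service.name"))
}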
@@ -11,7 +11,7 @@ import (
// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm, nil)
	cb := NewConditionBuilder(fm)

	keys := buildCompleteFieldKeyMap()

@@ -33,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

	for _, expr := range tests {
		t.Run(expr, func(t *testing.T) {
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			require.NoError(t, err)
			require.NotNil(t, clause)

@@ -47,7 +47,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
// TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm, nil)
	cb := NewConditionBuilder(fm)

	keys := buildCompleteFieldKeyMap()

@@ -69,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {

	for _, expr := range tests {
		t.Run(expr, func(t *testing.T) {
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			require.NoError(t, err)
			require.NotNil(t, clause)
@@ -7,7 +7,6 @@ import (
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/require"
)
@@ -15,7 +14,8 @@ import (
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm, telemetrytypestest.NewMockMetadataStore())
	cb := NewConditionBuilder(fm)

	// Define a comprehensive set of field keys to support all test cases
	keys := buildCompleteFieldKeyMap()
@@ -16,7 +16,7 @@ import (
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm, nil)
	cb := NewConditionBuilder(fm)

	// Define a comprehensive set of field keys to support all test cases
	keys := buildCompleteFieldKeyMap()
@@ -2423,7 +2423,7 @@ func TestFilterExprLogs(t *testing.T) {
// TestFilterExprLogsConflictNegation tests a comprehensive set of query patterns for logs search
func TestFilterExprLogsConflictNegation(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm, nil)
	cb := NewConditionBuilder(fm)

	// Define a comprehensive set of field keys to support all test cases
	keys := buildCompleteFieldKeyMap()
@@ -84,6 +84,7 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
}

func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {

	dataType, value := inferDataType(value, operator, key)

	// for array types, we need to extract the value from the JSON_QUERY
@@ -30,7 +30,7 @@ func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, paren
	}

	part := pb.parts[index]
	pathSoFar := strings.Join(pb.parts[:index+1], telemetrytypes.ArraySep)
	pathSoFar := strings.Join(pb.parts[:index+1], ArraySep)
	isTerminal := index == len(pb.parts)-1

	// Calculate progression parameters based on parent's values
@@ -110,8 +110,8 @@ func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbt

	// TODO: PlanJSON requires the Start and End of the Query to select the correct column between promoted and body_json using
	// creation time in distributed_promoted_paths
	path := strings.ReplaceAll(key.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
	parts := strings.Split(path, telemetrytypes.ArraySep)
	path := strings.ReplaceAll(key.Name, ArrayAnyIndex, ArraySep)
	parts := strings.Split(path, ArraySep)

	pb := &JSONAccessPlanBuilder{
		key: key,
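Path handling in PlanJSON first rewrites the any-index token to the array separator and then splits on it. A tiny sketch (the real separator comes from jsontypeexporter.ArraySeparator; the literal used below is a placeholder):

package main

import (
	"fmt"
	"strings"
)

const (
	arrayAnyIndex = "[*]."
	arraySep      = "[]." // placeholder; the real value is jsontypeexporter.ArraySeparator
)

// splitJSONPath mirrors the normalize-then-split step in PlanJSON above.
func splitJSONPath(name string) []string {
	path := strings.ReplaceAll(name, arrayAnyIndex, arraySep)
	return strings.Split(path, arraySep)
}

func main() {
	fmt.Println(splitJSONPath("education[*].awards[*].title"))
	// [education awards title]
}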
@@ -814,6 +814,9 @@ func TestPlanJSON_TreeStructure(t *testing.T) {

// testTypeSet returns a map of path->types and a getTypes function for testing
// This represents the type information available in the test JSON structure
//
// TODO(Piyush): Remove this unparam nolint
// nolint:unparam
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
	types := map[string][]telemetrytypes.JSONDataType{
		"user.name": {telemetrytypes.String},
@@ -1,455 +0,0 @@
package telemetrylogs

import (
	"context"
	"fmt"
	"slices"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
)

var (
	CodeCurrentNodeNil           = errors.MustNewCode("current_node_nil")
	CodeNextNodeNil              = errors.MustNewCode("next_node_nil")
	CodeNestedExpressionsEmpty   = errors.MustNewCode("nested_expressions_empty")
	CodeGroupByPlanEmpty         = errors.MustNewCode("group_by_plan_empty")
	CodeArrayMapExpressionsEmpty = errors.MustNewCode("array_map_expressions_empty")
	CodePromotedPlanMissing      = errors.MustNewCode("promoted_plan_missing")
	CodeArrayNavigationFailed    = errors.MustNewCode("array_navigation_failed")
)

func (c *conditionBuilder) getTypes(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
	keys, _, err := c.metadataStore.GetKeys(ctx, &telemetrytypes.FieldKeySelector{
		Name:              path,
		SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
		Signal:            telemetrytypes.SignalLogs,
		Limit:             1,
	})
	if err != nil {
		return nil, err
	}
	types := []telemetrytypes.JSONDataType{}
	for _, key := range keys[path] {
		if key.JSONDataType != nil {
			types = append(types, *key.JSONDataType)
		}
	}
	return types, nil
}

// buildJSONCondition builds the full WHERE condition for body_json JSON paths
func (c *conditionBuilder) buildJSONCondition(ctx context.Context, key *telemetrytypes.TelemetryFieldKey,
	operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {

	plan, err := PlanJSON(ctx, key, operator, value, c.getTypes)
	if err != nil {
		return "", err
	}

	conditions := []string{}
	for _, plan := range plan {
		condition, err := c.emitPlannedCondition(plan, operator, value, sb)
		if err != nil {
			return "", err
		}
		conditions = append(conditions, condition)
	}
	return sb.Or(conditions...), nil
}

// emitPlannedCondition handles paths with array traversal
func (c *conditionBuilder) emitPlannedCondition(plan *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	// Build traversal + terminal recursively per-hop
	compiled, err := c.recurseArrayHops(plan, operator, value, sb)
	if err != nil {
		return "", err
	}

	// sb.AddWhereClause(sqlbuilder.NewWhereClause().AddWhereExpr(sb.Args, compiled))
	return compiled, nil
}

// buildTerminalCondition creates the innermost condition
func (c *conditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	// Use the parent's alias + current field name for the full path
	fieldPath := node.FieldPath()

	if node.TerminalConfig.ElemType.IsArray {
		// switch operator for array membership checks
		switch operator {
		case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
			operator = qbtypes.FilterOperatorEqual
		case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
			operator = qbtypes.FilterOperatorNotEqual
		}
		arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
		if err != nil {
			return "", err
		}
		return arrayCond, nil
	}
	conditions := []string{}

	elemType := node.TerminalConfig.ElemType
	fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
	fieldExpr, value = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, fieldExpr, operator)

	indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
		return index.Type == elemType && index.ColumnExpression == fieldPath
	})
	if elemType.IndexSupported && indexed {
		indexedExpr := assumeNotNull(fieldPath, elemType)
		emptyValue := func() any {
			switch elemType {
			case telemetrytypes.String:
				return ""
			case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
				return 0
			default:
				return nil
			}
		}()

		// switch the operator and value for exists and not exists
		switch operator {
		case qbtypes.FilterOperatorExists:
			operator = qbtypes.FilterOperatorNotEqual
			value = emptyValue
		case qbtypes.FilterOperatorNotExists:
			operator = qbtypes.FilterOperatorEqual
			value = emptyValue
		default:
			// do nothing
		}

		cond, err := c.applyOperator(sb, indexedExpr, operator, value)
		if err != nil {
			return "", err
		}
		conditions = append(conditions, cond)
		// Switch operator to EXISTS
		operator = qbtypes.FilterOperatorExists
	}

	cond, err := c.applyOperator(sb, fieldExpr, operator, value)
	if err != nil {
		return "", err
	}
	conditions = append(conditions, cond)
	if len(conditions) > 1 {
		return sb.And(conditions...), nil
	}
	return cond, nil
}

// buildArrayMembershipCondition handles array membership checks
func (c *conditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	arrayPath := node.FieldPath()

	// create typed array out of a dynamic array
	filteredDynamicExpr := func() string {
		baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
		return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
			node.TerminalConfig.ValueType.StringValue(),
			node.TerminalConfig.ValueType.StringValue(),
			baseArrayDynamicExpr)
	}
	typedArrayExpr := func() string {
		return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
	}

	var arrayExpr string
	if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
		arrayExpr = filteredDynamicExpr()
	} else {
		arrayExpr = typedArrayExpr()
	}

	fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, "x", operator)
	op, err := c.applyOperator(sb, fieldExpr, operator, value)
	if err != nil {
		return "", err
	}

	return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}

// recurseArrayHops recursively builds array traversal conditions
func (c *conditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	if current == nil {
		return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
	}

	if current.IsTerminal {
		terminalCond, err := c.buildTerminalCondition(current, operator, value, sb)
		if err != nil {
			return "", err
		}
		return terminalCond, nil
	}

	currAlias := current.Alias()
	fieldPath := current.FieldPath()
	// Determine availability of Array(JSON) and Array(Dynamic) at this hop
	hasArrayJSON := current.Branches[telemetrytypes.BranchJSON] != nil
	hasArrayDynamic := current.Branches[telemetrytypes.BranchDynamic] != nil

	// Then, at this hop, compute child per branch and wrap
	branches := make([]string, 0, 2)
	if hasArrayJSON {
		jsonArrayExpr := fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')", fieldPath, current.MaxDynamicTypes, current.MaxDynamicPaths)
		childGroupJSON, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchJSON], operator, value, sb)
		if err != nil {
			return "", err
		}
		branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupJSON, jsonArrayExpr))
	}
	if hasArrayDynamic {
		dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", fieldPath)
		dynFilteredExpr := fmt.Sprintf("arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)

		// Create the Query for Dynamic array
		childGroupDyn, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchDynamic], operator, value, sb)
		if err != nil {
			return "", err
		}
		branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupDyn, dynFilteredExpr))
	}

	if len(branches) == 1 {
		return branches[0], nil
	}
	return fmt.Sprintf("(%s)", strings.Join(branches, " OR ")), nil
}

func (c *conditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, fieldExpr string, operator qbtypes.FilterOperator, value any) (string, error) {
	switch operator {
	case qbtypes.FilterOperatorEqual:
		return sb.E(fieldExpr, value), nil
	case qbtypes.FilterOperatorNotEqual:
		return sb.NE(fieldExpr, value), nil
	case qbtypes.FilterOperatorGreaterThan:
		return sb.G(fieldExpr, value), nil
	case qbtypes.FilterOperatorGreaterThanOrEq:
		return sb.GE(fieldExpr, value), nil
	case qbtypes.FilterOperatorLessThan:
		return sb.LT(fieldExpr, value), nil
	case qbtypes.FilterOperatorLessThanOrEq:
		return sb.LE(fieldExpr, value), nil
	case qbtypes.FilterOperatorLike:
		return sb.Like(fieldExpr, value), nil
	case qbtypes.FilterOperatorNotLike:
		return sb.NotLike(fieldExpr, value), nil
	case qbtypes.FilterOperatorILike:
		return sb.ILike(fieldExpr, value), nil
	case qbtypes.FilterOperatorNotILike:
		return sb.NotILike(fieldExpr, value), nil
	case qbtypes.FilterOperatorRegexp:
		return fmt.Sprintf("match(%s, %s)", fieldExpr, sb.Var(value)), nil
	case qbtypes.FilterOperatorNotRegexp:
		return fmt.Sprintf("NOT match(%s, %s)", fieldExpr, sb.Var(value)), nil
	case qbtypes.FilterOperatorContains:
		return sb.ILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
	case qbtypes.FilterOperatorNotContains:
		return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
	case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
		// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
		values, ok := value.([]any)
		if !ok {
			values = []any{value}
		}
		conds := []string{}
		for _, v := range values {
			if operator == qbtypes.FilterOperatorIn {
				conds = append(conds, sb.E(fieldExpr, v))
			} else {
				conds = append(conds, sb.NE(fieldExpr, v))
			}
		}
		if operator == qbtypes.FilterOperatorIn {
			return sb.Or(conds...), nil
		}
		return sb.And(conds...), nil
	case qbtypes.FilterOperatorExists:
		return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
	case qbtypes.FilterOperatorNotExists:
		return fmt.Sprintf("%s IS NULL", fieldExpr), nil
	default:
		return "", qbtypes.ErrUnsupportedOperator
	}
}

// GroupByArrayJoinInfo contains information about array joins needed for GroupBy
type GroupByArrayJoinInfo struct {
	ArrayJoinClauses []string // ARRAY JOIN clauses to add to FROM clause
	TerminalExpr     string   // Terminal field expression for SELECT/GROUP BY
}

// BuildGroupBy builds GroupBy information for body JSON fields using arrayConcat pattern
func (c *conditionBuilder) BuildGroupBy(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*GroupByArrayJoinInfo, error) {
	path := strings.TrimPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix)

	plan, err := PlanJSON(ctx, key, qbtypes.FilterOperatorExists, nil, c.getTypes)
	if err != nil {
		return nil, err
	}

	if len(plan) == 0 {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
			"Could not find any valid paths for: %s", path)
	}

	if plan[0].IsTerminal {
		node := plan[0]

		expr := fmt.Sprintf("dynamicElement(%s, '%s')", node.FieldPath(), node.TerminalConfig.ElemType.StringValue())
		if key.Materialized {
			if len(plan) < 2 {
				return nil, errors.Newf(errors.TypeUnexpected, CodePromotedPlanMissing,
					"plan length is less than 2 for promoted path: %s", path)
			}

			// promoted column first then body_json column
			// TODO(Piyush): Change this in future for better performance
			expr = fmt.Sprintf("coalesce(%s, %s)",
				fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
				expr,
			)
		}

		return &GroupByArrayJoinInfo{
			ArrayJoinClauses: []string{},
			TerminalExpr:     expr,
		}, nil
	}

	// Build arrayConcat pattern directly from the tree structure
	arrayConcatExpr, err := c.buildArrayConcat(plan)
	if err != nil {
		return nil, err
	}

	// Create single ARRAY JOIN clause with arrayFlatten
	arrayJoinClause := fmt.Sprintf("ARRAY JOIN %s AS `%s`", arrayConcatExpr, key.Name)

	return &GroupByArrayJoinInfo{
		ArrayJoinClauses: []string{arrayJoinClause},
		TerminalExpr:     fmt.Sprintf("`%s`", key.Name),
	}, nil
}

// buildArrayConcat builds the arrayConcat pattern directly from the tree structure
func (c *conditionBuilder) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (string, error) {
	if len(plan) == 0 {
		return "", errors.Newf(errors.TypeInternal, CodeGroupByPlanEmpty, "group by plan is empty while building arrayConcat")
	}

	// Build arrayMap expressions for ALL available branches at the root level
	var arrayMapExpressions []string
	for _, node := range plan {
		hasJSON := node.Branches[telemetrytypes.BranchJSON] != nil
		hasDynamic := node.Branches[telemetrytypes.BranchDynamic] != nil

		if hasJSON {
			jsonExpr, err := c.buildArrayMap(node, telemetrytypes.BranchJSON)
			if err != nil {
				return "", err
			}
			arrayMapExpressions = append(arrayMapExpressions, jsonExpr)
		}

		if hasDynamic {
			dynamicExpr, err := c.buildArrayMap(node, telemetrytypes.BranchDynamic)
			if err != nil {
				return "", err
			}
			arrayMapExpressions = append(arrayMapExpressions, dynamicExpr)
		}
	}
	if len(arrayMapExpressions) == 0 {
		return "", errors.Newf(errors.TypeInternal, CodeArrayMapExpressionsEmpty, "array map expressions are empty while building arrayConcat")
	}

	// Build the arrayConcat expression
	arrayConcatExpr := fmt.Sprintf("arrayConcat(%s)", strings.Join(arrayMapExpressions, ", "))

	// Wrap with arrayFlatten
	arrayFlattenExpr := fmt.Sprintf("arrayFlatten(%s)", arrayConcatExpr)

	return arrayFlattenExpr, nil
}

// buildArrayMap builds the arrayMap expression for a specific branch, handling all sub-branches
func (c *conditionBuilder) buildArrayMap(currentNode *telemetrytypes.JSONAccessNode, branchType telemetrytypes.JSONAccessBranchType) (string, error) {
	if currentNode == nil {
		return "", errors.Newf(errors.TypeInternal, CodeCurrentNodeNil, "current node is nil while building arrayMap")
	}

	nextNode := currentNode.Branches[branchType]
	if nextNode == nil {
		return "", errors.Newf(errors.TypeInternal, CodeNextNodeNil, "next node is nil while building arrayMap")
	}

	// Build the array expression for this level
	var arrayExpr string
	if branchType == telemetrytypes.BranchJSON {
		// Array(JSON) branch
		arrayExpr = fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')",
			currentNode.FieldPath(), currentNode.MaxDynamicTypes, currentNode.MaxDynamicPaths)
	} else {
		// Array(Dynamic) branch - filter for JSON objects
		dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", currentNode.FieldPath())
		arrayExpr = fmt.Sprintf("arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
	}

	// If this is the terminal level, return the simple arrayMap
	if nextNode.IsTerminal {
		dynamicElementExpr := fmt.Sprintf("dynamicElement(%s, '%s')", nextNode.FieldPath(),
			nextNode.TerminalConfig.ElemType.StringValue(),
		)
		return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), dynamicElementExpr, arrayExpr), nil
	}

	// For non-terminal nodes, we need to handle ALL possible branches at the next level
	var nestedExpressions []string
	hasJSON := nextNode.Branches[telemetrytypes.BranchJSON] != nil
	hasDynamic := nextNode.Branches[telemetrytypes.BranchDynamic] != nil

	if hasJSON {
		jsonNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchJSON)
		if err != nil {
			return "", err
		}
		nestedExpressions = append(nestedExpressions, jsonNested)
	}

	if hasDynamic {
		dynamicNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchDynamic)
		if err != nil {
			return "", err
		}
		nestedExpressions = append(nestedExpressions, dynamicNested)
	}

	// If we have multiple nested expressions, we need to concat them
	var nestedExpr string
	if len(nestedExpressions) == 1 {
		nestedExpr = nestedExpressions[0]
	} else if len(nestedExpressions) > 1 {
		// This shouldn't happen in our current tree structure, but handle it just in case
		nestedExpr = fmt.Sprintf("arrayConcat(%s)", strings.Join(nestedExpressions, ", "))
	} else {
		return "", errors.Newf(errors.TypeInternal, CodeNestedExpressionsEmpty, "nested expressions are empty while building arrayMap")
	}

	return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), nestedExpr, arrayExpr), nil
}

func assumeNotNull(column string, elemType telemetrytypes.JSONDataType) string {
	return fmt.Sprintf("assumeNotNull(dynamicElement(%s, '%s'))", column, elemType.StringValue())
}
File diff suppressed because one or more lines are too long
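One detail worth keeping from the deleted applyOperator: IN and NOT IN are emulated as OR/AND chains of equality checks so the per-type indexes keep applying. The emulation in isolation, using the same go-sqlbuilder API the file imports:

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

// inAsEqualities mirrors the FilterOperatorIn/NotIn branch above.
func inAsEqualities(sb *sqlbuilder.SelectBuilder, field string, values []any, negate bool) string {
	conds := []string{}
	for _, v := range values {
		if negate {
			conds = append(conds, sb.NE(field, v))
		} else {
			conds = append(conds, sb.E(field, v))
		}
	}
	if negate {
		return sb.And(conds...) // NOT IN: all values must differ
	}
	return sb.Or(conds...) // IN: any value may match
}

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	cond := inAsEqualities(sb, "dynamicElement(body_json.`level`, 'String')", []any{"warn", "error"}, false)
	fmt.Println(cond)
}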
@@ -23,6 +23,7 @@ type logQueryStatementBuilder struct {
|
||||
aggExprRewriter qbtypes.AggExprRewriter
|
||||
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey
|
||||
jsonBodyPrefix string
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||
}
|
||||
|
||||
@@ -36,6 +37,7 @@ func NewLogQueryStatementBuilder(
|
||||
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||
aggExprRewriter qbtypes.AggExprRewriter,
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
jsonBodyPrefix string,
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *logQueryStatementBuilder {
|
||||
logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
|
||||
@@ -48,6 +50,7 @@ func NewLogQueryStatementBuilder(
|
||||
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
|
||||
aggExprRewriter: aggExprRewriter,
|
||||
fullTextColumn: fullTextColumn,
|
||||
jsonBodyPrefix: jsonBodyPrefix,
|
||||
jsonKeyToKey: jsonKeyToKey,
|
||||
}
|
||||
}
|
||||
@@ -168,25 +171,6 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
|
||||
overallMatch = overallMatch || findMatch(IntrinsicFields)
|
||||
}
|
||||
|
||||
if strings.Contains(k.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
|
||||
k.Name = strings.TrimPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix)
|
||||
fieldKeys, found := keys[k.Name]
|
||||
if found && len(fieldKeys) > 0 {
|
||||
k.FieldContext = fieldKeys[0].FieldContext
|
||||
k.FieldDataType = fieldKeys[0].FieldDataType
|
||||
k.Materialized = fieldKeys[0].Materialized
|
||||
k.JSONDataType = fieldKeys[0].JSONDataType
|
||||
k.Indexes = fieldKeys[0].Indexes
|
||||
|
||||
overallMatch = true // because we found a match
|
||||
} else {
|
||||
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
|
||||
k.FieldContext = telemetrytypes.FieldContextBody
|
||||
k.FieldDataType = telemetrytypes.FieldDataTypeString
|
||||
k.JSONDataType = &telemetrytypes.String
|
||||
}
|
||||
}
|
||||
|
||||
if !overallMatch {
|
||||
// check if all the key for the given field have been materialized, if so
|
||||
// set the key to materialized
|
||||
@@ -237,9 +221,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
cteArgs = append(cteArgs, args)
|
||||
}
|
||||
|
||||
// Collect array join info for body JSON fields
|
||||
var arrayJoinClauses []string
|
||||
|
||||
// Select timestamp and id by default
|
||||
sb.Select(LogsV2TimestampColumn)
|
||||
sb.SelectMore(LogsV2IDColumn)
|
||||
@@ -253,10 +234,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
sb.SelectMore(LogsV2ScopeNameColumn)
|
||||
sb.SelectMore(LogsV2ScopeVersionColumn)
|
||||
sb.SelectMore(LogsV2BodyColumn)
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
sb.SelectMore(LogsV2BodyJSONColumn)
|
||||
sb.SelectMore(LogsV2BodyPromotedColumn)
|
||||
}
|
||||
sb.SelectMore(LogsV2AttributesStringColumn)
|
||||
sb.SelectMore(LogsV2AttributesNumberColumn)
|
||||
sb.SelectMore(LogsV2AttributesBoolColumn)
|
||||
@@ -269,7 +246,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn {
|
||||
continue
|
||||
}
|
||||
|
||||
// get column expression for the field - use array index directly to avoid pointer to loop variable
|
||||
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
|
||||
if err != nil {
|
||||
@@ -279,12 +255,8 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
}
|
||||
}
|
||||
|
||||
// From table (inject ARRAY JOINs if collected)
|
||||
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
|
||||
if len(arrayJoinClauses) > 0 {
|
||||
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
|
||||
}
|
||||
sb.From(fromBase)
|
||||
// From table
|
||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||
|
||||
// Add filter conditions
|
||||
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
|
||||
@@ -358,17 +330,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
|
||||
var allGroupByArgs []any
|
||||
|
||||
// Collect array join info for body JSON fields
|
||||
var arrayJoinClauses []string
|
||||
|
||||
// Keep original column expressions so we can build the tuple
|
||||
fieldNames := make([]string, 0, len(query.GroupBy))
|
||||
for _, gb := range query.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||
allGroupByArgs = append(allGroupByArgs, args...)
|
||||
sb.SelectMore(colExpr)
|
||||
@@ -390,13 +358,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
|
||||
sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
|
||||
}
|
||||
|
||||
// Add FROM clause
|
||||
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
|
||||
if len(arrayJoinClauses) > 0 {
|
||||
fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
|
||||
}
|
||||
sb.From(fromBase)
|
||||
|
||||
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
|
||||
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
|
||||
|
||||
if err != nil {
|
||||
@@ -442,6 +404,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}

combinedArgs := append(allGroupByArgs, allAggChArgs...)

mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)

// Stitch it all together: WITH … SELECT …
@@ -468,6 +431,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}

combinedArgs := append(allGroupByArgs, allAggChArgs...)

mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)

// Stitch it all together: WITH … SELECT …
@@ -514,15 +478,11 @@ func (b *logQueryStatementBuilder) buildScalarQuery(

var allGroupByArgs []any

// Collect array join info for body JSON fields
var arrayJoinClauses []string

for _, gb := range query.GroupBy {
	expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
	expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
	if err != nil {
		return nil, err
	}

	colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
	allGroupByArgs = append(allGroupByArgs, args...)
	sb.SelectMore(colExpr)
@@ -548,12 +508,8 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
	}
}

// From table (inject ARRAY JOINs if collected)
fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
if len(arrayJoinClauses) > 0 {
	fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
}
sb.From(fromBase)
// From table
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))

// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -698,6 +654,7 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
	start, end uint64,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

	return b.resourceFilterStmtBuilder.Build(
		ctx,
		start,
@@ -32,6 +32,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
	cb,
	mockMetadataStore,
	DefaultFullTextColumn,
	BodyJSONStringSearchPrefix,
	GetBodyJSONKey,
)
}
@@ -196,11 +197,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
}

fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -212,6 +213,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
	resourceFilterStmtBuilder,
	aggExprRewriter,
	DefaultFullTextColumn,
	BodyJSONStringSearchPrefix,
	GetBodyJSONKey,
)
@@ -316,11 +318,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
}

fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -332,6 +334,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
	resourceFilterStmtBuilder,
	aggExprRewriter,
	DefaultFullTextColumn,
	BodyJSONStringSearchPrefix,
	GetBodyJSONKey,
)
@@ -424,11 +427,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
}

fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -440,11 +443,10 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
	resourceFilterStmtBuilder,
	aggExprRewriter,
	DefaultFullTextColumn,
	BodyJSONStringSearchPrefix,
	GetBodyJSONKey,
)

//

for _, c := range cases {
	t.Run(c.name, func(t *testing.T) {
@@ -489,8 +491,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
GroupBy: []qbtypes.GroupByKey{
	{
		TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
			Name:         "status",
			FieldContext: telemetrytypes.FieldContextBody,
			Name: "body.status",
		},
	},
},
@@ -500,11 +501,11 @@
}

fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
cb := NewConditionBuilder(fm, mockMetadataStore)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -516,6 +517,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
	resourceFilterStmtBuilder,
	aggExprRewriter,
	DefaultFullTextColumn,
	BodyJSONStringSearchPrefix,
	GetBodyJSONKey,
)
@@ -595,11 +597,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
}

fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
cb := NewConditionBuilder(fm, mockMetadataStore)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -611,6 +613,7 @@
	resourceFilterStmtBuilder,
	aggExprRewriter,
	DefaultFullTextColumn,
	BodyJSONStringSearchPrefix,
	GetBodyJSONKey,
)

@@ -47,6 +47,9 @@ var (
//
// searchOperator: LIKE for pattern matching, EQUAL for exact match
// Returns: (paths, complete, error)
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
	fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
@@ -132,7 +135,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
orClauses := []string{}
for _, fieldKeySelector := range fieldKeySelectors {
	// replace [*] with []
	fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
	fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrylogs.ArrayAnyIndex, telemetrylogs.ArraySep)
	// Extract search text for body JSON keys
	keyName := CleanPathPrefixes(fieldKeySelector.Name)
	if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
@@ -159,11 +162,13 @@
	return query, args, limit, nil
}

// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
	filteredPaths := []string{}
	for _, path := range paths {
		if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
		if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
			continue
		}
		filteredPaths = append(filteredPaths, path)
@@ -291,7 +296,7 @@ func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...str
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
	path = CleanPathPrefixes(path)

	if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
	if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
		return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
	}
@@ -451,7 +456,7 @@ func derefValue(v any) any {

// IsPathPromoted checks if a specific path is promoted
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
	split := strings.Split(path, telemetrytypes.ArraySep)
	split := strings.Split(path, telemetrylogs.ArraySep)
	query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
	if err != nil {
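For readers following IsPathPromoted above: it is a plain existence probe against ClickHouse, where `split[0]` drops any array suffix so only the top-level path is checked. A self-contained sketch under assumptions — clickhouse-go v2, a made-up promoted-paths table name, and `[].` standing in for the ArraySep constant (whose real value comes from jsontypeexporter.ArraySeparator):

```go
package main

import (
	"context"
	"fmt"
	"strings"

	"github.com/ClickHouse/clickhouse-go/v2"
)

func main() {
	ctx := context.Background()
	conn, err := clickhouse.Open(&clickhouse.Options{Addr: []string{"localhost:9000"}})
	if err != nil {
		panic(err)
	}

	// Drop any array suffix so only the top-level path is probed;
	// "[]." approximates the ArraySep constant.
	path := strings.Split("user.tags[].name", "[].")[0]

	// Hypothetical table name; the real one comes from PromotedPathsTableName.
	rows, err := conn.Query(ctx, "SELECT 1 FROM signoz_logs.distributed_promoted_paths WHERE path = ? LIMIT 1", path)
	if err != nil {
		panic(err)
	}
	defer rows.Close()

	fmt.Println("promoted:", rows.Next())
}
```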
@@ -572,14 +572,6 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
	}
}

if querybuilder.BodyJSONQueryEnabled {
	bodyJSONPaths, finished, err := t.getBodyJSONPaths(ctx, fieldKeySelectors) // LIKE for pattern matching
	if err != nil {
		t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
	}
	keys = append(keys, bodyJSONPaths...)
	complete = complete && finished
}
return keys, complete, nil
}
@@ -697,6 +689,19 @@ func (t *telemetryMetaStore) getMetricsKeys(ctx context.Context, fieldKeySelecto
// hit the limit?
complete := rowCount <= limit

// Add synthetic metrics-only key isTopLevelOperation so filters can be parsed even if not present in metrics tables
for _, selector := range fieldKeySelectors {
	if selector.Name == "isTopLevelOperation" {
		keys = append(keys, &telemetrytypes.TelemetryFieldKey{
			Name:          "isTopLevelOperation",
			Signal:        telemetrytypes.SignalMetrics,
			FieldContext:  telemetrytypes.FieldContextUnspecified,
			FieldDataType: telemetrytypes.FieldDataTypeBool,
		})
		break
	}
}

return keys, complete, nil
}
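With the synthetic key registered, a metrics filter such as `isTopLevelOperation = true` parses and type-checks even though no metrics table stores such a label; the SQL it compiles to comes from buildMetricScopeCondition in the condition-builder hunk further down.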
@@ -1172,10 +1177,6 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
	limit = 50
}

if strings.HasPrefix(fieldValueSelector.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
	return t.ListJSONValues(ctx, fieldValueSelector.Name, limit)
}

sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)

if fieldValueSelector.Name != "" {
@@ -4,9 +4,11 @@ import (
	"context"
	"fmt"
	"slices"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrytraces"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -139,9 +141,13 @@ func (c *conditionBuilder) ConditionFor(
	operator qbtypes.FilterOperator,
	value any,
	sb *sqlbuilder.SelectBuilder,
	_ uint64,
	start uint64,
	_ uint64,
) (string, error) {
	if c.isMetricScopeField(key.Name) {
		return c.buildMetricScopeCondition(operator, value, start)
	}

	condition, err := c.conditionFor(ctx, key, operator, value, sb)
	if err != nil {
		return "", err
@@ -149,3 +155,37 @@ func (c *conditionBuilder) ConditionFor(

	return condition, nil
}

func (c *conditionBuilder) isMetricScopeField(keyName string) bool {
	return keyName == MetricScopeFieldIsTopLevelOperation
}

// buildMetricScopeCondition handles synthetic field isTopLevelOperation for metrics signal.
func (c *conditionBuilder) buildMetricScopeCondition(operator qbtypes.FilterOperator, value any, start uint64) (string, error) {
	if operator != qbtypes.FilterOperatorEqual {
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "%s only supports '=' operator", MetricScopeFieldIsTopLevelOperation)
	}
	// Accept true in bool or string form; anything else is invalid
	isTrue := false
	switch v := value.(type) {
	case bool:
		isTrue = v
	case string:
		isTrue = strings.ToLower(v) == "true"
	default:
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "%s expects boolean value, got %T", MetricScopeFieldIsTopLevelOperation, value)
	}
	if !isTrue {
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "%s can only be filtered with value 'true'", MetricScopeFieldIsTopLevelOperation)
	}

	startSec := int64(start / 1000)

	// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
	return fmt.Sprintf(
		"((JSONExtractString(labels, 'operation'), JSONExtractString(labels, 'service.name')) GLOBAL IN (SELECT DISTINCT name, serviceName FROM %s.%s WHERE time >= toDateTime(%d)))",
		telemetrytraces.DBName,
		telemetrytraces.LocalTopLevelOperationsTableName,
		startSec,
	), nil
}
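Distilled into a self-contained sketch, with the table constants inlined (this mirrors the hunk above and is not the SigNoz API surface; `start` is presumably epoch milliseconds, given the division by 1000):

```go
package main

import (
	"fmt"
	"strings"
)

// metricScopeCondition mirrors buildMetricScopeCondition: normalize the
// accepted value forms, then emit a GLOBAL IN against the local
// top_level_operations table so only (operation, service.name) pairs of
// top-level spans match.
func metricScopeCondition(value any, startMs uint64) (string, error) {
	isTrue := false
	switch v := value.(type) {
	case bool:
		isTrue = v
	case string:
		isTrue = strings.ToLower(v) == "true"
	default:
		return "", fmt.Errorf("isTopLevelOperation expects boolean value, got %T", value)
	}
	if !isTrue {
		return "", fmt.Errorf("isTopLevelOperation can only be filtered with value 'true'")
	}
	return fmt.Sprintf(
		"((JSONExtractString(labels, 'operation'), JSONExtractString(labels, 'service.name')) GLOBAL IN (SELECT DISTINCT name, serviceName FROM signoz_traces.top_level_operations WHERE time >= toDateTime(%d)))",
		int64(startMs/1000), // ClickHouse toDateTime wants seconds
	), nil
}

func main() {
	cond, err := metricScopeCondition("true", 1700000000000)
	fmt.Println(cond, err)
}
```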
@@ -7,3 +7,5 @@ var IntrinsicFields = []string{
	"type",
	"is_monotonic",
}

const MetricScopeFieldIsTopLevelOperation = "isTopLevelOperation"
@@ -32,8 +32,6 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
options.MaxIdleConns = config.Connection.MaxIdleConns
options.MaxOpenConns = config.Connection.MaxOpenConns
options.DialTimeout = config.Connection.DialTimeout
// This is to avoid the driver decoding issues with JSON columns
options.Settings["output_format_native_write_json_as_string"] = 1

chConn, err := clickhouse.Open(options)
if err != nil {
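The two deleted lines had forced ClickHouse to hand JSON columns back as strings, sidestepping driver decoding issues. For reference, a hedged sketch of where such session settings live in clickhouse-go v2 (the address and setting shown are illustrative, not SigNoz configuration):

```go
package main

import "github.com/ClickHouse/clickhouse-go/v2"

func main() {
	opts := &clickhouse.Options{
		Addr: []string{"localhost:9000"},
		// Settings is a plain map sent with queries on this connection;
		// the removed line set output_format_native_write_json_as_string=1 here.
		Settings: clickhouse.Settings{
			"max_execution_time": 60,
		},
	}
	conn, err := clickhouse.Open(opts)
	if err != nil {
		panic(err)
	}
	defer conn.Close()
}
```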
@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
	expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
	expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
	if err != nil {
		return nil, err
	}
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(

var allGroupByArgs []any
for _, gb := range query.GroupBy {
	expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
	expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
	if err != nil {
		return nil, err
	}
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
	FieldKeys: keys,
	SkipResourceFilter: true,
	Variables: variables,
}, start, end)
}, start, end)

if err != nil {
	return nil, err
@@ -357,7 +357,7 @@ func TestStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -525,7 +525,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -681,7 +681,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -1,12 +1,13 @@
package telemetrytraces

const (
	DBName                        = "signoz_traces"
	SpanIndexV3TableName          = "distributed_signoz_index_v3"
	SpanIndexV3LocalTableName     = "signoz_index_v3"
	TagAttributesV2TableName      = "distributed_tag_attributes_v2"
	TagAttributesV2LocalTableName = "tag_attributes_v2"
	TopLevelOperationsTableName   = "distributed_top_level_operations"
	TraceSummaryTableName         = "distributed_trace_summary"
	SpanAttributesKeysTblName     = "distributed_span_attributes_keys"
	DBName                           = "signoz_traces"
	SpanIndexV3TableName             = "distributed_signoz_index_v3"
	SpanIndexV3LocalTableName        = "signoz_index_v3"
	TagAttributesV2TableName         = "distributed_tag_attributes_v2"
	TagAttributesV2LocalTableName    = "tag_attributes_v2"
	TopLevelOperationsTableName      = "distributed_top_level_operations"
	LocalTopLevelOperationsTableName = "top_level_operations"
	TraceSummaryTableName            = "distributed_trace_summary"
	SpanAttributesKeysTblName        = "distributed_span_attributes_keys"
)
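The one functional addition here is `LocalTopLevelOperationsTableName`, evidently the non-distributed counterpart of `TopLevelOperationsTableName`; it is the table `buildMetricScopeCondition` above reads inside its GLOBAL IN subquery, presumably because GLOBAL IN already evaluates the subquery once on the initiating node.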
@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
	ConditionBuilder: b.stmtBuilder.cb,
	FieldKeys: keys,
	SkipResourceFilter: true,
}, b.start, b.end,
}, b.start, b.end,
)
if err != nil {
	b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -552,6 +552,7 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
	b.stmtBuilder.cb,
	keys,
	telemetrytypes.FieldDataTypeString,
	"",
	nil,
)
if err != nil {
@@ -661,6 +662,7 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
	b.stmtBuilder.cb,
	keys,
	telemetrytypes.FieldDataTypeString,
	"",
	nil,
)
if err != nil {
@@ -800,6 +802,7 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
	b.stmtBuilder.cb,
	keys,
	telemetrytypes.FieldDataTypeString,
	"",
	nil,
)
if err != nil {
@@ -390,7 +390,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(
@@ -506,7 +506,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(

@@ -44,7 +44,7 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
	mockMetadataStore,
)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

statementBuilder := NewTraceQueryStatementBuilder(
	instrumentationtest.New().ToProviderSettings(),
@@ -6,6 +6,7 @@ import (
	"github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -32,7 +33,7 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
}

if strings.Contains(i.Path, telemetrytypes.ArraySep) || strings.Contains(i.Path, telemetrytypes.ArrayAnyIndex) {
if strings.Contains(i.Path, telemetrylogs.ArraySep) || strings.Contains(i.Path, telemetrylogs.ArrayAnyIndex) {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
}

@@ -40,12 +41,12 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
}

if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
if !strings.HasPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix) {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
}

// remove the "body." prefix from the path
i.Path = strings.TrimPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix)
i.Path = strings.TrimPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix)

isCardinal := keycheck.IsCardinal(i.Path)
if isCardinal {
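A compressed, runnable sketch of the validation flow this hunk touches, under assumptions: `[].` approximates the ArraySep constant, and the real method additionally rejects the raw column prefixes and runs keycheck.IsCardinal on the stripped path:

```go
package main

import (
	"fmt"
	"strings"
)

// validatePromotePath is a simplified stand-in for ValidateAndSetDefaults:
// reject array paths, require the "body." prefix on input, and strip it
// before any further checks.
func validatePromotePath(path string) (string, error) {
	if strings.Contains(path, " ") {
		return "", fmt.Errorf("path cannot contain spaces")
	}
	if strings.Contains(path, "[].") || strings.Contains(path, "[*].") {
		return "", fmt.Errorf("array paths can not be promoted or indexed")
	}
	if !strings.HasPrefix(path, "body.") {
		return "", fmt.Errorf("path must start with `body.`")
	}
	return strings.TrimPrefix(path, "body."), nil
}

func main() {
	p, err := validatePromotePath("body.user.id")
	fmt.Println(p, err) // user.id <nil>
}
```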
@@ -4,7 +4,6 @@ import (
	"fmt"
	"strings"

	"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
	"github.com/SigNoz/signoz/pkg/valuer"
)

@@ -18,13 +17,9 @@
	FieldSelectorMatchTypeFuzzy = FieldSelectorMatchType{valuer.NewString("fuzzy")}
)

const (
	// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
	// e.g., "body.status" where "body." is the prefix
	BodyJSONStringSearchPrefix = "body."
	ArraySep                   = jsontypeexporter.ArraySeparator
	ArrayAnyIndex              = "[*]."
)
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
// e.g., "body.status" where "body." is the prefix
const BodyJSONStringSearchPrefix = `body.`

type TelemetryFieldKey struct {
	Name string `json:"name"`