Compare commits

8 commits, comparing `fix/multi-...` with `v0.88.0` (author and date columns were not captured):

| SHA1 |
|---|
| 9a3a8c8305 |
| 2ac45b0174 |
| 2a53918ebd |
| 9daefeb881 |
| 526cf01cb7 |
| cd4766ec2b |
| 2196b58d36 |
| 53c58b9983 |
```diff
@@ -40,7 +40,7 @@ services:
       timeout: 5s
       retries: 3
   schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     container_name: schema-migrator-sync
     command:
       - sync
@@ -53,7 +53,7 @@ services:
         condition: service_healthy
     restart: on-failure
   schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     container_name: schema-migrator-async
     command:
       - async
```
.github/workflows/integrationci.yaml (2 changes, vendored)

```diff
@@ -22,7 +22,7 @@ jobs:
-          - 24.1.2-alpine
+          - 24.12-alpine
         schema-migrator-version:
-          - v0.111.38
+          - v0.128.0
         postgres-version:
           - 15
     if: |
```
```diff
@@ -174,7 +174,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.87.0
+    image: signoz/signoz:v0.88.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -206,7 +206,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.42
+    image: signoz/signoz-otel-collector:v0.128.0
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -230,7 +230,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     deploy:
       restart_policy:
         condition: on-failure
```
```diff
@@ -100,7 +100,7 @@ services:
       # - "9000:9000"
       # - "8123:8123"
       # - "9181:9181"

     configs:
       - source: clickhouse-config
         target: /etc/clickhouse-server/config.xml
@@ -110,13 +110,12 @@ services:
         target: /etc/clickhouse-server/custom-function.xml
       - source: clickhouse-cluster
         target: /etc/clickhouse-server/config.d/cluster.xml
-
     volumes:
       - clickhouse:/var/lib/clickhouse/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.87.0
+    image: signoz/signoz:v0.88.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -148,7 +147,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.42
+    image: signoz/signoz-otel-collector:v0.128.0
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -174,7 +173,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -195,7 +194,6 @@ volumes:
     name: signoz-sqlite
   zookeeper-1:
     name: signoz-zookeeper-1
-
 configs:
   clickhouse-config:
     file: ../common/clickhouse/config.xml
@@ -205,7 +203,6 @@ configs:
     file: ../common/clickhouse/custom-function.xml
   clickhouse-cluster:
     file: ../common/clickhouse/cluster.xml
-
   signoz-prometheus-config:
     file: ../common/signoz/prometheus.yml
   # If you have multiple dashboard files, you can list them individually:
```
```diff
@@ -177,7 +177,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.87.0}
+    image: signoz/signoz:${VERSION:-v0.88.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -210,7 +210,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -236,7 +236,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -247,7 +247,7 @@ services:
        condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-async
     command:
       - async
```
```diff
@@ -110,7 +110,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.87.0}
+    image: signoz/signoz:${VERSION:-v0.88.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -142,7 +142,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -164,7 +164,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -176,7 +176,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-async
     command:
       - async
```
```diff
@@ -14,8 +14,8 @@
   "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
   "remove_label_success": "Labels cleared",
   "alert_form_step1": "Step 1 - Define the metric",
-  "alert_form_step2": "Step 2 - Define Alert Conditions",
-  "alert_form_step3": "Step 3 - Alert Configuration",
+  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
+  "alert_form_step3": "Step {{step}} - Alert Configuration",
   "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
   "confirm_save_title": "Save Changes",
   "confirm_save_content_part1": "Your alert built with",
```
```diff
@@ -7,8 +7,8 @@
   "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
   "remove_label_success": "Labels cleared",
   "alert_form_step1": "Step 1 - Define the metric",
-  "alert_form_step2": "Step 2 - Define Alert Conditions",
-  "alert_form_step3": "Step 3 - Alert Configuration",
+  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
+  "alert_form_step3": "Step {{step}} - Alert Configuration",
   "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
   "confirm_save_title": "Save Changes",
   "confirm_save_content_part1": "Your alert built with",
```
```diff
@@ -7,8 +7,8 @@
   "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
   "remove_label_success": "Labels cleared",
   "alert_form_step1": "Step 1 - Define the metric",
-  "alert_form_step2": "Step 2 - Define Alert Conditions",
-  "alert_form_step3": "Step 3 - Alert Configuration",
+  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
+  "alert_form_step3": "Step {{step}} - Alert Configuration",
   "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
   "confirm_save_title": "Save Changes",
   "confirm_save_content_part1": "Your alert built with",
```
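The three locale files above make the same change: the hard-coded step numbers move out of the translation strings into an `{{step}}` interpolation slot, so the components further down can renumber steps per alert type. A minimal sketch of how the new keys are consumed (assuming i18next, which the `t(...)` calls in the component diffs suggest):

```ts
import i18next from 'i18next';

// With "alert_form_step2": "Step {{step}} - Define Alert Conditions",
// the step number is now supplied at call time (assumes i18next has been
// initialized with these resources):
i18next.t('alert_form_step2', { step: '1' });
// -> "Step 1 - Define Alert Conditions"
```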
```diff
@@ -1,4 +1,5 @@
 import { Tabs, TabsProps } from 'antd';
+import { escapeRegExp } from 'lodash-es';
 import { useLocation, useParams } from 'react-router-dom';

 import { RouteTabProps } from './types';
@@ -28,7 +29,11 @@ function RouteTab({

   // Find the matching route for the current pathname
   const currentRoute = routesWithParams.find((route) => {
-    const routePattern = route.route.replace(/:(\w+)/g, '([^/]+)');
+    const pathnameOnly = route.route.split('?')[0];
+    const routePattern = escapeRegExp(pathnameOnly).replace(
+      /\\:([a-zA-Z0-9_]+)/g,
+      '([^/]+)',
+    );
     const regex = new RegExp(`^${routePattern}$`);
     return regex.test(location.pathname);
   });
```
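The rewritten matcher fixes two problems in the old one-liner: route strings were dropped into `new RegExp` unescaped, so regex metacharacters in a path corrupted the pattern, and query strings were never stripped before matching. A small sketch of the difference (using lodash-es `escapeRegExp`, as imported above; the route value is hypothetical):

```ts
import { escapeRegExp } from 'lodash-es';

// Hypothetical route containing a query string; '?' is also a regex quantifier.
const route = '/logs/pipelines?tab=list';

// Old approach: the raw string goes straight into new RegExp, so '?' is parsed
// as "optional 's'" and the route never matches its own pathname.
new RegExp(`^${route.replace(/:(\w+)/g, '([^/]+)')}$`).test(route); // false

// New approach: strip the query string first, then escape regex literals.
const pathnameOnly = route.split('?')[0];
new RegExp(`^${escapeRegExp(pathnameOnly)}$`).test('/logs/pipelines'); // true
```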
```diff
@@ -212,9 +212,12 @@ function QuerySection({
       return null;
     }
   };

+  const step2Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '2' : '1';
+
   return (
     <>
-      <StepHeading> {t('alert_form_step2')}</StepHeading>
+      <StepHeading> {t('alert_form_step2', { step: step2Label })}</StepHeading>
       <FormContainer>
         <div>{renderTabs(alertType)}</div>
         {renderQuerySection(currentTab)}
```
```diff
@@ -371,9 +371,11 @@ function RuleOptions({
     selectedCategory?.name,
   );

+  const step3Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '3' : '2';
+
   return (
     <>
-      <StepHeading>{t('alert_form_step3')}</StepHeading>
+      <StepHeading>{t('alert_form_step3', { step: step3Label })}</StepHeading>
       <FormContainer>
         {queryCategory === EQueryType.PROM && renderPromRuleOptions()}
         {queryCategory !== EQueryType.PROM &&
```
```diff
@@ -731,7 +731,7 @@ export const getClusterMetricsQueryPayload = (
   },
   {
     selectedTime: 'GLOBAL_TIME',
-    graphType: PANEL_TYPES.TIME_SERIES,
+    graphType: PANEL_TYPES.TABLE,
     query: {
       builder: {
         queryData: [
@@ -751,7 +751,7 @@ export const getClusterMetricsQueryPayload = (
             filters: {
               items: [
                 {
-                  id: 'd7779183',
+                  id: 'a7da59c7',
                   key: {
                     dataType: DataTypes.String,
                     id: 'k8s_cluster_name--string--tag--false',
@@ -786,12 +786,12 @@ export const getClusterMetricsQueryPayload = (
               },
             ],
             having: [],
-            legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
+            legend: 'available',
             limit: null,
             orderBy: [],
             queryName: 'A',
-            reduceTo: 'avg',
-            spaceAggregation: 'max',
+            reduceTo: 'last',
+            spaceAggregation: 'sum',
             stepInterval: 60,
             timeAggregation: 'latest',
           },
@@ -804,14 +804,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sDeploymentDesiredKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'avg',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'B',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: '55110885',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -846,14 +846,14 @@ export const getClusterMetricsQueryPayload = (
              },
            ],
            having: [],
-           legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'desired',
            limit: null,
            orderBy: [],
            queryName: 'B',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'sum',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'avg',
          },
        ],
        queryFormulas: [],
@@ -890,13 +890,13 @@ export const getClusterMetricsQueryPayload = (
      queryType: EQueryType.QUERY_BUILDER,
    },
    variables: {},
-   formatForWeb: false,
+   formatForWeb: true,
    start,
    end,
  },
  {
    selectedTime: 'GLOBAL_TIME',
-   graphType: PANEL_TYPES.TIME_SERIES,
+   graphType: PANEL_TYPES.TABLE,
    query: {
      builder: {
        queryData: [
@@ -909,14 +909,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sStatefulsetCurrentPodsKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'max',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'A',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: '3c57b4d1',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -951,14 +951,14 @@ export const getClusterMetricsQueryPayload = (
              },
            ],
            having: [],
-           legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'current',
            limit: null,
            orderBy: [],
            queryName: 'A',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'sum',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'max',
          },
          {
            aggregateAttribute: {
@@ -969,14 +969,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sStatefulsetDesiredPodsKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'max',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'B',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: '0f49fe64',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -1011,14 +1011,14 @@ export const getClusterMetricsQueryPayload = (
              },
            ],
            having: [],
-           legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'desired',
            limit: null,
            orderBy: [],
            queryName: 'B',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'sum',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'max',
          },
          {
            aggregateAttribute: {
@@ -1029,14 +1029,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sStatefulsetReadyPodsKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'max',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'C',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: '0bebf625',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -1071,14 +1071,14 @@ export const getClusterMetricsQueryPayload = (
              },
            ],
            having: [],
-           legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'ready',
            limit: null,
            orderBy: [],
            queryName: 'C',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'sum',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'max',
          },
          {
            aggregateAttribute: {
@@ -1089,14 +1089,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sStatefulsetUpdatedPodsKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'max',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'D',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: '1ddacbbe',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -1131,14 +1131,14 @@ export const getClusterMetricsQueryPayload = (
              },
            ],
            having: [],
-           legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'updated',
            limit: null,
            orderBy: [],
            queryName: 'D',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'sum',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'max',
          },
        ],
        queryFormulas: [],
@@ -1199,13 +1199,13 @@ export const getClusterMetricsQueryPayload = (
      queryType: EQueryType.QUERY_BUILDER,
    },
    variables: {},
-   formatForWeb: false,
+   formatForWeb: true,
    start,
    end,
  },
  {
    selectedTime: 'GLOBAL_TIME',
-   graphType: PANEL_TYPES.TIME_SERIES,
+   graphType: PANEL_TYPES.TABLE,
    query: {
      builder: {
        queryData: [
@@ -1218,14 +1218,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sDaemonsetCurrentScheduledNodesKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'avg',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'A',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: 'e0bea554',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -1250,24 +1250,16 @@ export const getClusterMetricsQueryPayload = (
                key: k8sDaemonsetNameKey,
                type: 'tag',
              },
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_namespace_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sNamespaceNameKey,
-               type: 'tag',
-             },
            ],
            having: [],
-           legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'current_nodes',
            limit: null,
            orderBy: [],
            queryName: 'A',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'avg',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'avg',
          },
          {
            aggregateAttribute: {
@@ -1278,14 +1270,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sDaemonsetDesiredScheduledNodesKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'avg',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'B',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: '741052f7',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -1310,24 +1302,16 @@ export const getClusterMetricsQueryPayload = (
                key: k8sDaemonsetNameKey,
                type: 'tag',
              },
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_namespace_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sNamespaceNameKey,
-               type: 'tag',
-             },
            ],
            having: [],
-           legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'desired_nodes',
            limit: null,
            orderBy: [],
            queryName: 'B',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'avg',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'avg',
          },
          {
            aggregateAttribute: {
@@ -1338,14 +1322,14 @@ export const getClusterMetricsQueryPayload = (
              key: k8sDaemonsetReadyNodesKey,
              type: 'Gauge',
            },
-           aggregateOperator: 'latest',
+           aggregateOperator: 'avg',
            dataSource: DataSource.METRICS,
            disabled: false,
            expression: 'C',
            filters: {
              items: [
                {
-                 id: 'd7779183',
+                 id: 'f23759f2',
                  key: {
                    dataType: DataTypes.String,
                    id: 'k8s_cluster_name--string--tag--false',
@@ -1370,24 +1354,16 @@ export const getClusterMetricsQueryPayload = (
                key: k8sDaemonsetNameKey,
                type: 'tag',
              },
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_namespace_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sNamespaceNameKey,
-               type: 'tag',
-             },
            ],
            having: [],
-           legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
+           legend: 'ready_nodes',
            limit: null,
            orderBy: [],
            queryName: 'C',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
+           reduceTo: 'last',
+           spaceAggregation: 'avg',
            stepInterval: 60,
-           timeAggregation: 'latest',
+           timeAggregation: 'avg',
          },
        ],
        queryFormulas: [],
@@ -1436,316 +1412,7 @@ export const getClusterMetricsQueryPayload = (
      queryType: EQueryType.QUERY_BUILDER,
    },
    variables: {},
-   formatForWeb: false,
-   start,
-   end,
- },
- {
-   selectedTime: 'GLOBAL_TIME',
-   graphType: PANEL_TYPES.TIME_SERIES,
-   query: {
-     builder: {
-       queryData: [
-         {
-           aggregateAttribute: {
-             dataType: DataTypes.Float64,
-             id: 'k8s_job_active_pods--float64--Gauge--true',
-             isColumn: true,
-             isJSON: false,
-             key: k8sJobActivePodsKey,
-             type: 'Gauge',
-           },
-           aggregateOperator: 'latest',
-           dataSource: DataSource.METRICS,
-           disabled: false,
-           expression: 'A',
-           filters: {
-             items: [
-               {
-                 id: 'd7779183',
-                 key: {
-                   dataType: DataTypes.String,
-                   id: 'k8s_cluster_name--string--tag--false',
-                   isColumn: false,
-                   isJSON: false,
-                   key: k8sClusterNameKey,
-                   type: 'tag',
-                 },
-                 op: '=',
-                 value: cluster.meta.k8s_cluster_name,
-               },
-             ],
-             op: 'AND',
-           },
-           functions: [],
-           groupBy: [
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_job_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sJobNameKey,
-               type: 'tag',
-             },
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_namespace_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sNamespaceNameKey,
-               type: 'tag',
-             },
-           ],
-           having: [],
-           legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-           limit: null,
-           orderBy: [],
-           queryName: 'A',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
-           stepInterval: 60,
-           timeAggregation: 'latest',
-         },
-         {
-           aggregateAttribute: {
-             dataType: DataTypes.Float64,
-             id: 'k8s_job_successful_pods--float64--Gauge--true',
-             isColumn: true,
-             isJSON: false,
-             key: k8sJobSuccessfulPodsKey,
-             type: 'Gauge',
-           },
-           aggregateOperator: 'latest',
-           dataSource: DataSource.METRICS,
-           disabled: false,
-           expression: 'B',
-           filters: {
-             items: [
-               {
-                 id: 'd7779183',
-                 key: {
-                   dataType: DataTypes.String,
-                   id: 'k8s_cluster_name--string--tag--false',
-                   isColumn: false,
-                   isJSON: false,
-                   key: k8sClusterNameKey,
-                   type: 'tag',
-                 },
-                 op: '=',
-                 value: cluster.meta.k8s_cluster_name,
-               },
-             ],
-             op: 'AND',
-           },
-           functions: [],
-           groupBy: [
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_job_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sJobNameKey,
-               type: 'tag',
-             },
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_namespace_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sNamespaceNameKey,
-               type: 'tag',
-             },
-           ],
-           having: [],
-           legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-           limit: null,
-           orderBy: [],
-           queryName: 'B',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
-           stepInterval: 60,
-           timeAggregation: 'latest',
-         },
-         {
-           aggregateAttribute: {
-             dataType: DataTypes.Float64,
-             id: 'k8s_job_failed_pods--float64--Gauge--true',
-             isColumn: true,
-             isJSON: false,
-             key: k8sJobFailedPodsKey,
-             type: 'Gauge',
-           },
-           aggregateOperator: 'latest',
-           dataSource: DataSource.METRICS,
-           disabled: false,
-           expression: 'C',
-           filters: {
-             items: [
-               {
-                 id: 'd7779183',
-                 key: {
-                   dataType: DataTypes.String,
-                   id: 'k8s_cluster_name--string--tag--false',
-                   isColumn: false,
-                   isJSON: false,
-                   key: k8sClusterNameKey,
-                   type: 'tag',
-                 },
-                 op: '=',
-                 value: cluster.meta.k8s_cluster_name,
-               },
-             ],
-             op: 'AND',
-           },
-           functions: [],
-           groupBy: [
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_job_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sJobNameKey,
-               type: 'tag',
-             },
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_namespace_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sNamespaceNameKey,
-               type: 'tag',
-             },
-           ],
-           having: [],
-           legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-           limit: null,
-           orderBy: [],
-           queryName: 'C',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
-           stepInterval: 60,
-           timeAggregation: 'latest',
-         },
-         {
-           aggregateAttribute: {
-             dataType: DataTypes.Float64,
-             id: 'k8s_job_desired_successful_pods--float64--Gauge--true',
-             isColumn: true,
-             isJSON: false,
-             key: k8sJobDesiredSuccessfulPodsKey,
-             type: 'Gauge',
-           },
-           aggregateOperator: 'latest',
-           dataSource: DataSource.METRICS,
-           disabled: false,
-           expression: 'D',
-           filters: {
-             items: [
-               {
-                 id: 'd7779183',
-                 key: {
-                   dataType: DataTypes.String,
-                   id: 'k8s_cluster_name--string--tag--false',
-                   isColumn: false,
-                   isJSON: false,
-                   key: k8sClusterNameKey,
-                   type: 'tag',
-                 },
-                 op: '=',
-                 value: cluster.meta.k8s_cluster_name,
-               },
-             ],
-             op: 'AND',
-           },
-           functions: [],
-           groupBy: [
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_job_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sJobNameKey,
-               type: 'tag',
-             },
-             {
-               dataType: DataTypes.String,
-               id: 'k8s_namespace_name--string--tag--false',
-               isColumn: false,
-               isJSON: false,
-               key: k8sNamespaceNameKey,
-               type: 'tag',
-             },
-           ],
-           having: [],
-           legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-           limit: null,
-           orderBy: [],
-           queryName: 'D',
-           reduceTo: 'avg',
-           spaceAggregation: 'max',
-           stepInterval: 60,
-           timeAggregation: 'latest',
-         },
-       ],
-       queryFormulas: [],
-     },
-     clickhouse_sql: [
-       {
-         disabled: false,
-         legend: '',
-         name: 'A',
-         query: '',
-       },
-       {
-         disabled: false,
-         legend: '',
-         name: 'B',
-         query: '',
-       },
-       {
-         disabled: false,
-         legend: '',
-         name: 'C',
-         query: '',
-       },
-       {
-         disabled: false,
-         legend: '',
-         name: 'D',
-         query: '',
-       },
-     ],
-     id: v4(),
-     promql: [
-       {
-         disabled: false,
-         legend: '',
-         name: 'A',
-         query: '',
-       },
-       {
-         disabled: false,
-         legend: '',
-         name: 'B',
-         query: '',
-       },
-       {
-         disabled: false,
-         legend: '',
-         name: 'C',
-         query: '',
-       },
-       {
-         disabled: false,
-         legend: '',
-         name: 'D',
-         query: '',
-       },
-     ],
-     queryType: EQueryType.QUERY_BUILDER,
-   },
-   variables: {},
-   formatForWeb: false,
+   formatForWeb: true,
    start,
    end,
  },
@@ -1777,7 +1444,7 @@ export const getClusterMetricsQueryPayload = (
                  id: 'k8s_cluster_name--string--tag--false',
                  isColumn: false,
                  isJSON: false,
-                 key: 'k8s_cluster_name',
+                 key: k8sClusterNameKey,
                  type: 'tag',
                },
                op: '=',
@@ -1837,7 +1504,7 @@ export const getClusterMetricsQueryPayload = (
                  id: 'k8s_cluster_name--string--tag--false',
                  isColumn: false,
                  isJSON: false,
-                 key: 'k8s_cluster_name',
+                 key: k8sClusterNameKey,
                  type: 'tag',
                },
                op: '=',
@@ -1897,7 +1564,7 @@ export const getClusterMetricsQueryPayload = (
                  id: 'k8s_cluster_name--string--tag--false',
                  isColumn: false,
                  isJSON: false,
-                 key: 'k8s_cluster_name',
+                 key: k8sClusterNameKey,
                  type: 'tag',
                },
                op: '=',
@@ -1957,7 +1624,7 @@ export const getClusterMetricsQueryPayload = (
                  id: 'k8s_cluster_name--string--tag--false',
                  isColumn: false,
                  isJSON: false,
-                 key: 'k8s_cluster_name',
+                 key: k8sClusterNameKey,
                  type: 'tag',
                },
                op: '=',
@@ -2005,6 +1672,24 @@ export const getClusterMetricsQueryPayload = (
          name: 'A',
          query: '',
        },
+       {
+         disabled: false,
+         legend: '',
+         name: 'B',
+         query: '',
+       },
+       {
+         disabled: false,
+         legend: '',
+         name: 'C',
+         query: '',
+       },
+       {
+         disabled: false,
+         legend: '',
+         name: 'D',
+         query: '',
+       },
      ],
      id: v4(),
      promql: [
@@ -2014,6 +1699,24 @@ export const getClusterMetricsQueryPayload = (
          name: 'A',
          query: '',
        },
+       {
+         disabled: false,
+         legend: '',
+         name: 'B',
+         query: '',
+       },
+       {
+         disabled: false,
+         legend: '',
+         name: 'C',
+         query: '',
+       },
+       {
+         disabled: false,
+         legend: '',
+         name: 'D',
+         query: '',
+       },
      ],
      queryType: EQueryType.QUERY_BUILDER,
    },
```
```diff
@@ -98,6 +98,7 @@ interface QueryBuilderSearchV2Props {
   hideSpanScopeSelector?: boolean;
   // Determines whether to call onChange when a tag is closed
   triggerOnChangeOnClose?: boolean;
+  skipQueryBuilderRedirect?: boolean;
 }

 export interface Option {
@@ -137,6 +138,7 @@ function QueryBuilderSearchV2(
     operatorConfigKey,
     hideSpanScopeSelector,
     triggerOnChangeOnClose,
+    skipQueryBuilderRedirect,
   } = props;

   const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
@@ -1038,7 +1040,11 @@ function QueryBuilderSearchV2(
       })}
     </Select>
     {!hideSpanScopeSelector && (
-      <SpanScopeSelector query={query} onChange={onChange} />
+      <SpanScopeSelector
+        query={query}
+        onChange={onChange}
+        skipQueryBuilderRedirect={skipQueryBuilderRedirect}
+      />
     )}
   </div>
 );
@@ -1056,6 +1062,7 @@ QueryBuilderSearchV2.defaultProps = {
   operatorConfigKey: undefined,
   hideSpanScopeSelector: true,
   triggerOnChangeOnClose: false,
+  skipQueryBuilderRedirect: false,
 };

 export default QueryBuilderSearchV2;
```
```diff
@@ -23,6 +23,7 @@ interface SpanFilterConfig {
 interface SpanScopeSelectorProps {
   onChange?: (value: TagFilter) => void;
   query?: IBuilderQuery;
+  skipQueryBuilderRedirect?: boolean;
 }

 const SPAN_FILTER_CONFIG: Record<SpanScope, SpanFilterConfig | null> = {
@@ -58,6 +59,7 @@ const SELECT_OPTIONS = [
 function SpanScopeSelector({
   onChange,
   query,
+  skipQueryBuilderRedirect,
 }: SpanScopeSelectorProps): JSX.Element {
   const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
   const [selectedScope, setSelectedScope] = useState<SpanScope>(
@@ -79,6 +81,7 @@ function SpanScopeSelector({
     if (hasFilter('isEntryPoint')) return SpanScope.ENTRYPOINT_SPANS;
     return SpanScope.ALL_SPANS;
   };

   useEffect(() => {
     let queryData = (currentQuery?.builder?.queryData || [])?.find(
       (item) => item.queryName === query?.queryName,
@@ -127,13 +130,10 @@ function SpanScopeSelector({
       },
     }));

-    if (onChange && query) {
+    if (skipQueryBuilderRedirect && onChange && query) {
       onChange({
         ...query.filters,
-        items: getUpdatedFilters(
-          [...query.filters.items, ...newQuery.builder.queryData[0].filters.items],
-          true,
-        ),
+        items: getUpdatedFilters([...query.filters.items], true),
       });

       setSelectedScope(newScope);
@@ -156,6 +156,7 @@
 SpanScopeSelector.defaultProps = {
   onChange: undefined,
   query: undefined,
+  skipQueryBuilderRedirect: false,
 };

 export default SpanScopeSelector;
```
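Two behavioral changes land in the selector: the `onChange` branch is now gated behind the new `skipQueryBuilderRedirect` prop, and it forwards only the query's existing filter items rather than also concatenating the freshly built scope items (the source of the duplicate-filter bug the test below pins down). A reduced sketch of the new control flow, with the types stubbed locally for illustration:

```ts
type TagFilterItem = { id: string; key?: { type?: string } };
type TagFilter = { items: TagFilterItem[]; op: string };

// Stand-in for the component's getUpdatedFilters helper.
declare function getUpdatedFilters(
  items: TagFilterItem[],
  addScope: boolean,
): TagFilterItem[];

function emitScopeChange(
  skipQueryBuilderRedirect: boolean | undefined,
  onChange: ((value: TagFilter) => void) | undefined,
  filters: TagFilter,
): void {
  // Before: `if (onChange && query)`, and the items array also spread in
  // newQuery.builder.queryData[0].filters.items, duplicating non-scope filters.
  if (skipQueryBuilderRedirect && onChange) {
    onChange({ ...filters, items: getUpdatedFilters([...filters.items], true) });
  }
  // Otherwise the selector falls through to redirectWithQueryBuilderData, as before.
}
```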
```diff
@@ -3,9 +3,11 @@ import {
   render,
   RenderResult,
   screen,
+  within,
 } from '@testing-library/react';
 import { initialQueriesMap } from 'constants/queryBuilder';
 import { QueryBuilderContext } from 'providers/QueryBuilder';
+import { QueryClient, QueryClientProvider } from 'react-query';
 import {
   IBuilderQuery,
   Query,
@@ -13,6 +15,7 @@ import {
   TagFilterItem,
 } from 'types/api/queryBuilder/queryBuilderData';

+import QueryBuilderSearchV2 from '../QueryBuilderSearchV2';
 import SpanScopeSelector from '../SpanScopeSelector';

 const mockRedirectWithQueryBuilderData = jest.fn();
@@ -48,6 +51,14 @@ const defaultQuery = {
   },
 };

+const queryClient = new QueryClient({
+  defaultOptions: {
+    queries: {
+      refetchOnWindowFocus: false,
+    },
+  },
+});
+
 const defaultQueryBuilderQuery: IBuilderQuery = {
   ...initialQueriesMap.traces.builder.queryData[0],
   queryName: 'A',
@@ -76,6 +87,7 @@ const renderWithContext = (
   initialQuery = defaultQuery,
   onChangeProp?: (value: TagFilter) => void,
   queryProp?: IBuilderQuery,
+  skipQueryBuilderRedirect = false,
 ): RenderResult =>
   render(
     <QueryBuilderContext.Provider
@@ -87,12 +99,19 @@ const renderWithContext = (
         } as any
       }
     >
-      <SpanScopeSelector onChange={onChangeProp} query={queryProp} />
+      <SpanScopeSelector
+        onChange={onChangeProp}
+        query={queryProp}
+        skipQueryBuilderRedirect={skipQueryBuilderRedirect}
+      />
     </QueryBuilderContext.Provider>,
   );

 const selectOption = async (optionText: string): Promise<void> => {
-  const selector = screen.getByRole('combobox');
+  const selector = within(screen.getByTestId('span-scope-selector')).getByRole(
+    'combobox',
+  );

   fireEvent.mouseDown(selector);

   // Wait for dropdown to appear
@@ -264,6 +283,7 @@ describe('SpanScopeSelector', () => {
       defaultQuery,
       mockOnChange,
       localQuery,
+      true,
     );
     expect(await screen.findByText('All Spans')).toBeInTheDocument();
@@ -283,6 +303,7 @@ describe('SpanScopeSelector', () => {
       defaultQuery,
       mockOnChange,
       localQuery,
+      true,
     );
     expect(await screen.findByText('Root Spans')).toBeInTheDocument();
@@ -303,6 +324,7 @@ describe('SpanScopeSelector', () => {
       defaultQuery,
       mockOnChange,
       localQuery,
+      true,
     );
     expect(await screen.findByText('Root Spans')).toBeInTheDocument();
@@ -324,6 +346,7 @@ describe('SpanScopeSelector', () => {
       defaultQuery,
       mockOnChange,
       localQuery,
+      true,
     );
     expect(await screen.findByText('Root Spans')).toBeInTheDocument();
@@ -350,6 +373,7 @@ describe('SpanScopeSelector', () => {
       defaultQuery,
       mockOnChange,
       localQuery,
+      true,
     );
     expect(await screen.findByText('Entrypoint Spans')).toBeInTheDocument();
@@ -361,5 +385,60 @@ describe('SpanScopeSelector', () => {
       container.querySelector('span[title="All Spans"]'),
     ).toBeInTheDocument();
   });

+  it('should not duplicate non-scope filters when changing span scope', async () => {
+    const query = {
+      ...defaultQuery,
+      builder: {
+        ...defaultQuery.builder,
+        queryData: [
+          {
+            ...defaultQuery.builder.queryData[0],
+            filters: {
+              items: [createNonScopeFilter('service', 'checkout')],
+              op: 'AND',
+            },
+          },
+        ],
+      },
+    };
+    render(
+      <QueryClientProvider client={queryClient}>
+        <QueryBuilderContext.Provider
+          value={
+            {
+              currentQuery: query,
+              redirectWithQueryBuilderData: mockRedirectWithQueryBuilderData,
+            } as any
+          }
+        >
+          <QueryBuilderSearchV2
+            query={query.builder.queryData[0] as any}
+            onChange={mockOnChange}
+            hideSpanScopeSelector={false}
+          />
+        </QueryBuilderContext.Provider>
+      </QueryClientProvider>,
+    );
+
+    expect(await screen.findByText('All Spans')).toBeInTheDocument();
+
+    await selectOption('Entrypoint Spans');
+
+    expect(mockRedirectWithQueryBuilderData).toHaveBeenCalled();
+
+    const redirectQueryArg = mockRedirectWithQueryBuilderData.mock
+      .calls[0][0] as Query;
+    const { items } = redirectQueryArg.builder.queryData[0].filters;
+    // Count non-scope filters
+    const nonScopeFilters = items.filter(
+      (filter) => filter.key?.type !== 'spanSearchScope',
+    );
+    expect(nonScopeFilters).toHaveLength(1);
+
+    expect(nonScopeFilters).toContainEqual(
+      createNonScopeFilter('service', 'checkout'),
+    );
+  });
 });
});
```
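One detail worth noting in the updated test helper: because the new test renders `QueryBuilderSearchV2` (which contains its own combobox) alongside the selector, a bare `screen.getByRole('combobox')` would now be ambiguous. Scoping the lookup keeps it deterministic; a minimal sketch (assuming the selector renders with `data-testid="span-scope-selector"`, as the diff implies):

```ts
import { screen, within } from '@testing-library/react';

// Instead of screen.getByRole('combobox'), which throws when several
// comboboxes are rendered, scope the query to the selector's own subtree.
const combobox = within(screen.getByTestId('span-scope-selector')).getByRole(
  'combobox',
);
```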
```diff
@@ -142,6 +142,7 @@ function Filters({
         }}
         onChange={handleFilterChange}
         hideSpanScopeSelector={false}
+        skipQueryBuilderRedirect
       />
       {filteredSpanIds.length > 0 && (
         <div className="pre-next-toggle">
```
```diff
@@ -17,6 +17,7 @@ import { AppState } from 'store/reducers';
 import { DataSource } from 'types/common/queryBuilder';
 import { GlobalReducer } from 'types/reducer/globalTime';
 import DOCLINKS from 'utils/docLinks';
+import { transformBuilderQueryFields } from 'utils/queryTransformers';

 import TraceExplorerControls from '../Controls';
 import { TracesLoading } from '../TraceLoading/TraceLoading';
@@ -39,9 +40,22 @@ function TracesView({ isFilterApplied }: TracesViewProps): JSX.Element {
     QueryParams.pagination,
   );

+  const transformedQuery = useMemo(
+    () =>
+      transformBuilderQueryFields(stagedQuery || initialQueriesMap.traces, {
+        orderBy: [
+          {
+            columnName: 'timestamp',
+            order: 'desc',
+          },
+        ],
+      }),
+    [stagedQuery],
+  );
+
   const { data, isLoading, isFetching, isError } = useGetQueryRange(
     {
-      query: stagedQuery || initialQueriesMap.traces,
+      query: transformedQuery,
       graphType: panelType || PANEL_TYPES.TRACE,
       selectedTime: 'GLOBAL_TIME',
       globalSelectedInterval: globalSelectedTime,
```
```diff
@@ -594,6 +594,53 @@ describe('TracesExplorer - ', () => {
       'http://localhost/trace/5765b60ba7cc4ddafe8bdaa9c1b4b246',
     );
   });
+  it('trace explorer - trace view should only send order by timestamp in the query', async () => {
+    let capturedPayload: QueryRangePayload;
+    const orderBy = [
+      { columnName: 'id', order: 'desc' },
+      { columnName: 'serviceName', order: 'desc' },
+    ];
+    const defaultOrderBy = [{ columnName: 'timestamp', order: 'desc' }];
+    server.use(
+      rest.post(`${BASE_URL}/api/v4/query_range`, async (req, res, ctx) => {
+        const payload = await req.json();
+        capturedPayload = payload;
+        return res(ctx.status(200), ctx.json(queryRangeForTraceView));
+      }),
+    );
+    render(
+      <QueryBuilderContext.Provider
+        value={{
+          ...qbProviderValue,
+          panelType: PANEL_TYPES.TRACE,
+          stagedQuery: {
+            ...qbProviderValue.stagedQuery,
+            builder: {
+              ...qbProviderValue.stagedQuery.builder,
+              queryData: [
+                {
+                  ...qbProviderValue.stagedQuery.builder.queryData[0],
+                  orderBy,
+                },
+              ],
+            },
+          },
+        }}
+      >
+        <TracesExplorer />
+      </QueryBuilderContext.Provider>,
+    );
+
+    await waitFor(() => {
+      expect(capturedPayload).toBeDefined();
+      expect(capturedPayload?.compositeQuery?.builderQueries?.A.orderBy).toEqual(
+        defaultOrderBy,
+      );
+      expect(
+        capturedPayload?.compositeQuery?.builderQueries?.A.orderBy,
+      ).not.toEqual(orderBy);
+    });
+  });

   it('test for explorer options', async () => {
     const { getByText, getByTestId } = render(
```
frontend/src/utils/queryTransformers.ts (28 additions, new file)

```diff
@@ -0,0 +1,28 @@
+import { cloneDeep } from 'lodash-es';
+import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
+
+/**
+ * Transforms a query by modifying specific fields in the builder queries
+ * @param query - The original query object
+ * @param fieldOverrides - Partial object containing fields to override in each builder query
+ * @returns A new query object with the modified fields
+ */
+export const transformBuilderQueryFields = (
+  query: Query,
+  fieldOverrides: Partial<IBuilderQuery>,
+): Query => {
+  // Create a deep copy of the query
+  const transformedQuery: Query = cloneDeep(query);
+
+  // Update the specified fields for each query in the builder
+  if (transformedQuery.builder?.queryData) {
+    transformedQuery.builder.queryData = transformedQuery.builder.queryData.map(
+      (queryItem) => ({
+        ...queryItem,
+        ...fieldOverrides,
+      }),
+    );
+  }
+
+  return transformedQuery;
+};
```
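For context, this is how `TracesView` above consumes the new helper; a minimal usage sketch (the `stagedQuery` value is assumed to come from the query-builder context):

```ts
import { transformBuilderQueryFields } from 'utils/queryTransformers';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

declare const stagedQuery: Query; // assumed: the staged query from useQueryBuilder()

// Force every builder query to order by timestamp; the helper deep-clones,
// so the staged query itself is never mutated.
const transformedQuery = transformBuilderQueryFields(stagedQuery, {
  orderBy: [{ columnName: 'timestamp', order: 'desc' }],
});
```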
```diff
@@ -9,6 +9,7 @@ import (
    "time"

    "github.com/ClickHouse/clickhouse-go/v2"
+   "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -205,6 +206,10 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,

    rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
+       if errors.Is(err, context.DeadlineExceeded) {
+           return nil, errors.Newf(errors.TypeTimeout, errors.CodeTimeout, "Query timed out").
+               WithAdditional("Try refining your search by adding relevant resource attributes filtering")
+       }
        return nil, err
    }
    defer rows.Close()
```
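The added branch distinguishes a query that ran out of time from other ClickHouse failures and attaches an actionable hint. A sketch of the same shape, in TypeScript for consistency with the other examples here (the real change uses the SigNoz Go `errors` package and `context.DeadlineExceeded`):

```ts
class QueryTimeoutError extends Error {
  constructor(message: string, public readonly additional: string[]) {
    super(message);
    this.name = 'QueryTimeoutError';
  }
}

function mapClickHouseError(err: Error): Error {
  // Mirrors errors.Is(err, context.DeadlineExceeded) in the Go code.
  if (err.message.includes('context deadline exceeded')) {
    return new QueryTimeoutError('Query timed out', [
      'Try refining your search by adding relevant resource attributes filtering',
    ]);
  }
  return err;
}
```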
```diff
@@ -89,6 +89,9 @@ func newProvider(
        resourceFilterFieldMapper,
        resourceFilterConditionBuilder,
        telemetryMetadataStore,
+       telemetrylogs.DefaultFullTextColumn,
+       telemetrylogs.BodyJSONStringSearchPrefix,
+       telemetrylogs.GetBodyJSONKey,
    )
    logAggExprRewriter := querybuilder.NewAggExprRewriter(
        telemetrylogs.DefaultFullTextColumn,
```
```diff
@@ -94,3 +94,11 @@ func CollisionHandledFinalExpr(

    return multiIfStmt, allArgs, nil
}
+
+func GroupByKeys(keys []qbtypes.GroupByKey) []string {
+   k := []string{}
+   for _, key := range keys {
+       k = append(k, "`"+key.Name+"`")
+   }
+   return k
+}
```
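`GroupByKeys` exists so generated GROUP BY clauses quote every key: group-by names such as `service.name` contain dots and would otherwise be parsed as a qualified `service`.`name` reference. A TypeScript rendering of the same helper, purely for illustration:

```ts
function groupByKeys(keys: { name: string }[]): string[] {
  // Backtick-quote each key so dotted names survive as single identifiers.
  return keys.map((key) => '`' + key.name + '`');
}

console.log(groupByKeys([{ name: 'service.name' }, { name: 'host' }]));
// [ '`service.name`', '`host`' ]
```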
```diff
@@ -38,6 +38,10 @@ type resourceFilterStatementBuilder[T any] struct {
    conditionBuilder qbtypes.ConditionBuilder
    metadataStore    telemetrytypes.MetadataStore
    signal           telemetrytypes.Signal
+
+   fullTextColumn *telemetrytypes.TelemetryFieldKey
+   jsonBodyPrefix string
+   jsonKeyToKey   qbtypes.JsonKeyToFieldFunc
}

// Ensure interface compliance at compile time
@@ -64,12 +68,18 @@ func NewLogResourceFilterStatementBuilder(
    fieldMapper qbtypes.FieldMapper,
    conditionBuilder qbtypes.ConditionBuilder,
    metadataStore telemetrytypes.MetadataStore,
+   fullTextColumn *telemetrytypes.TelemetryFieldKey,
+   jsonBodyPrefix string,
+   jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
    return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
        fieldMapper:      fieldMapper,
        conditionBuilder: conditionBuilder,
        metadataStore:    metadataStore,
        signal:           telemetrytypes.SignalLogs,
+       fullTextColumn:   fullTextColumn,
+       jsonBodyPrefix:   jsonBodyPrefix,
+       jsonKeyToKey:     jsonKeyToKey,
    }
}
@@ -140,7 +150,11 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
        FieldMapper:      b.fieldMapper,
        ConditionBuilder: b.conditionBuilder,
        FieldKeys:        keys,
+       FullTextColumn:   b.fullTextColumn,
+       JsonBodyPrefix:   b.jsonBodyPrefix,
+       JsonKeyToKey:     b.jsonKeyToKey,
        SkipFullTextFilter: true,
+       SkipFunctionCalls:  true,
        Variables:          variables,
    })
```
```diff
@@ -29,7 +29,10 @@ type filterExpressionVisitor struct {
    jsonKeyToKey       qbtypes.JsonKeyToFieldFunc
    skipResourceFilter bool
    skipFullTextFilter bool
+   skipFunctionCalls  bool
    variables          map[string]qbtypes.VariableItem
+
+   keysWithWarnings map[string]bool
}

type FilterExprVisitorOpts struct {
@@ -42,6 +45,7 @@ type FilterExprVisitorOpts struct {
    JsonKeyToKey       qbtypes.JsonKeyToFieldFunc
    SkipResourceFilter bool
    SkipFullTextFilter bool
+   SkipFunctionCalls  bool
    Variables          map[string]qbtypes.VariableItem
}

@@ -57,7 +61,9 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
        jsonKeyToKey:       opts.JsonKeyToKey,
        skipResourceFilter: opts.SkipResourceFilter,
        skipFullTextFilter: opts.SkipFullTextFilter,
+       skipFunctionCalls:  opts.SkipFunctionCalls,
        variables:          opts.Variables,
+       keysWithWarnings:   make(map[string]bool),
    }
}

@@ -547,6 +553,10 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an

// VisitFunctionCall handles function calls like has(), hasAny(), etc.
func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext) any {
+   if v.skipFunctionCalls {
+       return "true"
+   }
+
    // Get function name based on which token is present
    var functionName string
    if ctx.HAS() != nil {
@@ -690,7 +700,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
        }
    }

-   if len(fieldKeysForName) > 1 {
+   if len(fieldKeysForName) > 1 && !v.keysWithWarnings[keyName] {
        // this is warning state, we must have a unambiguous key
        v.warnings = append(v.warnings, fmt.Sprintf(
            "key `%s` is ambiguous, found %d different combinations of field context and data type: %v",
@@ -698,6 +708,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
            len(fieldKeysForName),
            fieldKeysForName,
        ))
+       v.keysWithWarnings[keyName] = true
    }

    return fieldKeysForName
```
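The `keysWithWarnings` map deduplicates the ambiguity warning: a key that appears several times in one filter expression previously emitted the same warning on every visit. The pattern, reduced to a TypeScript sketch:

```ts
const keysWithWarnings = new Set<string>();
const warnings: string[] = [];

function warnAmbiguousKey(keyName: string, combinations: number): void {
  // Emit the warning only the first time a given key is seen.
  if (!keysWithWarnings.has(keyName)) {
    warnings.push(
      `key \`${keyName}\` is ambiguous, found ${combinations} combinations`,
    );
    keysWithWarnings.add(keyName);
  }
}
```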
```diff
@@ -9,6 +9,7 @@ import (
    schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+   "golang.org/x/exp/maps"

    "github.com/huandu/go-sqlbuilder"
)
@@ -148,7 +149,7 @@ func (c *conditionBuilder) conditionFor(
    }

    // if the field is intrinsic, it always exists
-   if slices.Contains(IntrinsicFields, key.Name) {
+   if slices.Contains(maps.Keys(IntrinsicFields), key.Name) {
        return "true", nil
    }

@@ -210,7 +211,7 @@ func (c *conditionBuilder) ConditionFor(
    // skip adding exists filter for intrinsic fields
    // with an exception for body json search
    field, _ := c.fm.FieldFor(ctx, key)
-   if slices.Contains(IntrinsicFields, field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
+   if slices.Contains(maps.Keys(IntrinsicFields), field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
        return condition, nil
    }
```
```diff
@@ -337,7 +337,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
            },
            operator:      qbtypes.FilterOperatorEqual,
            value:         "GET",
-           expectedSQL:   `JSONExtract(JSON_VALUE(body, '$."http"."method"'), 'String') = ?`,
+           expectedSQL:   `JSON_VALUE(body, '$."http"."method"') = ?`,
            expectedError: nil,
        },
        {
@@ -417,7 +417,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
            },
            operator:      qbtypes.FilterOperatorContains,
            value:         "200",
-           expectedSQL:   `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) LIKE LOWER(?)`,
+           expectedSQL:   `LOWER(JSON_VALUE(body, '$."http"."status_code"')) LIKE LOWER(?)`,
            expectedError: nil,
        },
        {
@@ -427,7 +427,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
            },
            operator:      qbtypes.FilterOperatorNotContains,
            value:         "200",
-           expectedSQL:   `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) NOT LIKE LOWER(?)`,
+           expectedSQL:   `LOWER(JSON_VALUE(body, '$."http"."status_code"')) NOT LIKE LOWER(?)`,
            expectedError: nil,
        },
        {
```
```diff
@@ -10,7 +10,57 @@ var (
        FieldDataType: telemetrytypes.FieldDataTypeString,
    }
    BodyJSONStringSearchPrefix = `body.`
-   IntrinsicFields            = []string{
-       "body", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number", "scope_name", "scope_version",
+   IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
+       "body": {
+           Name:          "body",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextLog,
+           FieldDataType: telemetrytypes.FieldDataTypeString,
+       },
+       "trace_id": {
+           Name:          "trace_id",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextLog,
+           FieldDataType: telemetrytypes.FieldDataTypeString,
+       },
+       "span_id": {
+           Name:          "span_id",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextLog,
+           FieldDataType: telemetrytypes.FieldDataTypeString,
+       },
+       "trace_flags": {
+           Name:          "trace_flags",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextLog,
+           FieldDataType: telemetrytypes.FieldDataTypeNumber,
+       },
+       "severity_text": {
+           Name:          "severity_text",
+           Description:   "Log level. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitytext)",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextLog,
+           FieldDataType: telemetrytypes.FieldDataTypeString,
+       },
+       "severity_number": {
+           Name:          "severity_number",
+           Description:   "Numerical value of the severity. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitynumber)",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextLog,
+           FieldDataType: telemetrytypes.FieldDataTypeNumber,
+       },
+       "scope_name": {
+           Name:          "scope_name",
+           Description:   "Logger name. Learn more about instrumentation scope [here](https://opentelemetry.io/docs/concepts/instrumentation-scope/)",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextScope,
+           FieldDataType: telemetrytypes.FieldDataTypeString,
+       },
+       "scope_version": {
+           Name:          "scope_version",
+           Signal:        telemetrytypes.SignalLogs,
+           FieldContext:  telemetrytypes.FieldContextScope,
+           FieldDataType: telemetrytypes.FieldDataTypeString,
+       },
    }
)
```
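Switching `IntrinsicFields` from a `[]string` to a map keyed by field name lets each intrinsic field carry its context, data type, and a user-facing description, while the membership checks in the condition builder move to `maps.Keys(...)`. The shape of the change, sketched in TypeScript:

```ts
// Before: membership only.
const intrinsicFieldNames = ['body', 'trace_id', 'span_id' /* ... */];

// After: membership plus metadata per field.
interface TelemetryFieldKey {
  name: string;
  fieldContext: 'log' | 'scope';
  fieldDataType: 'string' | 'number';
  description?: string;
}

const intrinsicFields: Record<string, TelemetryFieldKey> = {
  body: { name: 'body', fieldContext: 'log', fieldDataType: 'string' },
  severity_number: {
    name: 'severity_number',
    fieldContext: 'log',
    fieldDataType: 'number',
    description: 'Numerical value of the severity.',
  },
};

const isIntrinsic = (name: string): boolean => name in intrinsicFields;
```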
```diff
@@ -73,7 +73,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
            category:              "json",
            query:                 "body.message = hello",
            shouldPass:            true,
-           expectedQuery:         `WHERE (JSONExtract(JSON_VALUE(body, '$."message"'), 'String') = ? AND JSON_EXISTS(body, '$."message"'))`,
+           expectedQuery:         `WHERE (JSON_VALUE(body, '$."message"') = ? AND JSON_EXISTS(body, '$."message"'))`,
            expectedArgs:          []any{"hello"},
            expectedErrorContains: "",
        },
@@ -113,7 +113,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
            category:              "json",
            query:                 "body.message REGEXP 'a*'",
            shouldPass:            true,
-           expectedQuery:         `WHERE (match(JSONExtract(JSON_VALUE(body, '$."message"'), 'String'), ?) AND JSON_EXISTS(body, '$."message"'))`,
+           expectedQuery:         `WHERE (match(JSON_VALUE(body, '$."message"'), ?) AND JSON_EXISTS(body, '$."message"'))`,
            expectedArgs:          []any{"a*"},
            expectedErrorContains: "",
        },
@@ -121,7 +121,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
            category:              "json",
            query:                 `body.message CONTAINS "hello 'world'"`,
            shouldPass:            true,
-           expectedQuery:         `WHERE (LOWER(JSONExtract(JSON_VALUE(body, '$."message"'), 'String')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`,
+           expectedQuery:         `WHERE (LOWER(JSON_VALUE(body, '$."message"')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`,
            expectedArgs:          []any{"%hello 'world'%"},
            expectedErrorContains: "",
        },
@@ -136,7 +136,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
            category:              "json",
            query:                 `body.name IN ('hello', 'world')`,
            shouldPass:            true,
-           expectedQuery:         `WHERE ((JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ? OR JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ?) AND JSON_EXISTS(body, '$."name"'))`,
+           expectedQuery:         `WHERE ((JSON_VALUE(body, '$."name"') = ? OR JSON_VALUE(body, '$."name"') = ?) AND JSON_EXISTS(body, '$."name"'))`,
            expectedArgs:          []any{"hello", "world"},
            expectedErrorContains: "",
        },
```
@@ -61,7 +61,7 @@ func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp
|
||||
}
|
||||
|
||||
// check if it is array
|
||||
if strings.HasSuffix(key.Name, "[*]") {
|
||||
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
|
||||
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
|
||||
}
|
||||
|
||||
@@ -74,6 +74,8 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
|
||||
for _, part := range parts {
|
||||
if strings.HasSuffix(part, "[*]") {
|
||||
newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[*]")))
|
||||
} else if strings.HasSuffix(part, "[]") {
|
||||
newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[]")))
|
||||
} else {
|
||||
newParts = append(newParts, fmt.Sprintf(`"%s"`, part))
|
||||
}
|
||||
@@ -94,8 +96,12 @@ func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, op
    return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}

// for all other types, we need to extract the value from the JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
if dataType != telemetrytypes.FieldDataTypeString {
    // for all types except strings, we need to extract the value from the JSON_VALUE
    return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}
// for string types, we should compare with the JSON_VALUE
return fmt.Sprintf("JSON_VALUE(body, '$.%s')", getBodyJSONPath(key)), value
}

func GetBodyJSONKeyForExists(_ context.Context, key *telemetrytypes.TelemetryFieldKey, _ qbtypes.FilterOperator, _ any) string {

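Note: the `GetBodyJSONKey` rewrite above is a string fast path — ClickHouse's `JSON_VALUE` already returns a `String`, so the old `JSONExtract(JSON_VALUE(...), 'String')` wrapper was a redundant cast; non-string types still go through `JSONExtract` to reach the target type. A runnable sketch of the resulting SQL fragments (the `"message"` path matches the updated tests above; the numeric example is illustrative):

package main

import "fmt"

// bodyJSONColumn sketches the branch above: strings read JSON_VALUE
// directly, everything else casts via JSONExtract.
func bodyJSONColumn(path, chType string) string {
    if chType != "String" {
        return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", path, chType)
    }
    return fmt.Sprintf("JSON_VALUE(body, '$.%s')", path)
}

func main() {
    fmt.Println(bodyJSONColumn(`"message"`, "String")) // JSON_VALUE(body, '$."message"')
    fmt.Println(bodyJSONColumn(`"attempts"`, "Int64")) // JSONExtract(JSON_VALUE(body, '$."attempts"'), 'Int64')
}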
@@ -270,10 +270,11 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(

// Constrain the main query to the rows that appear in the CTE.
tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))

// Group by all dimensions
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
    // Rewrite having expression to use SQL column names
    rewriter := querybuilder.NewHavingExpressionRewriter()
@@ -290,7 +291,8 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)

} else {
    sb.GroupBy("ALL")
    sb.GroupBy("ts")
    sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
    if query.Having != nil && query.Having.Expression != "" {
        rewriter := querybuilder.NewHavingExpressionRewriter()
        rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
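Note: both hunks above replace `sb.GroupBy("ALL")` with an explicit list — the `ts` bucket plus the query's group-by keys. `GROUP BY ALL` lets ClickHouse infer the grouping set from the non-aggregate SELECT columns; spelling the columns out makes the statement unambiguous and portable to servers without `GROUP BY ALL`, which is why the expected test queries further down change to forms like `GROUP BY ts, `service.name``. A minimal sketch of the pattern (hypothetical column names, not from the diff):

package main

import (
    "fmt"

    "github.com/huandu/go-sqlbuilder"
)

func main() {
    sb := sqlbuilder.NewSelectBuilder()
    sb.Select("ts", "`service.name`", "count() AS __result_0")
    sb.From("signoz_logs.distributed_logs_v2")
    // Explicit grouping instead of GROUP BY ALL: time bucket first,
    // then every group-by dimension.
    sb.GroupBy("ts")
    sb.GroupBy("`service.name`")
    q, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    fmt.Println(q)
}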
@@ -380,7 +382,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
}

// Group by dimensions
sb.GroupBy("ALL")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" {
@@ -492,7 +494,7 @@ func (b *logQueryStatementBuilder) maybeAttachResourceFilter(
    return "", nil, err
}

sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)")
sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")

return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil
}

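Note: the `IN` → `GLOBAL IN` changes above (and the matching ones in the trace builder further down) matter on clustered ClickHouse: with a plain `IN`, each shard evaluates the subquery for itself — against local data, or with another distributed fan-out, depending on the table — so the filter can miss fingerprints or do redundant work; `GLOBAL IN` evaluates the subquery once on the initiator and ships the result to every shard as a temporary table. On a single-node setup the two behave the same. A sketch of the predicate construction (names as in the diff; the `__resource_filter` CTE is attached elsewhere):

package main

import (
    "fmt"

    "github.com/huandu/go-sqlbuilder"
)

func main() {
    sb := sqlbuilder.NewSelectBuilder()
    sb.Select("count()")
    sb.From("signoz_logs.distributed_logs_v2")
    // GLOBAL IN: subquery runs once on the initiator, result is
    // broadcast to every shard instead of re-evaluated per shard.
    sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")
    q, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    fmt.Println(q)
}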
@@ -30,6 +30,9 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
    fm,
    cb,
    mockMetadataStore,
    DefaultFullTextColumn,
    BodyJSONStringSearchPrefix,
    GetBodyJSONKey,
)
}

@@ -65,7 +68,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
    Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
    Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,
@@ -104,7 +107,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
    Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
    Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,

@@ -16,6 +16,7 @@ import (
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/huandu/go-sqlbuilder"
    "golang.org/x/exp/maps"
)

var (
@@ -208,6 +209,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}

keys = append(keys, key)
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}

if rows.Err() != nil {
@@ -215,8 +217,8 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}

staticKeys := []string{"isRoot", "isEntrypoint"}
staticKeys = append(staticKeys, telemetrytraces.IntrinsicFields...)
staticKeys = append(staticKeys, telemetrytraces.CalculatedFields...)
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.IntrinsicFields)...)
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.CalculatedFields)...)

// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
@@ -228,6 +230,19 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
    }
}
if found {
    if field, exists := telemetrytraces.IntrinsicFields[key]; exists {
        if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
            keys = append(keys, &field)
        }
        continue
    }

    if field, exists := telemetrytraces.CalculatedFields[key]; exists {
        if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
            keys = append(keys, &field)
        }
        continue
    }
    keys = append(keys, &telemetrytypes.TelemetryFieldKey{
        Name: key,
        FieldContext: telemetrytypes.FieldContextSpan,
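Note: the new block above prefers the static definitions' richer metadata but avoids duplicates — `mapOfKeys` records every `name;context;datatype` triple returned by the metadata query, and a static intrinsic/calculated field is only appended when that triple hasn't been seen. A compact sketch of the dedup (simplified key type; the real code uses `telemetrytypes.TelemetryFieldKey`):

package main

import "fmt"

type key struct{ Name, Ctx, Type string }

func composite(k key) string { return k.Name + ";" + k.Ctx + ";" + k.Type }

func main() {
    seen := map[string]key{}
    keys := []key{}

    // Keys coming back from the metadata query.
    for _, k := range []key{{"duration_nano", "span", "number"}} {
        keys = append(keys, k)
        seen[composite(k)] = k
    }

    // Static definitions: only appended if not already returned above.
    for _, k := range []key{{"duration_nano", "span", "number"}, {"has_error", "span", "bool"}} {
        if _, added := seen[composite(k)]; !added {
            keys = append(keys, k)
        }
    }

    fmt.Println(len(keys)) // 2 — the duplicate duration_nano is skipped
}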
@@ -361,6 +376,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}

keys = append(keys, key)
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}

if rows.Err() != nil {
@@ -368,7 +384,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}

staticKeys := []string{}
staticKeys = append(staticKeys, telemetrylogs.IntrinsicFields...)
staticKeys = append(staticKeys, maps.Keys(telemetrylogs.IntrinsicFields)...)

// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
@@ -380,6 +396,13 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
    }
}
if found {
    if field, exists := telemetrylogs.IntrinsicFields[key]; exists {
        if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
            keys = append(keys, &field)
        }
        continue
    }

    keys = append(keys, &telemetrytypes.TelemetryFieldKey{
        Name: key,
        FieldContext: telemetrytypes.FieldContextLog,

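Note: because `IntrinsicFields` (for logs and traces alike) is now a map, the static key list is assembled with `maps.Keys` from `golang.org/x/exp/maps` — hence the new imports above. `maps.Keys` returns the keys in no particular order, which is fine here since they are only matched against selectors. Tiny sketch (illustrative entries):

package main

import (
    "fmt"
    "sort"

    "golang.org/x/exp/maps"
)

func main() {
    intrinsic := map[string]struct{}{"body": {}, "severity_text": {}, "trace_id": {}}
    staticKeys := append([]string{}, maps.Keys(intrinsic)...)
    sort.Strings(staticKeys) // map iteration order is randomized
    fmt.Println(staticKeys)  // [body severity_text trace_id]
}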
@@ -258,7 +258,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
|
||||
sb.GTE("unix_milli", start),
|
||||
sb.LT("unix_milli", end),
|
||||
)
|
||||
sb.GroupBy("ALL")
|
||||
sb.GroupBy("ts")
|
||||
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||
|
||||
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
|
||||
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
|
||||
@@ -320,7 +321,8 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
|
||||
sb.AddWhereClause(filterWhere)
|
||||
}
|
||||
|
||||
sb.GroupBy("ALL")
|
||||
sb.GroupBy("fingerprint")
|
||||
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||
|
||||
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
return fmt.Sprintf("(%s) AS filtered_time_series", q), args, nil
|
||||
@@ -375,7 +377,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDelta(
|
||||
sb.GTE("unix_milli", start),
|
||||
sb.LT("unix_milli", end),
|
||||
)
|
||||
sb.GroupBy("ALL")
|
||||
sb.GroupBy("fingerprint", "ts")
|
||||
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||
sb.OrderBy("fingerprint", "ts")
|
||||
|
||||
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
|
||||
@@ -412,7 +415,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
|
||||
baseSb.GTE("unix_milli", start),
|
||||
baseSb.LT("unix_milli", end),
|
||||
)
|
||||
baseSb.GroupBy("ALL")
|
||||
baseSb.GroupBy("fingerprint", "ts")
|
||||
baseSb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||
baseSb.OrderBy("fingerprint", "ts")
|
||||
|
||||
innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
|
||||
@@ -438,7 +442,7 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
|
||||
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
|
||||
}
|
||||
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
|
||||
wrapped.From(fmt.Sprintf("(%s) WINDOW increase_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
|
||||
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
|
||||
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
|
||||
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
|
||||
default:
|
||||
@@ -465,7 +469,8 @@ func (b *metricQueryStatementBuilder) buildSpatialAggregationCTE(
|
||||
if query.Aggregations[0].ValueFilter != nil {
|
||||
sb.Where(sb.EQ("per_series_value", query.Aggregations[0].ValueFilter.Value))
|
||||
}
|
||||
sb.GroupBy("ALL")
|
||||
sb.GroupBy("ts")
|
||||
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||
|
||||
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
|
||||
|
||||
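Note: two threads run through the metric-builder hunks above. First, every `GROUP BY ALL` becomes the explicit columns each CTE actually selects (`fingerprint`, `ts`, and the group-by keys, as appropriate). Second, the cumulative/unspecified path renames its WINDOW from `increase_window` to `rate_window`, so the definition matches the `OVER rate_window` references visible in the per-series rate expressions of the expected queries below. A stripped-down sketch of the window wiring (the `__inner` subquery name is hypothetical):

package main

import "fmt"

func main() {
    inner := "SELECT fingerprint, ts, max(value) AS per_series_value FROM __inner GROUP BY fingerprint, ts"
    // Every lagInFrame(...) OVER rate_window in the outer SELECT binds here.
    from := fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", inner)
    fmt.Println(from)
}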
@@ -49,7 +49,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
    Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
    Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
@@ -82,7 +82,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
    Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
    Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000)},
},
expectedErr: nil,
@@ -114,7 +114,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
    Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
    Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_latency", uint64(1747947419000), uint64(1747983448000)},
},
expectedErr: nil,
@@ -147,7 +147,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
    Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
    Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983448000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
@@ -176,7 +176,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
    Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
    Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "http_server_duration_bucket", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,

@@ -11,6 +11,7 @@ import (
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/huandu/go-sqlbuilder"
    "golang.org/x/exp/maps"
)

type conditionBuilder struct {
@@ -129,10 +130,10 @@ func (c *conditionBuilder) conditionFor(
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
    // if the field is intrinsic, it always exists
    if slices.Contains(IntrinsicFields, tblFieldName) ||
        slices.Contains(CalculatedFields, tblFieldName) ||
        slices.Contains(IntrinsicFieldsDeprecated, tblFieldName) ||
        slices.Contains(CalculatedFieldsDeprecated, tblFieldName) {
    if slices.Contains(maps.Keys(IntrinsicFields), tblFieldName) ||
        slices.Contains(maps.Keys(CalculatedFields), tblFieldName) ||
        slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), tblFieldName) ||
        slices.Contains(maps.Keys(CalculatedFieldsDeprecated), tblFieldName) {
        return "true", nil
    }

@@ -205,10 +206,10 @@ func (c *conditionBuilder) ConditionFor(
if operator.AddDefaultExistsFilter() {
    // skip adding exists filter for intrinsic fields
    field, _ := c.fm.FieldFor(ctx, key)
    if slices.Contains(IntrinsicFields, field) ||
        slices.Contains(IntrinsicFieldsDeprecated, field) ||
        slices.Contains(CalculatedFields, field) ||
        slices.Contains(CalculatedFieldsDeprecated, field) {
    if slices.Contains(maps.Keys(IntrinsicFields), field) ||
        slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
        slices.Contains(maps.Keys(CalculatedFields), field) ||
        slices.Contains(maps.Keys(CalculatedFieldsDeprecated), field) {
        return condition, nil
    }

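Note: with the registries now maps, the membership checks above become `slices.Contains(maps.Keys(...), name)`, which rebuilds the key slice on every call. A plain map lookup expresses the same membership test in O(1) with no allocation — a possible follow-up, not what this diff does:

package main

import "fmt"

func main() {
    intrinsicFields := map[string]struct{}{"trace_id": {}, "span_id": {}, "duration_nano": {}}
    tblFieldName := "duration_nano"
    // Equivalent to slices.Contains(maps.Keys(intrinsicFields), tblFieldName).
    if _, ok := intrinsicFields[tblFieldName]; ok {
        fmt.Println(tblFieldName, "is intrinsic")
    }
}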
@@ -3,89 +3,320 @@ package telemetrytraces
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"

var (
    IntrinsicFields = []string{
        "trace_id",
        "span_id",
        "trace_state",
        "parent_span_id",
        "flags",
        "name",
        "kind",
        "kind_string",
        "duration_nano",
        "status_code",
        "status_message",
        "status_code_string",
    IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
        "trace_id": {
            Name: "trace_id",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "span_id": {
            Name: "span_id",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "trace_state": {
            Name: "trace_state",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "parent_span_id": {
            Name: "parent_span_id",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "flags": {
            Name: "flags",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "name": {
            Name: "name",
            Description: "Name of the span",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "kind": {
            Name: "kind",
            Description: "Span kind enum (number). Use `kind_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "kind_string": {
            Name: "kind_string",
            Description: "Span kind enum (string). Known values are ['Client', 'Server', 'Internal', 'Producer', 'Consumer']. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "duration_nano": {
            Name: "duration_nano",
            Description: "Span duration",
            Unit: "ns",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "status_code": {
            Name: "status_code",
            Description: "Span status code enum (number). Use `status_code_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "status_message": {
            Name: "status_message",
            Description: "Span status message. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "status_code_string": {
            Name: "status_code_string",
            Description: "Span status code enum (string). Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
    }
    IntrinsicFieldsDeprecated = []string{
        "traceID",
        "spanID",
        "parentSpanID",
        "spanKind",
        "durationNano",
        "statusCode",
        "statusMessage",
        "statusCodeString",
    IntrinsicFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{
        "traceID": {
            Name: "traceID",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "spanID": {
            Name: "spanID",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "parentSpanID": {
            Name: "parentSpanID",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "spanKind": {
            Name: "spanKind",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "durationNano": {
            Name: "durationNano",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "statusCode": {
            Name: "statusCode",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "statusMessage": {
            Name: "statusMessage",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "statusCodeString": {
            Name: "statusCodeString",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
    }

    CalculatedFields = []string{
        "response_status_code",
        "external_http_url",
        "http_url",
        "external_http_method",
        "http_method",
        "http_host",
        "db_name",
        "db_operation",
        "has_error",
        "is_remote",
    CalculatedFields = map[string]telemetrytypes.TelemetryFieldKey{
        "response_status_code": {
            Name: "response_status_code",
            Description: "Derived response status code from the HTTP/RPC status code attributes. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#response_status_code)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "external_http_url": {
            Name: "external_http_url",
            Description: "The hostname of the external HTTP URL. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_url)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "http_url": {
            Name: "http_url",
            Description: "HTTP URL of the request. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "external_http_method": {
            Name: "external_http_method",
            Description: "HTTP request method of client spans. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_method)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "http_method": {
            Name: "http_method",
            Description: "The HTTP request method. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_method)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "http_host": {
            Name: "http_host",
            Description: "The HTTP host or server address. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_host)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "db_name": {
            Name: "db_name",
            Description: "The database name or namespace. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_name)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "db_operation": {
            Name: "db_operation",
            Description: "The database operation being performed. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_operation)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "has_error": {
            Name: "has_error",
            Description: "Whether the span has an error. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#has_error)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeBool,
        },
        "is_remote": {
            Name: "is_remote",
            Description: "Whether the span is remote. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#is_remote)",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeBool,
        },
    }

    CalculatedFieldsDeprecated = []string{
        "responseStatusCode",
        "externalHttpUrl",
        "httpUrl",
        "externalHttpMethod",
        "httpMethod",
        "httpHost",
        "dbName",
        "dbOperation",
        "hasError",
        "isRemote",
    CalculatedFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{
        "responseStatusCode": {
            Name: "responseStatusCode",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        "externalHttpUrl": {
            Name: "externalHttpUrl",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "httpUrl": {
            Name: "httpUrl",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "externalHttpMethod": {
            Name: "externalHttpMethod",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "httpMethod": {
            Name: "httpMethod",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "httpHost": {
            Name: "httpHost",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "dbName": {
            Name: "dbName",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "dbOperation": {
            Name: "dbOperation",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        "hasError": {
            Name: "hasError",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeBool,
        },
        "isRemote": {
            Name: "isRemote",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeBool,
        },
    }
    SpanSearchScopeRoot = "isroot"
    SpanSearchScopeEntryPoint = "isentrypoint"

    DefaultFields = []telemetrytypes.TelemetryFieldKey{
        {
            Name: "timestamp",
            FieldContext: telemetrytypes.FieldContextSpan,
            Name: "timestamp",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        {
            Name: "span_id",
            FieldContext: telemetrytypes.FieldContextSpan,
            Name: "span_id",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        {
            Name: "trace_id",
            FieldContext: telemetrytypes.FieldContextSpan,
            Name: "trace_id",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        {
            Name: "name",
            FieldContext: telemetrytypes.FieldContextSpan,
            Name: "name",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
        {
            Name: "service.name",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextResource,
            FieldDataType: telemetrytypes.FieldDataTypeString,
            Materialized: true,
        },
        {
            Name: "duration_nano",
            FieldContext: telemetrytypes.FieldContextSpan,
            Name: "duration_nano",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeNumber,
        },
        {
            Name: "response_status_code",
            FieldContext: telemetrytypes.FieldContextSpan,
            Name: "response_status_code",
            Signal: telemetrytypes.SignalTraces,
            FieldContext: telemetrytypes.FieldContextSpan,
            FieldDataType: telemetrytypes.FieldDataTypeString,
        },
    }
)

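Note: the hunk above is the heart of this change — `IntrinsicFields`, `CalculatedFields`, and their deprecated variants go from flat `[]string` name lists to `map[string]telemetrytypes.TelemetryFieldKey`, so every entry now carries its signal, context, data type, and often a description and unit, and callers (like the metastore lookups earlier) resolve full metadata in a single indexed access. Usage sketch (the import path is assumed from the repo layout):

package main

import (
    "fmt"

    "github.com/SigNoz/signoz/pkg/telemetrytraces"
)

func main() {
    // One map lookup now yields full key metadata, not just a name.
    if field, ok := telemetrytraces.IntrinsicFields["duration_nano"]; ok {
        fmt.Printf("%s (%s): %s\n", field.Name, field.Unit, field.Description)
        // duration_nano (ns): Span duration
    }
}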
@@ -305,10 +305,11 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(

// Constrain the main query to the rows that appear in the CTE.
tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))

// Group by all dimensions
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
    rewriter := querybuilder.NewHavingExpressionRewriter()
    rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
@@ -323,7 +324,8 @@
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)

} else {
    sb.GroupBy("ALL")
    sb.GroupBy("ts")
    sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
    if query.Having != nil && query.Having.Expression != "" {
        rewriter := querybuilder.NewHavingExpressionRewriter()
        rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
@@ -412,7 +414,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
}

// Group by dimensions
sb.GroupBy("ALL")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" && !skipHaving {
@@ -521,7 +523,7 @@ func (b *traceQueryStatementBuilder) maybeAttachResourceFilter(
    return "", nil, err
}

sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)")
sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")

return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil
}

@@ -59,7 +59,7 @@ func TestStatementBuilder(t *testing.T) {
    },
},
expected: qbtypes.Statement{
    Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
    Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
    Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,

@@ -104,6 +104,8 @@ func detectPlatform() string {
    return "heroku"
case os.Getenv("RENDER") != "" || os.Getenv("RENDER_SERVICE_ID") != "":
    return "render"
case os.Getenv("COOLIFY_RESOURCE_UUID") != "":
    return "coolify"
}

// Try to detect cloud provider through metadata endpoints
@@ -151,5 +153,16 @@
    }
}

// Hetzner metadata
if req, err := http.NewRequest(http.MethodGet, "http://169.254.169.254/hetzner/v1/metadata", nil); err == nil {
    if resp, err := client.Do(req); err == nil {
        resp.Body.Close()
        if resp.StatusCode == 200 {
            return "hetzner"
        }
    }
}

return "unknown"
}

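Note: the Hetzner branch follows the existing metadata-probe pattern in `detectPlatform`: request the link-local metadata address and treat an HTTP 200 as a positive signal, closing the body either way. A generic standalone version (the short-timeout `client` is assumed from the surrounding function; the endpoint URL is the one in the diff):

package main

import (
    "fmt"
    "net/http"
    "time"
)

// probeMetadata reports whether a cloud metadata endpoint answers 200 OK.
func probeMetadata(client *http.Client, url string) bool {
    req, err := http.NewRequest(http.MethodGet, url, nil)
    if err != nil {
        return false
    }
    resp, err := client.Do(req)
    if err != nil {
        return false
    }
    resp.Body.Close()
    return resp.StatusCode == http.StatusOK
}

func main() {
    client := &http.Client{Timeout: 500 * time.Millisecond}
    if probeMetadata(client, "http://169.254.169.254/hetzner/v1/metadata") {
        fmt.Println("hetzner")
    }
}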