Compare commits


40 Commits

Author SHA1 Message Date
nikhilmantri0902
1541734542 feat: match query building semantics for tags of 0 length 2025-08-18 15:26:12 +05:30
nikhilmantri0902
46e5b407f7 fix: added necessary 0 numCalls handling 2025-08-18 13:53:59 +05:30
nikhilmantri0902
f2c3946101 fix: added necessary 0 numCalls handling 2025-08-18 13:52:55 +05:30
nikhilmantri0902
4dca46de40 chore: added debugging funcs 2025-08-18 13:24:28 +05:30
nikhilmantri0902
6f420abe27 chore: removed comparison block 2025-08-18 13:09:12 +05:30
nikhilmantri0902
1d9b457af6 chore: removed comparison block 2025-08-18 12:40:51 +05:30
nikhilmantri0902
d437998750 chore: tuple issue fixed 2025-08-18 12:13:08 +05:30
nikhilmantri0902
e02d0cdd98 chore: tuple issue fixed 2025-08-18 11:44:45 +05:30
nikhilmantri0902
1ad4a6699a chore: added debug logs 2025-08-18 11:24:29 +05:30
nikhilmantri0902
00ae45022b fix: added filtering based on both name and serviceName pairs 2025-08-18 11:20:08 +05:30
nikhilmantri0902
6f4a965c6d fix: added filtering based on both name and serviceName pairs 2025-08-18 11:19:01 +05:30
nikhilmantri0902
4c29b03577 chore: added logs for debugging 2025-08-15 14:15:20 +05:30
nikhilmantri0902
ea1409bc4f fix: added query optimization 2025-08-14 20:12:48 +05:30
Abhi kumar
0e3ac2a179 fix: added loading indicators in traces pages when running query (#8782) 2025-08-14 13:53:39 +05:30
Amlan Kumar Nandy
249f8be845 fix: resolve infinite loading issue in metric view in messaging queues (#8779) 2025-08-14 04:16:39 +00:00
primus-bot[bot]
9c952942ad chore(release): bump to v0.92.1 (#8780)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-08-13 15:10:08 +05:30
Nityananda Gohain
dac46d82ff fix: check ch version (#8778)
Check the ClickHouse version before using the secondary_indices_enable_bulk_filtering setting.
2025-08-13 14:57:25 +05:30
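The actual check lives in the Go backend and is not shown here; purely as an illustration of the idea, a minimal TypeScript sketch using the @clickhouse/client package, with a placeholder version cutoff since the changeset does not state the real minimum:

```typescript
import { createClient } from '@clickhouse/client';

// Placeholder cutoff: the real minimum ClickHouse version for
// secondary_indices_enable_bulk_filtering is not stated in this changeset.
const MIN_MAJOR = 25;

async function bulkFilteringSupported(url: string): Promise<boolean> {
  const client = createClient({ url });
  const rs = await client.query({
    query: 'SELECT version() AS v',
    format: 'JSONEachRow',
  });
  const rows = (await rs.json()) as Array<{ v: string }>;
  await client.close();
  const major = Number(rows[0].v.split('.')[0]);
  return major >= MIN_MAJOR;
}
```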
primus-bot[bot]
802ce6de01 chore(release): bump to v0.92.0 (#8776)
#### Summary
 - Release SigNoz v0.92.0
 - Bump SigNoz OTel Collector to v0.129.0
2025-08-13 12:17:43 +05:30
dependabot[bot]
6853f0c99d chore(deps): bump urllib3 from 2.4.0 to 2.5.0 in /tests/integration (#8296) 2025-08-13 04:58:39 +00:00
Srikanth Chekuri
3f8a2870e4 fix: key CONTAINS value doesn't work for numeric values (#8768) 2025-08-13 09:59:28 +05:30
Srikanth Chekuri
5fa70ea802 chore: use *_keys tables instead of tag_attributes_v2 for suggestions (#8753) 2025-08-12 18:10:35 +05:30
Yunus M
3a952fa330 fix: pass metric name to get value suggestions api (#8671)
* fix: pass metric name to get value suggestions api

* feat: add source to get value suggestions
2025-08-11 08:10:31 +00:00
Yunus M
6d97db1d9d fix: use localstorage value to avoid waiting for pref api to set the toggle state, add shortcut (#8751) 2025-08-11 10:26:27 +05:30
Shaheer Kochai
5412e7f70b feat: show count in span details drawer tabs (#8702)
* feat: show event count in Events tab of SpanDetailsDrawer

* feat: add count badges to all SpanDetailsDrawer tabs
2025-08-10 05:39:20 +00:00
aniketio-ctrl
8e5cb9046d fix(alert): added querier v5 in test notify (#8749) 2025-08-08 18:01:23 +05:30
Srikanth Chekuri
760eabb2dc chore: do not return err for meter source temporality (#8750) 2025-08-08 17:39:06 +05:30
Srikanth Chekuri
35ddaaa2fc chore: add env to override logs keys table name (#8748) 2025-08-08 11:34:09 +00:00
nikhilmantri0902
a51ee66c02 Improvement: Added Otel-collector setup for local dev environment (#8701)
* feat(devenv): add otel-collector support for local development

- Add .devenv/docker/otel-collector/ with compose.yaml and config
- Add devenv-otel-collector and devenv-up targets to Makefile
- Update development.md with otel-collector setup instructions
- Add README.md with usage documentation for otel-collector setup

This enables developers to run the complete SigNoz stack locally,
including the OpenTelemetry Collector for receiving telemetry data
on ports 4317 (gRPC) and 4318 (HTTP).

* docs: improve collector setup wordings

* chore: fixed comment and service name

* chore: docker service name updated otel-collector -> signoz-otel-collector
2025-08-08 16:54:05 +05:30
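To exercise this setup from an application, one option (a sketch assuming the standard Node.js OpenTelemetry packages, which are not part of this changeset) is to point an OTLP trace exporter at the collector started by make devenv-up:

```typescript
import { NodeSDK } from '@opentelemetry/sdk-node';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';

// The collector listens on 4317 (gRPC) and 4318 (HTTP); this uses HTTP.
const sdk = new NodeSDK({
  serviceName: 'devenv-test-app', // hypothetical service name
  traceExporter: new OTLPTraceExporter({
    url: 'http://localhost:4318/v1/traces',
  }),
});

sdk.start();
```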
Yunus M
75d189162b feat: migrate old saved columns keys to name (#8747) 2025-08-08 14:41:34 +05:30
Yunus M
932918e3a4 feat: meter explorer (#8741)
* feat: meter explorer

* feat: meter explorer

* fix: remove meter as data source

* fix: change meter-explorer to meter - quick filter

* chore: delete test file

* fix: failing test cases
2025-08-08 12:03:26 +05:30
Vibhu Pandey
aa3bc16dcb test(integration): bump requests to 2.32.4 (#8743) 2025-08-08 00:25:38 +05:30
Yunus M
b5098e00a3 fix: logs explorer - should have at least 1 column, discard empty key columns (#8740) 2025-08-07 20:17:34 +05:30
Abhi kumar
20dc561bfe fix: added fix for query becoming empty on time change (#8739) 2025-08-07 19:42:07 +05:30
Nityananda Gohain
99bbb87738 chore: add option to ignore data skipping indices (#8738)
* chore: add option to ignore data skipping indices

* fix: update example
2025-08-07 13:21:17 +00:00
Vikrant Gupta
f1ce93171c feat(telemetrymeter): add support for telemetry meter (#8667)
* feat(telemetry/meter): added base setup for telemetry meter signal

* feat(telemetry/meter): added metadata setup for meter

* feat(telemetry/meter): fix stmnt builder tests

* feat(telemetry/meter): test query range API fixes

* feat(telemetry/meter): improve error messages

* feat(telemetrymeter): step interval improvements

* feat(telemetrymeter): metadata changes and aggregate attribute changes

* feat(telemetrymeter): metadata changes and aggregate attribute changes

* feat(telemetrymeter): deprecate the signal and use aggregation instead

* feat(telemetrymeter): deprecate the signal and use aggregation instead

* feat(telemetrymeter): deprecate the signal and use aggregation instead

* feat(telemetrymeter): cleanup the types

* feat(telemetrymeter): introduce source for query

* feat(telemetrymeter): better naming for source in metadata

* feat(telemetrymeter): added quick filters for meter explorer

* feat(telemetrymeter): incorporate the new changes to stmnt builder

* feat(telemetrymeter): add the statement builder for the ranged cache queries

* feat(telemetrymeter): use meter aggregate keys

* feat(telemetrymeter): use meter aggregate keys

* feat(telemetrymeter): remove meter from complete bools

* feat(telemetrymeter): remove meter from complete bools

* feat(telemetrymeter): update the quick filters to use meter
2025-08-07 16:50:37 +05:30
Srikanth Chekuri
92794389d6 fix: limit keys for empty search key (#8728) 2025-08-07 00:34:44 +05:30
Srikanth Chekuri
bd02848623 chore: add sql migration for dashboards, alerts, and saved views (#8642)
## 📄 Summary

To reliably migrate the alerts and dashboards, we need access to the telemetrystore to fetch some metadata, and while running the migration we need to log some information so that issues can be fixed later.

Key changes:
- Modified the migration to include telemetrystore and a logging provider (open to using a standard logger instead)
- To avoid the previous issues with imported dashboards failing during migration, I've ensured that imported JSON files are automatically transformed when migration is active
- Implemented detailed logic to handle dashboard migration cleanly and prevent unnecessary errors
- Separated the core migration logic from SQL migration code, as users from the dot metrics migration requested shareable code snippets for local migrations. This modular approach allows others to easily reuse the migration functionality.

Known: I haven't registered the migration in this PR yet and will not merge it yet, so please review with that in mind.
2025-08-06 23:05:39 +05:30
Abhi kumar
b5016b061b fix: added fix for key suggestions (#8727) 2025-08-06 11:48:43 +00:00
Abhi kumar
c308e8668c fix: added fix for query addon lightmode ui (#8725) 2025-08-06 16:21:35 +05:30
SagarRajput-7
41ee4176ad fix: fixed metric aggregation and value retention inconsistency in edit mode (#8718) 2025-08-06 13:55:16 +05:30
128 changed files with 4099 additions and 902 deletions

View File

@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.128.2
image: signoz/signoz-schema-migrator:v0.129.0
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.128.2
image: signoz/signoz-schema-migrator:v0.129.0
container_name: schema-migrator-async
command:
- async

View File

@@ -0,0 +1,29 @@
services:
signoz-otel-collector:
image: signoz/signoz-otel-collector:v0.128.2
container_name: signoz-otel-collector-dev
command:
- --config=/etc/otel-collector-config.yaml
- --feature-gates=-pkg.translator.prometheus.NormalizeName
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
- LOW_CARDINAL_EXCEPTION_GROUPING=false
ports:
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
- "13133:13133" # health check extension
healthcheck:
test:
- CMD
- wget
- --spider
- -q
- localhost:13133
interval: 30s
timeout: 5s
retries: 3
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"

View File

@@ -0,0 +1,96 @@
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-collector
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
enable_exp_histogram: true
dimensions:
- name: service.namespace
default: default
- name: deployment.environment
default: default
# This is added to ensure the uniqueness of the timeseries
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
clickhousetraces:
datasource: tcp://host.docker.internal:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
signozclickhousemetrics:
dsn: tcp://host.docker.internal:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://host.docker.internal:9000/signoz_logs
timeout: 10s
use_new_schema: true
service:
telemetry:
logs:
encoding: json
extensions:
- health_check
- pprof
pipelines:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]

View File

@@ -61,6 +61,17 @@ devenv-postgres: ## Run postgres in devenv
@cd .devenv/docker/postgres; \
docker compose -f compose.yaml up -d
.PHONY: devenv-signoz-otel-collector
devenv-signoz-otel-collector: ## Run signoz-otel-collector in devenv (requires clickhouse to be running)
@cd .devenv/docker/signoz-otel-collector; \
docker compose -f compose.yaml up -d
.PHONY: devenv-up
devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhouse and signoz-otel-collector for local development
@echo "Development environment is ready!"
@echo " - ClickHouse: http://localhost:8123"
@echo " - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"
##############################################################
# go commands
##############################################################

View File

@@ -121,6 +121,8 @@ telemetrystore:
timeout_before_checking_execution_speed: 0
max_bytes_to_read: 0
max_result_rows: 0
ignore_data_skipping_indices: ""
secondary_indices_enable_bulk_filtering: false
##################### Prometheus #####################
prometheus:

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.91.0
image: signoz/signoz:v0.92.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -207,7 +207,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.128.2
image: signoz/signoz-otel-collector:v0.129.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -231,7 +231,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.128.2
image: signoz/signoz-schema-migrator:v0.129.0
deploy:
restart_policy:
condition: on-failure

View File

@@ -115,7 +115,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.91.0
image: signoz/signoz:v0.92.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -148,7 +148,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.128.2
image: signoz/signoz-otel-collector:v0.129.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -174,7 +174,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.128.2
image: signoz/signoz-schema-migrator:v0.129.0
deploy:
restart_policy:
condition: on-failure

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.91.0}
image: signoz/signoz:${VERSION:-v0.92.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -211,7 +211,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -237,7 +237,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-sync
command:
- sync
@@ -248,7 +248,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.91.0}
image: signoz/signoz:${VERSION:-v0.92.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -143,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -165,7 +165,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-sync
command:
- sync
@@ -177,7 +177,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-async
command:
- async

View File

@@ -44,20 +44,35 @@ Before diving in, make sure you have these tools installed:
SigNoz has three main components: Clickhouse, Backend, and Frontend. Let's set them up one by one.
### 1. Setting up Clickhouse
### 1. Setting up ClickHouse
First, we need to get Clickhouse running:
First, we need to get ClickHouse running:
```bash
make devenv-clickhouse
```
This command:
- Starts Clickhouse in a single-shard, single-replica cluster
- Starts ClickHouse in a single-shard, single-replica cluster
- Sets up Zookeeper
- Runs the latest schema migrations
### 2. Starting the Backend
### 2. Setting up SigNoz OpenTelemetry Collector
Next, start the OpenTelemetry Collector to receive telemetry data:
```bash
make devenv-signoz-otel-collector
```
This command:
- Starts the SigNoz OpenTelemetry Collector
- Listens on port 4317 (gRPC) and 4318 (HTTP) for incoming telemetry data
- Forwards data to ClickHouse for storage
> 💡 **Quick Setup**: Use `make devenv-up` to start both ClickHouse and OTel Collector together
### 3. Starting the Backend
1. Run the backend server:
```bash
@@ -73,7 +88,7 @@ This command:
> 💡 **Tip**: The API server runs at `http://localhost:8080/` by default
### 3. Setting up the Frontend
### 4. Setting up the Frontend
1. Navigate to the frontend directory:
```bash
@@ -98,3 +113,25 @@ This command:
> 💡 **Tip**: `yarn dev` will automatically rebuild when you make changes to the code
Now you're all set to start developing! Happy coding! 🎉
## Verifying Your Setup
To verify everything is working correctly:
1. **Check ClickHouse**: `curl http://localhost:8123/ping` (should return "Ok.")
2. **Check OTel Collector**: `curl http://localhost:13133` (should return health status)
3. **Check Backend**: `curl http://localhost:8080/api/v1/health` (should return `{"status":"ok"}`)
4. **Check Frontend**: Open `http://localhost:3301` in your browser
## How to send test data?
You can now send telemetry data to your local SigNoz instance:
- **OTLP gRPC**: `localhost:4317`
- **OTLP HTTP**: `localhost:4318`
For example, using `curl` to send a test trace:
```bash
curl -X POST http://localhost:4318/v1/traces \
-H "Content-Type: application/json" \
-d '{"resourceSpans":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"test-service"}}]},"scopeSpans":[{"spans":[{"traceId":"12345678901234567890123456789012","spanId":"1234567890123456","name":"test-span","startTimeUnixNano":"1609459200000000000","endTimeUnixNano":"1609459201000000000"}]}]}]}'
```
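The same request from Node 18+ (which ships fetch) is just a restatement of the curl call above:

```typescript
// Node 18+ restatement of the curl example above: same payload, same endpoint.
const payload = {
  resourceSpans: [
    {
      resource: {
        attributes: [
          { key: 'service.name', value: { stringValue: 'test-service' } },
        ],
      },
      scopeSpans: [
        {
          spans: [
            {
              traceId: '12345678901234567890123456789012',
              spanId: '1234567890123456',
              name: 'test-span',
              startTimeUnixNano: '1609459200000000000',
              endTimeUnixNano: '1609459201000000000',
            },
          ],
        },
      ],
    },
  ],
};

await fetch('http://localhost:4318/v1/traces', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(payload),
});
```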

View File

@@ -8,14 +8,15 @@ import {
export const getValueSuggestions = (
props: QueryKeyValueRequestProps,
): Promise<AxiosResponse<QueryKeyValueSuggestionsResponseProps>> => {
const { signal, key, searchText, signalSource } = props;
const { signal, key, searchText, signalSource, metricName } = props;
const encodedSignal = encodeURIComponent(signal);
const encodedKey = encodeURIComponent(key);
const encodedMetricName = encodeURIComponent(metricName || '');
const encodedSearchText = encodeURIComponent(searchText);
const encodedSource = encodeURIComponent(signalSource || '');
return axios.get(
`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}&source=${encodedSource}`,
`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&source=${encodedSource}`,
);
};
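For context, a hypothetical call and the URL it now produces (the metric name is invented for illustration):

```typescript
// Hypothetical usage; import path and metric name are illustrative.
getValueSuggestions({
  signal: 'metrics',
  key: 'service.name',
  searchText: 'front',
  signalSource: '',
  metricName: 'http_server_duration',
});
// -> GET /fields/values?signal=metrics&name=service.name
//        &searchText=front&metricName=http_server_duration&source=
```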

View File

@@ -385,7 +385,7 @@ export function convertV5ResponseToLegacy(
data: {
resultType: 'scalar',
result: webTables,
warnings: v5Data?.data?.warnings || [],
warnings: v5Data?.data?.warning || [],
},
warning: v5Data?.warning || undefined,
},

View File

@@ -127,6 +127,7 @@
box-sizing: border-box;
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.6);
font-family: 'Space Mono', monospace !important;
color: var(--bg-vanilla-100) !important;
ul {
width: 100% !important;
@@ -162,7 +163,6 @@
overflow: hidden;
font-family: 'Space Mono', monospace !important;
color: var(--bg-vanilla-100) !important;
.cm-completionIcon {
display: none !important;
@@ -331,13 +331,14 @@
ul {
li {
color: var(--bg-ink-300) !important;
&:hover {
background: var(--bg-vanilla-300) !important;
}
&[aria-selected='true'] {
color: var(--bg-ink-500) !important;
background: var(--bg-vanilla-300) !important;
font-weight: 600 !important;
}
}
}

View File

@@ -271,16 +271,13 @@
ul {
li {
&:hover {
background-color: var(--bg-vanilla-300) !important;
color: var(--bg-ink-500) !important;
font-weight: 600;
}
color: var(--bg-ink-300) !important;
&:hover,
&[aria-selected='true'] {
background: var(--bg-vanilla-300) !important;
color: var(--bg-ink-500) !important;
font-weight: 600;
font-weight: 600 !important;
}
}
}

View File

@@ -585,7 +585,7 @@
&:hover,
&[aria-selected='true'] {
background-color: var(--bg-vanilla-300) !important;
font-weight: 600;
font-weight: 600 !important;
}
}
}

View File

@@ -155,6 +155,7 @@ function QuerySearch({
// Reference to the editor view for programmatic autocompletion
const editorRef = useRef<EditorView | null>(null);
const lastKeyRef = useRef<string>('');
const lastFetchedKeyRef = useRef<string>('');
const lastValueRef = useRef<string>('');
const isMountedRef = useRef<boolean>(true);
@@ -212,6 +213,9 @@ function QuerySearch({
setKeySuggestions([]);
return;
}
lastFetchedKeyRef.current = searchText || '';
const response = await getKeySuggestions({
signal: dataSource,
searchText: searchText || '',
@@ -379,6 +383,7 @@ function QuerySearch({
searchText: sanitizedSearchText,
signal: dataSource,
signalSource: signalSource as 'meter' | '',
metricName: debouncedMetricName ?? undefined,
});
// Skip updates if component unmounted or key changed
@@ -470,6 +475,7 @@ function QuerySearch({
activeKey,
dataSource,
isLoadingSuggestions,
debouncedMetricName,
signalSource,
toggleSuggestions,
],
@@ -815,7 +821,7 @@ function QuerySearch({
option.label.toLowerCase().includes(searchText),
);
if (options.length === 0 && lastKeyRef.current !== searchText) {
if (options.length === 0 && lastFetchedKeyRef.current !== searchText) {
debouncedFetchKeySuggestions(searchText);
}

View File

@@ -54,6 +54,7 @@ const quickFiltersListURL = `${BASE_URL}/api/v1/orgs/me/filters/${SIGNAL}`;
const saveQuickFiltersURL = `${BASE_URL}/api/v1/orgs/me/filters`;
const quickFiltersSuggestionsURL = `${BASE_URL}/api/v3/filter_suggestions`;
const quickFiltersAttributeValuesURL = `${BASE_URL}/api/v3/autocomplete/attribute_values`;
const fieldsValuesURL = `${BASE_URL}/api/v1/fields/values`;
const FILTER_OS_DESCRIPTION = 'os.description';
const FILTER_K8S_DEPLOYMENT_NAME = 'k8s.deployment.name';
@@ -77,7 +78,11 @@ const setupServer = (): void => {
putHandler(await req.json());
return res(ctx.status(200), ctx.json({}));
}),
rest.get(quickFiltersAttributeValuesURL, (_, res, ctx) =>
rest.get(quickFiltersAttributeValuesURL, (req, res, ctx) =>
res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
),
rest.get(fieldsValuesURL, (req, res, ctx) =>
res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
),
);

View File

@@ -23,7 +23,7 @@ export enum SignalType {
LOGS = 'logs',
API_MONITORING = 'api_monitoring',
EXCEPTIONS = 'exceptions',
METER_EXPLORER = 'meter_explorer',
METER_EXPLORER = 'meter',
}
export interface IQuickFiltersConfig {
@@ -54,5 +54,5 @@ export enum QuickFiltersSource {
TRACES_EXPLORER = 'traces-explorer',
API_MONITORING = 'api-monitoring',
EXCEPTIONS = 'exceptions',
METER_EXPLORER = 'meter-explorer',
METER_EXPLORER = 'meter',
}

View File

@@ -6,6 +6,7 @@ export const GlobalShortcuts = {
NavigateToAlerts: 'a+shift',
NavigateToExceptions: 'e+shift',
NavigateToMessagingQueues: 'm+shift',
ToggleSidebar: 'b+shift',
};
export const GlobalShortcutsName = {
@@ -16,6 +17,7 @@ export const GlobalShortcutsName = {
NavigateToAlerts: 'shift+a',
NavigateToExceptions: 'shift+e',
NavigateToMessagingQueues: 'shift+m',
ToggleSidebar: 'shift+b',
};
export const GlobalShortcutsDescription = {
@@ -26,4 +28,5 @@ export const GlobalShortcutsDescription = {
NavigateToAlerts: 'Navigate to alerts page',
NavigateToExceptions: 'Navigate to Exceptions page',
NavigateToMessagingQueues: 'Navigate to Messaging Queues page',
ToggleSidebar: 'Toggle sidebar visibility',
};

View File

@@ -0,0 +1,176 @@
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import logEvent from 'api/common/logEvent';
import { GlobalShortcuts } from 'constants/shortcuts/globalShortcuts';
import { USER_PREFERENCES } from 'constants/userPreferences';
import {
KeyboardHotkeysProvider,
useKeyboardHotkeys,
} from 'hooks/hotkeys/useKeyboardHotkeys';
import { QueryClient, QueryClientProvider } from 'react-query';
// Mock dependencies
jest.mock('api/common/logEvent', () => jest.fn());
// Mock the AppContext
const mockUpdateUserPreferenceInContext = jest.fn();
const SHIFT_B_KEYBOARD_SHORTCUT = '{Shift>}b{/Shift}';
jest.mock('providers/App/App', () => ({
useAppContext: jest.fn(() => ({
userPreferences: [
{
name: USER_PREFERENCES.SIDENAV_PINNED,
value: false,
},
],
updateUserPreferenceInContext: mockUpdateUserPreferenceInContext,
})),
}));
function TestComponent({
mockHandleShortcut,
}: {
mockHandleShortcut: () => void;
}): JSX.Element {
const { registerShortcut } = useKeyboardHotkeys();
registerShortcut(GlobalShortcuts.ToggleSidebar, mockHandleShortcut);
return <div data-testid="test">Test</div>;
}
describe('Sidebar Toggle Shortcut', () => {
let queryClient: QueryClient;
beforeEach(() => {
queryClient = new QueryClient({
defaultOptions: {
queries: {
retry: false,
},
mutations: {
retry: false,
},
},
});
jest.clearAllMocks();
});
afterEach(() => {
jest.clearAllMocks();
});
describe('Global Shortcuts Constants', () => {
it('should have the correct shortcut key combination', () => {
expect(GlobalShortcuts.ToggleSidebar).toBe('b+shift');
});
});
describe('Keyboard Shortcut Registration', () => {
it('should register the sidebar toggle shortcut correctly', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn();
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent mockHandleShortcut={mockHandleShortcut} />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
// Trigger the shortcut
await user.keyboard(SHIFT_B_KEYBOARD_SHORTCUT);
expect(mockHandleShortcut).toHaveBeenCalled();
});
it('should not trigger shortcut in input fields', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn();
function TestComponent(): JSX.Element {
const { registerShortcut } = useKeyboardHotkeys();
registerShortcut(GlobalShortcuts.ToggleSidebar, mockHandleShortcut);
return (
<div>
<input data-testid="input-field" />
<div data-testid="test">Test</div>
</div>
);
}
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
// Focus on input field
const inputField = screen.getByTestId('input-field');
await user.click(inputField);
// Try to trigger shortcut while focused on input
await user.keyboard('{Shift>}b{/Shift}');
// Should not trigger the shortcut
expect(mockHandleShortcut).not.toHaveBeenCalled();
});
});
describe('Sidebar Toggle Functionality', () => {
it('should log the toggle event with correct parameters', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn(() => {
logEvent('Global Shortcut: Sidebar Toggle', {
previousState: false,
newState: true,
});
});
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent mockHandleShortcut={mockHandleShortcut} />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
await user.keyboard(SHIFT_B_KEYBOARD_SHORTCUT);
expect(logEvent).toHaveBeenCalledWith('Global Shortcut: Sidebar Toggle', {
previousState: false,
newState: true,
});
});
it('should update user preference in context', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn(() => {
const save = {
name: USER_PREFERENCES.SIDENAV_PINNED,
value: true,
};
mockUpdateUserPreferenceInContext(save);
});
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent mockHandleShortcut={mockHandleShortcut} />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
await user.keyboard(SHIFT_B_KEYBOARD_SHORTCUT);
expect(mockUpdateUserPreferenceInContext).toHaveBeenCalledWith({
name: USER_PREFERENCES.SIDENAV_PINNED,
value: true,
});
});
});
});

View File

@@ -10,8 +10,10 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import getChangelogByVersion from 'api/changelog/getChangelogByVersion';
import logEvent from 'api/common/logEvent';
import manageCreditCardApi from 'api/v1/portal/create';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import getUserLatestVersion from 'api/v1/version/getLatestVersion';
import getUserVersion from 'api/v1/version/getVersion';
import { AxiosError } from 'axios';
import cx from 'classnames';
import ChangelogModal from 'components/ChangelogModal/ChangelogModal';
import ChatSupportGateway from 'components/ChatSupportGateway/ChatSupportGateway';
@@ -22,10 +24,12 @@ import { Events } from 'constants/events';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import { GlobalShortcuts } from 'constants/shortcuts/globalShortcuts';
import { USER_PREFERENCES } from 'constants/userPreferences';
import SideNav from 'container/SideNav';
import TopNav from 'container/TopNav';
import dayjs from 'dayjs';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
@@ -68,8 +72,10 @@ import {
LicensePlatform,
LicenseState,
} from 'types/api/licensesV3/getActive';
import { UserPreference } from 'types/api/preferences/preference';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { showErrorNotification } from 'utils/error';
import { eventEmitter } from 'utils/getEventEmitter';
import {
getFormattedDate,
@@ -662,10 +668,85 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
</div>
);
const sideNavPinned = userPreferences?.find(
const sideNavPinnedPreference = userPreferences?.find(
(preference) => preference.name === USER_PREFERENCES.SIDENAV_PINNED,
)?.value as boolean;
// Add loading state to prevent layout shift during initial load
const [isSidebarLoaded, setIsSidebarLoaded] = useState(false);
// Get sidebar state from localStorage as fallback until preferences are loaded
const getSidebarStateFromLocalStorage = useCallback((): boolean => {
try {
const storedValue = getLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED);
return storedValue === 'true';
} catch {
return false;
}
}, []);
// Set sidebar as loaded after user preferences are fetched
useEffect(() => {
if (userPreferences !== null) {
setIsSidebarLoaded(true);
}
}, [userPreferences]);
// Use localStorage value as fallback until preferences are loaded
const isSideNavPinned = isSidebarLoaded
? sideNavPinnedPreference
: getSidebarStateFromLocalStorage();
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
const { updateUserPreferenceInContext } = useAppContext();
const { mutate: updateUserPreferenceMutation } = useMutation(
updateUserPreference,
{
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
const handleToggleSidebar = useCallback((): void => {
const newState = !isSideNavPinned;
logEvent('Global Shortcut: Sidebar Toggle', {
previousState: isSideNavPinned,
newState,
});
// Save to localStorage immediately for instant feedback
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, newState.toString());
// Update the context immediately
const save = {
name: USER_PREFERENCES.SIDENAV_PINNED,
value: newState,
};
updateUserPreferenceInContext(save as UserPreference);
// Make the API call in the background
updateUserPreferenceMutation({
name: USER_PREFERENCES.SIDENAV_PINNED,
value: newState,
});
}, [
isSideNavPinned,
updateUserPreferenceInContext,
updateUserPreferenceMutation,
]);
// Register the sidebar toggle shortcut
useEffect(() => {
registerShortcut(GlobalShortcuts.ToggleSidebar, handleToggleSidebar);
return (): void => {
deregisterShortcut(GlobalShortcuts.ToggleSidebar);
};
}, [registerShortcut, deregisterShortcut, handleToggleSidebar]);
const SHOW_TRIAL_EXPIRY_BANNER =
showTrialExpiryBanner && !showPaymentFailedWarning;
const SHOW_WORKSPACE_RESTRICTED_BANNER = showWorkspaceRestricted;
@@ -739,14 +820,14 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
className={cx(
'app-layout',
isDarkMode ? 'darkMode dark' : 'lightMode',
sideNavPinned ? 'side-nav-pinned' : '',
isSideNavPinned ? 'side-nav-pinned' : '',
SHOW_WORKSPACE_RESTRICTED_BANNER ? 'isWorkspaceRestricted' : '',
SHOW_TRIAL_EXPIRY_BANNER ? 'isTrialExpired' : '',
SHOW_PAYMENT_FAILED_BANNER ? 'isPaymentFailed' : '',
)}
>
{isToDisplayLayout && !renderFullScreen && (
<SideNav isPinned={sideNavPinned} />
<SideNav isPinned={isSideNavPinned} />
)}
<div
className={cx('app-content', {

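Distilled, the handler follows a common optimistic-update shape; this paraphrase of the diff uses hypothetical names:

```typescript
// Paraphrase of handleToggleSidebar above: write the new value everywhere
// that gives instant feedback, then persist via the API in the background.
function toggleSidebar(
  isPinned: boolean,
  updateContext: (value: boolean) => void,
  persistPreference: (value: boolean) => void, // the react-query mutation
): void {
  const next = !isPinned;
  localStorage.setItem('SIDENAV_PINNED', String(next)); // instant feedback
  updateContext(next); // in-memory UI state
  persistPreference(next); // background call; onError shows a notification
}
```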
View File

@@ -28,6 +28,7 @@ import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
@@ -313,7 +314,7 @@ function ChartPreview({
featureFlags?.find((flag) => flag.name === FeatureKeys.ANOMALY_DETECTION)
?.active || false;
const isWarning = !!queryResponse.data?.warning?.message;
const isWarning = !isEmpty(queryResponse.data?.warning);
return (
<div className="alert-chart-container" ref={graphRef}>
<ChartContainer>

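The switch from `!!warning?.message` to `!isEmpty(warning)` widens the check, presumably so a warning object without a message still surfaces. lodash semantics, for reference:

```typescript
import { isEmpty } from 'lodash-es';

// lodash-es semantics relevant to the change above:
isEmpty(undefined);      // true  -> no warning indicator
isEmpty({});             // true  -> no warning indicator
isEmpty({ code: 'W1' }); // false -> counts as a warning even without .message
// The old check, !!warning?.message, would miss that last case.
```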
View File

@@ -240,8 +240,18 @@ function FormAlertRules({
const queryData = currentQuery.builder.queryData[index];
const updatedFunctions = updateFunctions(queryData);
queryData.functions = updatedFunctions;
handleSetQueryData(index, queryData);
// Only update if functions actually changed to avoid resetting aggregateAttribute
const currentFunctions = queryData.functions || [];
const functionsChanged = !isEqual(currentFunctions, updatedFunctions);
if (functionsChanged) {
const updatedQueryData = {
...queryData,
functions: updatedFunctions,
};
handleSetQueryData(index, updatedQueryData);
}
}
};

View File

@@ -301,7 +301,7 @@ function GridCardGraph({
widget={widget}
queryResponse={queryResponse}
errorMessage={errorMessage}
isWarning={!!queryResponse.data?.warning?.message}
isWarning={!isEmpty(queryResponse.data?.warning)}
version={version}
threshold={threshold}
headerMenuList={menuList}

View File

@@ -73,6 +73,8 @@ export default function TableRow({
{tableColumns.map((column) => {
if (!column.render) return <td>Empty</td>;
if (!column.key) return null;
const element: ColumnTypeRender<Record<string, unknown>> = column.render(
log[column.key as keyof Record<string, unknown>],
log,
@@ -97,6 +99,7 @@ export default function TableRow({
fontSize={fontSize}
columnKey={column.key as string}
onClick={handleShowLogDetails}
className={column.key as string}
>
{cloneElement(children, props)}
</TableCellStyled>

View File

@@ -136,7 +136,7 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
key={column.key}
fontSize={tableViewProps?.fontSize}
// eslint-disable-next-line react/jsx-props-no-spreading
{...(isDragColumn && { className: 'dragHandler' })}
{...(isDragColumn && { className: `dragHandler ${column.key}` })}
columnKey={column.key as string}
>
{(column.title as string).replace(/^\w/, (c) => c.toUpperCase())}

View File

@@ -1,4 +1,5 @@
import { TelemetryFieldKey } from 'api/v5/v5';
import { isEmpty } from 'lodash-es';
import { IField } from 'types/api/logs/fields';
import {
IBuilderQuery,
@@ -8,11 +9,13 @@ import {
export const convertKeysToColumnFields = (
keys: TelemetryFieldKey[],
): IField[] =>
keys.map((item) => ({
dataType: item.fieldDataType ?? '',
name: item.name,
type: item.fieldContext ?? '',
}));
keys
.filter((item) => !isEmpty(item.name))
.map((item) => ({
dataType: item.fieldDataType ?? '',
name: item.name,
type: item.fieldContext ?? '',
}));
/**
* Determines if a query represents a trace-to-logs navigation
* by checking for the presence of a trace_id filter.

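A quick before/after of the new filter (field values invented for illustration):

```typescript
// Illustrative input; keys without a name previously became empty columns.
convertKeysToColumnFields([
  { name: 'service.name', fieldDataType: 'string', fieldContext: 'resource' },
  { name: '' }, // dropped by the isEmpty(item.name) filter
]);
// -> [{ dataType: 'string', name: 'service.name', type: 'resource' }]
```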
View File

@@ -1,220 +0,0 @@
import { render, screen } from '@testing-library/react';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import * as useOptionsMenuHooks from 'container/OptionsMenu';
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
import * as appContextHooks from 'providers/App/App';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import * as timezoneHooks from 'providers/Timezone';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { useSearchParams } from 'react-router-dom-v5-compat';
import store from 'store';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import { DataSource } from 'types/common/queryBuilder';
import Explorer from '../Explorer';
const mockSetSearchParams = jest.fn();
const queryClient = new QueryClient();
const mockUpdateAllQueriesOperators = jest.fn();
const mockUseQueryBuilderData = {
handleRunQuery: jest.fn(),
stagedQuery: initialQueriesMap[DataSource.METRICS],
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
currentQuery: initialQueriesMap[DataSource.METRICS],
resetQuery: jest.fn(),
redirectWithQueryBuilderData: jest.fn(),
isStagedQueryUpdated: jest.fn(),
handleSetQueryData: jest.fn(),
handleSetFormulaData: jest.fn(),
handleSetQueryItemData: jest.fn(),
handleSetConfig: jest.fn(),
removeQueryBuilderEntityByIndex: jest.fn(),
removeQueryTypeItemByIndex: jest.fn(),
isDefaultQuery: jest.fn(),
};
jest.mock('react-router-dom-v5-compat', () => {
const actual = jest.requireActual('react-router-dom-v5-compat');
return {
...actual,
useSearchParams: jest.fn(),
useNavigationType: (): any => 'PUSH',
};
});
jest.mock('hooks/useDimensions', () => ({
useResizeObserver: (): { width: number; height: number } => ({
width: 800,
height: 400,
}),
}));
jest.mock('react-query', () => ({
...jest.requireActual('react-query'),
useQueryClient: jest.fn().mockReturnValue({
getQueriesData: jest.fn(),
}),
}));
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: jest.fn(),
}),
}));
jest.mock('hooks/useNotifications', () => ({
useNotifications: (): any => ({
notifications: {
error: jest.fn(),
},
}),
}));
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: (): any => ({
globalTime: {
selectedTime: {
startTime: 1713734400000,
endTime: 1713738000000,
},
maxTime: 1713738000000,
minTime: 1713734400000,
},
}),
}));
jest.spyOn(useUpdateDashboardHooks, 'useUpdateDashboard').mockReturnValue({
mutate: jest.fn(),
isLoading: false,
} as any);
jest.spyOn(useOptionsMenuHooks, 'useOptionsMenu').mockReturnValue({
options: {
selectColumns: [],
},
} as any);
jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
timezone: {
offset: 0,
},
browserTimezone: {
offset: 0,
},
} as any);
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
user: {
role: 'admin',
},
activeLicenseV3: {
event_queue: {
created_at: '0',
event: LicenseEvent.NO_EVENT,
scheduled_at: '0',
status: '',
updated_at: '0',
},
license: {
license_key: 'test-license-key',
license_type: 'trial',
org_id: 'test-org-id',
plan_id: 'test-plan-id',
plan_name: 'test-plan-name',
plan_type: 'trial',
plan_version: 'test-plan-version',
},
},
} as any);
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
} as any);
describe('Explorer', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should render Explorer query builder with metrics datasource selected', () => {
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
stagedQuery: initialQueriesMap[DataSource.TRACES],
} as any);
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
mockSetSearchParams,
]);
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(mockUpdateAllQueriesOperators).toHaveBeenCalledWith(
initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
);
});
it('should enable one chart per query toggle when oneChartPerQuery=true in URL', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).toBeChecked();
});
it('should disable one chart per query toggle when oneChartPerQuery=false in URL', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
mockSetSearchParams,
]);
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).not.toBeChecked();
});
});

View File

@@ -13,6 +13,7 @@ import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { isEmpty } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
@@ -135,7 +136,7 @@ function Explorer(): JSX.Element {
/>
</div>
<div className="explore-header-right-actions">
{warning?.message && <WarningPopover warningData={warning} />}
{!isEmpty(warning) && <WarningPopover warningData={warning} />}
<DateTimeSelector showAutoRefresh />
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery(true, true)}

View File

@@ -1,5 +1,6 @@
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
import { initialQueriesMap } from 'constants/queryBuilder';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -110,6 +111,15 @@ export function getMetricDetailsQuery(
isJSON: false,
dataType: DataTypes.String,
},
aggregations: [
{
metricName,
timeAggregation: timeAggregation as TimeAggregation,
spaceAggregation: spaceAggregation as SpaceAggregation,
reduceTo: 'avg',
temporality: '',
},
],
aggregateOperator,
timeAggregation,
spaceAggregation,

View File

@@ -1,6 +1,7 @@
import './MySettings.styles.scss';
import { Radio, RadioChangeEvent, Switch, Tag } from 'antd';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import { AxiosError } from 'axios';
@@ -109,6 +110,9 @@ function MySettings(): JSX.Element {
// Optimistically update the UI
setSideNavPinned(checked);
// Save to localStorage immediately for instant feedback
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, checked.toString());
// Update the context immediately
const save = {
name: USER_PREFERENCES.SIDENAV_PINNED,
@@ -130,6 +134,8 @@ function MySettings(): JSX.Element {
name: USER_PREFERENCES.SIDENAV_PINNED,
value: !checked,
} as UserPreference);
// Also revert localStorage
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, (!checked).toString());
showErrorNotification(notifications, error as AxiosError);
},
},

View File

@@ -6,6 +6,7 @@ import { Card } from 'container/GridCardLayout/styles';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { isEmpty } from 'lodash-es';
import { memo } from 'react';
import { Warning } from 'types/api';
@@ -38,7 +39,7 @@ function WidgetGraph({
<div className="header">
<div className="header-left">
<PlotTag queryType={currentQuery.queryType} panelType={selectedGraph} />
{queryResponse.data?.warning?.message && (
{!isEmpty(queryResponse.data?.warning) && (
<WarningPopover warningData={queryResponse.data?.warning as Warning} />
)}
</div>

View File

@@ -6,6 +6,7 @@ import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKey
import useDebounce from 'hooks/useDebounce';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { has } from 'lodash-es';
import { AllTraceFilterKeyValue } from 'pages/TracesExplorer/Filter/filterUtils';
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useEffect, useMemo, useState } from 'react';
@@ -452,7 +453,9 @@ const useOptionsMenu = ({
() => ({
addColumn: {
isFetching: isSearchedAttributesFetchingV5,
value: preferences?.columns || defaultOptionsQuery.selectColumns,
value:
preferences?.columns.filter((item) => has(item, 'name')) ||
defaultOptionsQuery.selectColumns.filter((item) => has(item, 'name')),
options: optionsFromAttributeKeys || [],
onFocus: handleFocus,
onBlur: handleBlur,

View File

@@ -4,7 +4,7 @@ import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
export type AgregatorFilterProps = Pick<AutoCompleteProps, 'disabled'> & {
query: IBuilderQuery;
onChange: (value: BaseAutocompleteData) => void;
onChange: (value: BaseAutocompleteData, isEditMode?: boolean) => void;
defaultValue?: string;
onSelect?: (value: BaseAutocompleteData) => void;
index?: number;

View File

@@ -137,7 +137,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({
if (metricData) {
// Update the aggregateAttribute with the fetched type information
onChange(metricData);
onChange(metricData, true);
}
}
}, [
@@ -223,11 +223,16 @@ export const AggregatorFilter = memo(function AggregatorFilter({
);
const handleBlur = useCallback(async () => {
if (searchText) {
if (searchText && searchText !== queryAggregation.metricName) {
const aggregateAttributes = await getResponseAttributes();
handleChangeCustomValue(searchText, aggregateAttributes);
}
}, [getResponseAttributes, handleChangeCustomValue, searchText]);
}, [
getResponseAttributes,
handleChangeCustomValue,
searchText,
queryAggregation?.metricName,
]);
const handleChange = useCallback(
(

View File

@@ -1,7 +1,7 @@
.span-details-drawer {
display: flex;
flex-direction: column;
width: 330px;
width: 450px;
border-left: 1px solid var(--bg-slate-400);
overflow-y: auto;
@@ -176,6 +176,34 @@
justify-content: center;
width: 100%;
padding: 4px 8px;
margin-right: 8px;
gap: 4px;
.tab-label {
display: flex;
align-items: center;
}
.count-badge {
display: flex;
align-items: center;
justify-content: center;
min-width: 20px;
height: 20px;
padding: 0 6px;
border-radius: 10px;
background: rgba(171, 189, 255, 0.1);
color: var(--bg-vanilla-400);
font-variant-numeric: lining-nums tabular-nums slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.065px;
text-transform: uppercase;
}
}
.attributes-tab-btn:hover,

View File

@@ -54,7 +54,10 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
icon={<Bookmark size="14" />}
className="attributes-tab-btn"
>
Attributes
<span className="tab-label">Attributes</span>
<span className="count-badge">
{Object.keys(span.tagMap || {}).length}
</span>
</Button>
),
key: 'attributes',
@@ -63,7 +66,8 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
{
label: (
<Button type="text" icon={<Anvil size="14" />} className="events-tab-btn">
Events
<span className="tab-label">Events</span>
<span className="count-badge">{span.event?.length || 0}</span>
</Button>
),
key: 'events',
@@ -82,7 +86,14 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
icon={<Link2 size="14" />}
className="linked-spans-tab-btn"
>
Links
<span className="tab-label">Links</span>
<span className="count-badge">
{
(
span.references?.filter((ref: any) => ref.refType !== 'CHILD_OF') || []
).length
}
</span>
</Button>
),
key: 'linked-spans',

View File

@@ -20,6 +20,7 @@ function TimeSeriesViewContainer({
dataSource = DataSource.TRACES,
isFilterApplied,
setWarning,
setIsLoadingQueries,
}: TimeSeriesViewProps): JSX.Element {
const { stagedQuery, currentQuery, panelType } = useQueryBuilder();
@@ -83,6 +84,14 @@ function TimeSeriesViewContainer({
[data, isValidToConvertToMs],
);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
return (
<TimeSeriesView
isFilterApplied={isFilterApplied}
@@ -101,6 +110,7 @@ interface TimeSeriesViewProps {
dataSource?: DataSource;
isFilterApplied: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}
TimeSeriesViewContainer.defaultProps = {

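The same if/else effect recurs in the list, table, and traces views below; it reduces to a single assignment:

```typescript
// Equivalent to the effect above (and its copies in the other views).
useEffect(() => {
  setIsLoadingQueries(isLoading || isFetching);
}, [isLoading, isFetching, setIsLoadingQueries]);
```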
View File

@@ -372,7 +372,7 @@ function DateTimeSelection({
})),
},
};
return JSON.stringify(updatedCompositeQuery);
return encodeURIComponent(JSON.stringify(updatedCompositeQuery));
}, [currentQuery]);
const onSelectHandler = useCallback(

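Why the wrap matters, with toy values:

```typescript
// Toy illustration of the fix above: raw JSON corrupts a URL query string,
// since '{', '"', and any '&' inside the JSON break parameter parsing.
const raw = JSON.stringify({ queryType: 'builder', panelType: 'graph' });
const bad = `/explorer?compositeQuery=${raw}`;
const good = `/explorer?compositeQuery=${encodeURIComponent(raw)}`;
```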
View File

@@ -49,9 +49,14 @@ import { getListColumns, transformDataWithDate } from './utils';
interface ListViewProps {
isFilterApplied: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}
function ListView({ isFilterApplied, setWarning }: ListViewProps): JSX.Element {
function ListView({
isFilterApplied,
setWarning,
setIsLoadingQueries,
}: ListViewProps): JSX.Element {
const {
stagedQuery,
panelType: panelTypeFromQueryBuilder,
@@ -162,6 +167,14 @@ function ListView({ isFilterApplied, setWarning }: ListViewProps): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [data?.payload, data?.warning]);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
const dataLength =
data?.payload?.data?.newResult?.data?.result[0]?.list?.length;
const totalCount = useMemo(() => dataLength || 0, [dataLength]);

View File

@@ -16,8 +16,10 @@ import { GlobalReducer } from 'types/reducer/globalTime';
function TableView({
setWarning,
setIsLoadingQueries,
}: {
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}): JSX.Element {
const { stagedQuery, panelType } = useQueryBuilder();
@@ -26,7 +28,7 @@ function TableView({
GlobalReducer
>((state) => state.globalTime);
const { data, isLoading, isError, error } = useGetQueryRange(
const { data, isLoading, isFetching, isError, error } = useGetQueryRange(
{
query: stagedQuery || initialQueriesMap.traces,
graphType: panelType || PANEL_TYPES.TABLE,
@@ -49,6 +51,14 @@ function TableView({
},
);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
const queryTableData = useMemo(
() =>
data?.payload?.data?.newResult?.data?.result ||

View File

@@ -40,11 +40,13 @@ import { ActionsContainer, Container } from './styles';
interface TracesViewProps {
isFilterApplied: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}
function TracesView({
isFilterApplied,
setWarning,
setIsLoadingQueries,
}: TracesViewProps): JSX.Element {
const { stagedQuery, panelType } = useQueryBuilder();
const [orderBy, setOrderBy] = useState<string>('timestamp:desc');
@@ -117,6 +119,14 @@ function TracesView({
[responseData],
);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
useEffect(() => {
if (!isLoading && !isFetching && !isError && (tableData || []).length !== 0) {
logEvent('Traces Explorer: Data present', {

View File

@@ -212,7 +212,7 @@ export const useQueryOperations: UseQueryOperations = ({
);
const handleChangeAggregatorAttribute = useCallback(
(value: BaseAutocompleteData): void => {
(value: BaseAutocompleteData, isEditMode?: boolean): void => {
const newQuery: IBuilderQuery = {
...query,
aggregateAttribute: value,
@@ -226,30 +226,32 @@ export const useQueryOperations: UseQueryOperations = ({
handleMetricAggregateAtributeTypes(newQuery.aggregateAttribute);
}
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregateOperator = MetricAggregateOperator.RATE;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
newQuery.aggregateOperator = MetricAggregateOperator.AVG;
newQuery.timeAggregation = MetricAggregateOperator.AVG;
} else {
newQuery.timeAggregation = '';
}
newQuery.spaceAggregation = '';
// Handled query with unknown metric to avoid 400 and 500 errors
// With metric value typed and not available then - time - 'avg', space - 'avg'
// If not typed - time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
if (!isEditMode) {
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregateOperator = MetricAggregateOperator.RATE;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
newQuery.aggregateOperator = MetricAggregateOperator.AVG;
newQuery.timeAggregation = MetricAggregateOperator.AVG;
newQuery.spaceAggregation = MetricAggregateOperator.AVG;
} else {
newQuery.aggregateOperator = MetricAggregateOperator.COUNT;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
newQuery.spaceAggregation = MetricAggregateOperator.SUM;
newQuery.timeAggregation = '';
}
newQuery.spaceAggregation = '';
// Handled query with unknown metric to avoid 400 and 500 errors
// With metric value typed and not available then - time - 'avg', space - 'avg'
// If not typed - time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
newQuery.aggregateOperator = MetricAggregateOperator.AVG;
newQuery.timeAggregation = MetricAggregateOperator.AVG;
newQuery.spaceAggregation = MetricAggregateOperator.AVG;
} else {
newQuery.aggregateOperator = MetricAggregateOperator.COUNT;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
newQuery.spaceAggregation = MetricAggregateOperator.SUM;
}
}
}
}
@@ -262,61 +264,63 @@ export const useQueryOperations: UseQueryOperations = ({
handleMetricAggregateAtributeTypes(newQuery.aggregateAttribute);
}
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
} else {
newQuery.aggregations = [
{
timeAggregation: '',
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
}
newQuery.aggregateOperator = '';
newQuery.spaceAggregation = '';
// Handle queries with an unknown metric to avoid 400 and 500 errors
// If a metric name is typed but not available: time - 'avg', space - 'avg'
// If nothing is typed: time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
if (!isEditMode) {
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.AVG,
spaceAggregation: '',
},
];
} else {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.COUNT,
timeAggregation: '',
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
spaceAggregation: '',
},
];
}
newQuery.aggregateOperator = '';
newQuery.spaceAggregation = '';
// Handle queries with an unknown metric to avoid 400 and 500 errors
// If a metric name is typed but not available: time - 'avg', space - 'avg'
// If nothing is typed: time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
];
} else {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.COUNT,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
];
}
}
}
}


@@ -10,6 +10,7 @@ export const useGetQueryKeyValueSuggestions = ({
signal,
searchText,
signalSource,
metricName,
}: {
key: string;
signal: 'traces' | 'logs' | 'metrics';
@@ -18,17 +19,26 @@ export const useGetQueryKeyValueSuggestions = ({
options?: UseQueryOptions<
SuccessResponse<QueryKeyValueSuggestionsResponseProps> | ErrorResponse
>;
metricName?: string;
}): UseQueryResult<
AxiosResponse<QueryKeyValueSuggestionsResponseProps>,
AxiosError
> =>
useQuery<AxiosResponse<QueryKeyValueSuggestionsResponseProps>, AxiosError>({
queryKey: ['queryKeyValueSuggestions', key, signal, searchText, signalSource],
queryKey: [
'queryKeyValueSuggestions',
key,
signal,
searchText,
signalSource,
metricName,
],
queryFn: () =>
getValueSuggestions({
signal,
key,
searchText: searchText || '',
signalSource: signalSource as 'meter' | '',
metricName: metricName || '',
}),
});


@@ -40,9 +40,7 @@ function validateMetricNameForMetricsDataSource(query: Query): boolean {
// Check if any METRICS data source queries exist
const metricsQueries = queryData.filter(
(queryItem) =>
queryItem.dataSource === DataSource.METRICS ||
queryItem.dataSource === DataSource.METER,
(queryItem) => queryItem.dataSource === DataSource.METRICS,
);
// If no METRICS queries, validation passes


@@ -17,9 +17,9 @@ const getChartData = ({
// eslint-disable-next-line sonarjs/cognitive-complexity
} => {
const uniqueTimeLabels = new Set<number>();
queryData.forEach((data) => {
data.queryData.forEach((query) => {
query.values.forEach((value) => {
queryData?.forEach((data) => {
data.queryData?.forEach((query) => {
query.values?.forEach((value) => {
uniqueTimeLabels.add(value[0]);
});
});
@@ -27,8 +27,8 @@ const getChartData = ({
const labels = Array.from(uniqueTimeLabels).sort((a, b) => a - b);
const response = queryData.map(
({ queryData, query: queryG, legend: legendG }) =>
const response =
queryData?.map(({ queryData, query: queryG, legend: legendG }) =>
queryData.map((e) => {
const { values = [], metric, legend, queryName } = e || {};
const labelNames = getLabelName(
@@ -61,7 +61,7 @@ const getChartData = ({
second: filledDataValues.map((e) => e.second || 0),
};
}),
);
) || [];
const modifiedData = response
.flat()


@@ -490,6 +490,7 @@ export const defaultOutput = {
pageSize: 0,
queryName: 'A',
reduceTo: 'avg',
source: '',
spaceAggregation: 'sum',
stepInterval: 240,
timeAggregation: 'rate',


@@ -28,7 +28,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { isEqual, isNull } from 'lodash-es';
import { isEmpty, isEqual, isNull } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
@@ -361,7 +361,7 @@ function LogsExplorer(): JSX.Element {
/>
}
warningElement={
warning?.message ? <WarningPopover warningData={warning} /> : <div />
!isEmpty(warning) ? <WarningPopover warningData={warning} /> : <div />
}
rightActions={
<RightToolbarActions


@@ -6,7 +6,7 @@ import cx from 'classnames';
import { CardContainer } from 'container/GridCardLayout/styles';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { ChevronDown, ChevronUp } from 'lucide-react';
import { useRef, useState } from 'react';
import { useCallback, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Widgets } from 'types/api/dashboard/getAll';
@@ -129,23 +129,22 @@ function MetricPage(): JSX.Element {
},
];
const [renderedGraphCount, setRenderedGraphCount] = useState(0);
const renderedGraphCountRef = useRef(0);
const hasLoggedRef = useRef(false);
const checkIfDataExists = (isDataAvailable: boolean): void => {
const checkIfDataExists = useCallback((isDataAvailable: boolean): void => {
if (isDataAvailable) {
const newCount = renderedGraphCount + 1;
setRenderedGraphCount(newCount);
renderedGraphCountRef.current += 1;
// Only log when first graph has rendered and we haven't logged yet
if (newCount === 1 && !hasLoggedRef.current) {
if (renderedGraphCountRef.current === 1 && !hasLoggedRef.current) {
logEvent('MQ Kafka: Metric view', {
graphRendered: true,
});
hasLoggedRef.current = true;
}
}
};
}, []);
return (
<div className="metric-page">


@@ -69,6 +69,7 @@ function TracesExplorer(): JSX.Element {
// Get panel type from URL
const panelTypesFromUrl = useGetPanelTypesQueryParam(PANEL_TYPES.LIST);
const [isLoadingQueries, setIsLoadingQueries] = useState<boolean>(false);
const [selectedView, setSelectedView] = useState<ExplorerViews>(() =>
getExplorerViewFromUrl(searchParams, panelTypesFromUrl),
@@ -318,11 +319,12 @@ function TracesExplorer(): JSX.Element {
/>
}
warningElement={
warning?.message ? <WarningPopover warningData={warning} /> : <div />
!isEmpty(warning) ? <WarningPopover warningData={warning} /> : <div />
}
rightActions={
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery(true, true)}
isLoadingQueries={isLoadingQueries}
/>
}
/>
@@ -344,13 +346,21 @@ function TracesExplorer(): JSX.Element {
{selectedView === ExplorerViews.LIST && (
<div className="trace-explorer-list-view">
<ListView isFilterApplied={isFilterApplied} setWarning={setWarning} />
<ListView
isFilterApplied={isFilterApplied}
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
</div>
)}
{selectedView === ExplorerViews.TRACE && (
<div className="trace-explorer-traces-view">
<TracesView isFilterApplied={isFilterApplied} setWarning={setWarning} />
<TracesView
isFilterApplied={isFilterApplied}
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
</div>
)}
@@ -360,13 +370,17 @@ function TracesExplorer(): JSX.Element {
dataSource={DataSource.TRACES}
isFilterApplied={isFilterApplied}
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
</div>
)}
{selectedView === ExplorerViews.TABLE && (
<div className="trace-explorer-table-view">
<TableView setWarning={setWarning} />
<TableView
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
</div>
)}
</div>


@@ -1,6 +1,7 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable no-empty */
import { TelemetryFieldKey } from 'api/v5/v5';
import { has } from 'lodash-es';
import { useEffect, useState } from 'react';
import { DataSource } from 'types/common/queryBuilder';
@@ -8,6 +9,14 @@ import logsLoaderConfig from '../configs/logsLoaderConfig';
import tracesLoaderConfig from '../configs/tracesLoaderConfig';
import { FormattingOptions, Preferences } from '../types';
const migrateColumns = (columns: any): any =>
columns.map((column: any) => {
if (has(column, 'key') && !has(column, 'name')) {
return { ...column, name: column.key };
}
return column;
});
// Generic preferences loader that works with any config
async function preferencesLoader<T>(config: {
priority: readonly string[];
@@ -26,11 +35,16 @@ async function preferencesLoader<T>(config: {
const validColumnsResult = results.find(
({ result }) => result.columns?.length,
);
const validFormattingResult = results.find(({ result }) => result.formatting);
const migratedColumns = validColumnsResult?.result.columns
? migrateColumns(validColumnsResult?.result.columns)
: undefined;
// Combine valid results or fallback to default
const finalResult = {
columns: validColumnsResult?.result.columns || config.default().columns,
columns: migratedColumns || config.default().columns,
formatting:
validFormattingResult?.result.formatting || config.default().formatting,
};


@@ -46,6 +46,7 @@ export interface QueryKeyValueRequestProps {
key: string;
searchText: string;
signalSource?: 'meter' | '';
metricName?: string;
}
export type SignalType = 'traces' | 'logs' | 'metrics';


@@ -427,7 +427,7 @@ export type QueryRangeDataV5 =
export interface QueryRangeResponseV5 {
type: RequestType;
data: QueryRangeDataV5 & { warnings?: string[] };
data: QueryRangeDataV5 & { warning?: string[] };
meta: ExecStats;
warning?: Warning;
}

go.mod

@@ -70,6 +70,7 @@ require (
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.39.0
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac
golang.org/x/net v0.41.0
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.15.0
golang.org/x/text v0.26.0
@@ -283,7 +284,6 @@ require (
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/goleak v1.3.0 // indirect
golang.org/x/mod v0.25.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.33.0 // indirect


@@ -31,6 +31,7 @@ func NewAPI(
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
@@ -39,6 +40,8 @@ func NewAPI(
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
@@ -66,7 +69,7 @@ func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
return
}
keys, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
@@ -74,7 +77,7 @@ func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
response := fieldKeysResponse{
Keys: keys,
Complete: len(keys) < fieldKeySelector.Limit,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
@@ -97,13 +100,13 @@ func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
return
}
allValues, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
@@ -116,11 +119,8 @@ func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
}
response := fieldValuesResponse{
Values: values,
Complete: len(values.StringValues) < fieldValueSelector.Limit &&
len(values.BoolValues) < fieldValueSelector.Limit &&
len(values.NumberValues) < fieldValueSelector.Limit &&
len(values.RelatedValues) < fieldValueSelector.Limit,
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
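Aside: the heuristic removed above (len(values) < fieldValueSelector.Limit) misreports completeness whenever the store returns exactly Limit rows, so the store now reports the flag itself. One common way for a store to compute it is the limit+1 trick; the Go sketch below is a hypothetical illustration of that idea, not SigNoz's actual implementation.

// Hypothetical sketch: fetch one row beyond the limit and check whether it exists.
func keysWithComplete(fetch func(limit int) []string, limit int) ([]string, bool) {
	rows := fetch(limit + 1) // ask for one extra row
	if len(rows) > limit {
		return rows[:limit], false // more data exists beyond the limit
	}
	return rows, true // everything fit, so the result is complete
}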


@@ -7,8 +7,11 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -16,11 +19,12 @@ import (
)
type handler struct {
module dashboard.Module
module dashboard.Module
providerSettings factory.ProviderSettings
}
func NewHandler(module dashboard.Module) dashboard.Handler {
return &handler{module: module}
func NewHandler(module dashboard.Module, providerSettings factory.ProviderSettings) dashboard.Handler {
return &handler{module: module, providerSettings: providerSettings}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -46,6 +50,13 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
if querybuilder.QBV5Enabled {
dashboardMigrator := transition.NewDashboardMigrateV5(handler.providerSettings.Logger, nil, nil)
if req["version"] != "v5" {
dashboardMigrator.Migrate(ctx, req)
}
}
dashboard, err := handler.module.Create(ctx, orgID, claims.Email, valuer.MustNewUUID(claims.UserID), req)
if err != nil {
render.Error(rw, err)


@@ -401,7 +401,7 @@ func (q *querier) run(
}
}
resp.Warning = qbtypes.QueryWarnData{
resp.Warning = &qbtypes.QueryWarnData{
Message: "Encountered warnings",
Url: warningsDocURL,
Warnings: warns,
@@ -515,26 +515,39 @@ func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query
// createRangedQuery creates a copy of the query with a different time range
func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtypes.TimeRange) qbtypes.Query {
// this is called in a goroutine, so we create a copy of the query to avoid race conditions
switch qt := originalQuery.(type) {
case *promqlQuery:
return newPromqlQuery(q.logger, q.promEngine, qt.query, timeRange, qt.requestType, qt.vars)
queryCopy := qt.query.Copy()
return newPromqlQuery(q.logger, q.promEngine, queryCopy, timeRange, qt.requestType, qt.vars)
case *chSQLQuery:
return newchSQLQuery(q.logger, q.telemetryStore, qt.query, qt.args, timeRange, qt.kind, qt.vars)
queryCopy := qt.query.Copy()
argsCopy := make([]any, len(qt.args))
copy(argsCopy, qt.args)
return newchSQLQuery(q.logger, q.telemetryStore, queryCopy, argsCopy, timeRange, qt.kind, qt.vars)
case *builderQuery[qbtypes.TraceAggregation]:
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
specCopy := qt.spec.Copy()
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
case *builderQuery[qbtypes.LogAggregation]:
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
specCopy := qt.spec.Copy()
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
case *builderQuery[qbtypes.MetricAggregation]:
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
specCopy := qt.spec.Copy()
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
if qt.spec.Source == telemetrytypes.SourceMeter {
return newBuilderQuery(q.telemetryStore, q.meterStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
return newBuilderQuery(q.telemetryStore, q.meterStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
}
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
default:
return nil
}
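Aside: the Copy() calls introduced above matter because createRangedQuery is invoked from goroutines; mutating ShiftBy on a spec shared across goroutines is a data race. A self-contained Go sketch of the difference, using a simplified spec struct and assuming the sync package is imported:

// Each goroutine mutates its own copy, as the Copy() calls above arrange;
// writing to the shared value directly from the goroutines would race.
type spec struct{ ShiftBy int64 }

func shiftAll(shared spec, shifts []int64) {
	var wg sync.WaitGroup
	for _, s := range shifts {
		wg.Add(1)
		go func(shift int64) {
			defer wg.Done()
			specCopy := shared       // per-goroutine copy
			specCopy.ShiftBy = shift // safe: only the copy is mutated
			_ = specCopy             // hand the copy to the query, never `shared`
		}(s)
	}
	wg.Wait()
}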


@@ -50,6 +50,7 @@ func newProvider(
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
@@ -58,6 +59,8 @@ func newProvider(
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
@@ -69,12 +72,13 @@ func newProvider(
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
settings,
telemetryMetadataStore,
@@ -89,6 +93,7 @@ func newProvider(
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
@@ -97,6 +102,7 @@ func newProvider(
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
settings,
telemetrylogs.DefaultFullTextColumn,
logFieldMapper,
logConditionBuilder,


@@ -12,7 +12,6 @@ import (
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/prometheus"
@@ -386,7 +385,6 @@ func (r *ClickHouseReader) buildResourceSubQuery(tags []model.TagQueryParam, svc
}
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) {
if r.indexTable == "" {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
}
@@ -395,121 +393,220 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
if apiErr != nil {
return nil, apiErr
}
// Build parallel arrays for arrayZip approach
var ops []string
var svcs []string
serviceOperationsMap := make(map[string][]string)
for svc, opsList := range *topLevelOps {
// Cap operations to 1500 per service (same as original logic)
cappedOps := opsList[:int(math.Min(1500, float64(len(opsList))))]
serviceOperationsMap[svc] = cappedOps
// Add to parallel arrays
for _, op := range cappedOps {
ops = append(ops, op)
svcs = append(svcs, svc)
}
}
fmt.Printf("Operation pairs count: %d\n", len(ops))
// Build resource subquery for all services, but only include our target services
targetServices := make([]string, 0, len(*topLevelOps))
for svc := range *topLevelOps {
targetServices = append(targetServices, svc)
}
resourceSubQuery, err := r.buildResourceSubQueryForServices(queryParams.Tags, targetServices, *queryParams.Start, *queryParams.End)
if err != nil {
zap.L().Error("Error building resource subquery", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
// Build the optimized single query using arrayZip for tuple creation
query := fmt.Sprintf(`
SELECT
resource_string_service$$name AS serviceName,
quantile(0.99)(duration_nano) AS p99,
avg(duration_nano) AS avgDuration,
count(*) AS numCalls,
countIf(statusCode = 2) AS numErrors
FROM %s.%s
WHERE (name, resource_string_service$$name) IN arrayZip(@ops, @svcs)
AND timestamp >= @start
AND timestamp <= @end
AND ts_bucket_start >= @start_bucket
AND ts_bucket_start <= @end_bucket
AND (resource_fingerprint GLOBAL IN %s)
GROUP BY serviceName
ORDER BY numCalls DESC`,
r.TraceDB, r.traceTableName, resourceSubQuery,
)
args := []interface{}{
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
clickhouse.Named("start_bucket", strconv.FormatInt(queryParams.Start.Unix()-1800, 10)),
clickhouse.Named("end_bucket", strconv.FormatInt(queryParams.End.Unix(), 10)),
// Important: wrap slices with clickhouse.Array for IN/array params
clickhouse.Named("ops", ops),
clickhouse.Named("svcs", svcs),
}
fmt.Printf("Query: %s\n", query)
// Execute the single optimized query
rows, err := r.db.Query(ctx, query, args...)
if err != nil {
zap.L().Error("Error executing optimized services query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
defer rows.Close()
// Process results
serviceItems := []model.ServiceItem{}
var wg sync.WaitGroup
// limit the number of concurrent queries to not overload the clickhouse server
sem := make(chan struct{}, 10)
var mtx sync.RWMutex
for svc, ops := range *topLevelOps {
sem <- struct{}{}
wg.Add(1)
go func(svc string, ops []string) {
defer wg.Done()
defer func() { <-sem }()
var serviceItem model.ServiceItem
var numErrors uint64
for rows.Next() {
var serviceItem model.ServiceItem
err := rows.ScanStruct(&serviceItem)
if err != nil {
zap.L().Error("Error scanning service item", zap.Error(err))
continue
}
// Even if the total number of operations within the time range is low while
// the number of top-level operations is high, we want to warn the user about
// the issue with their instrumentation
// Skip services with zero calls (match original behavior)
if serviceItem.NumCalls == 0 {
continue
}
// Add data warning for this service
if ops, exists := serviceOperationsMap[serviceItem.ServiceName]; exists {
serviceItem.DataWarning = model.DataWarning{
TopLevelOps: (*topLevelOps)[svc],
TopLevelOps: ops,
}
}
// default max_query_size = 262144
// Let's assume the average size of an item in `ops` is 50 bytes,
// so we can fit 262144/50 = 5242 items in the `ops` array
// Although we could make it as big as ~5k, we cap the number of items
// in the `ops` array at 1500
// Calculate derived fields
serviceItem.CallRate = float64(serviceItem.NumCalls) / float64(queryParams.Period)
if serviceItem.NumCalls > 0 {
serviceItem.ErrorRate = float64(serviceItem.NumErrors) * 100 / float64(serviceItem.NumCalls)
}
ops = ops[:int(math.Min(1500, float64(len(ops))))]
query := fmt.Sprintf(
`SELECT
quantile(0.99)(duration_nano) as p99,
avg(duration_nano) as avgDuration,
count(*) as numCalls
FROM %s.%s
WHERE resource_string_service$$name = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end`,
r.TraceDB, r.traceTableName,
)
errorQuery := fmt.Sprintf(
`SELECT
count(*) as numErrors
FROM %s.%s
WHERE resource_string_service$$name = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
r.TraceDB, r.traceTableName,
)
args := []interface{}{}
args = append(args,
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
clickhouse.Named("serviceName", svc),
clickhouse.Named("names", ops),
)
resourceSubQuery, err := r.buildResourceSubQuery(queryParams.Tags, svc, *queryParams.Start, *queryParams.End)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return
}
query += `
AND (
resource_fingerprint GLOBAL IN ` +
resourceSubQuery +
`) AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket`
args = append(args,
clickhouse.Named("start_bucket", strconv.FormatInt(queryParams.Start.Unix()-1800, 10)),
clickhouse.Named("end_bucket", strconv.FormatInt(queryParams.End.Unix(), 10)),
)
err = r.db.QueryRow(
ctx,
query,
args...,
).ScanStruct(&serviceItem)
if serviceItem.NumCalls == 0 {
return
}
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return
}
errorQuery += `
AND (
resource_fingerprint GLOBAL IN ` +
resourceSubQuery +
`) AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket`
err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return
}
serviceItem.ServiceName = svc
serviceItem.NumErrors = numErrors
mtx.Lock()
serviceItems = append(serviceItems, serviceItem)
mtx.Unlock()
}(svc, ops)
serviceItems = append(serviceItems, serviceItem)
}
wg.Wait()
for idx := range serviceItems {
serviceItems[idx].CallRate = float64(serviceItems[idx].NumCalls) / float64(queryParams.Period)
serviceItems[idx].ErrorRate = float64(serviceItems[idx].NumErrors) * 100 / float64(serviceItems[idx].NumCalls)
if err = rows.Err(); err != nil {
zap.L().Error("Error iterating over service results", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
return &serviceItems, nil
}
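Aside: the optimized GetServices above folds one-query-per-service into a single grouped query by matching (name, serviceName) pairs against arrayZip(@ops, @svcs). A minimal Go sketch of the same pattern, assuming a clickhouse-go v2 connection conn and illustrative table and column names:

// The tuple-IN filter restricts each span to the top-level operations
// recorded for its own service, so one grouped query replaces N queries.
const q = `
    SELECT serviceName, count(*) AS numCalls
    FROM traces.spans
    WHERE (name, serviceName) IN arrayZip(@ops, @svcs)
    GROUP BY serviceName`
rows, err := conn.Query(ctx, q,
    clickhouse.Named("ops", ops),   // parallel []string of operation names
    clickhouse.Named("svcs", svcs), // matching []string of service names
)
if err != nil {
    return nil, err
}
defer rows.Close()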
// buildResourceSubQueryForServices builds a resource subquery that includes only specific services
// This maintains service context while optimizing for multiple services in a single query
func (r *ClickHouseReader) buildResourceSubQueryForServices(tags []model.TagQueryParam, targetServices []string, start, end time.Time) (string, error) {
if len(targetServices) == 0 {
return "", fmt.Errorf("no target services provided")
}
if len(tags) == 0 {
// For exact parity with per-service behavior, build via resource builder with only service filter
filterSet := v3.FilterSet{}
filterSet.Items = append(filterSet.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "service.name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: v3.FilterOperatorIn,
Value: targetServices,
})
resourceSubQuery, err := resource.BuildResourceSubQuery(
r.TraceDB,
r.traceResourceTableV3,
start.Unix()-1800,
end.Unix(),
&filterSet,
[]v3.AttributeKey{},
v3.AttributeKey{},
false)
if err != nil {
zap.L().Error("Error building resource subquery for services", zap.Error(err))
return "", err
}
return resourceSubQuery, nil
}
// Convert tags to filter set
filterSet := v3.FilterSet{}
for _, tag := range tags {
// Skip the collector id as we don't add it to traces
if tag.Key == "signoz.collector.id" {
continue
}
var it v3.FilterItem
it.Key = v3.AttributeKey{
Key: tag.Key,
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
}
switch tag.Operator {
case model.NotInOperator:
it.Operator = v3.FilterOperatorNotIn
it.Value = tag.StringValues
case model.InOperator:
it.Operator = v3.FilterOperatorIn
it.Value = tag.StringValues
default:
return "", fmt.Errorf("operator %s not supported", tag.Operator)
}
filterSet.Items = append(filterSet.Items, it)
}
// Add service filter to limit to our target services
filterSet.Items = append(filterSet.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "service.name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: v3.FilterOperatorIn,
Value: targetServices,
})
// Build resource subquery with service-specific filtering
resourceSubQuery, err := resource.BuildResourceSubQuery(
r.TraceDB,
r.traceResourceTableV3,
start.Unix()-1800,
end.Unix(),
&filterSet,
[]v3.AttributeKey{},
v3.AttributeKey{},
false)
if err != nil {
zap.L().Error("Error building resource subquery for services", zap.Error(err))
return "", err
}
return resourceSubQuery, nil
}
// buildServiceInClause creates a properly quoted IN clause for service names
func (r *ClickHouseReader) buildServiceInClause(services []string) string {
var quotedServices []string
for _, svc := range services {
// Escape single quotes and wrap in quotes
escapedSvc := strings.ReplaceAll(svc, "'", "\\'")
quotedServices = append(quotedServices, fmt.Sprintf("'%s'", escapedSvc))
}
return strings.Join(quotedServices, ", ")
}
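Aside: a hypothetical unit test of the escaping behavior (assuming a ClickHouseReader value r is available in the test):

got := r.buildServiceInClause([]string{"frontend", "o'brien-svc"})
assert.Equal(t, `'frontend', 'o\'brien-svc'`, got)

Since this helper interpolates values directly into SQL, parameter binding (as in the services query above) remains preferable wherever the driver supports it.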
func getStatusFilters(query string, statusParams []string, excludeMap map[string]struct{}) string {
// there are only two possible statuses, and selecting both is equivalent to selecting none
if _, ok := excludeMap["status"]; ok {
@@ -686,7 +783,6 @@ func addExistsOperator(item model.TagQuery, tagMapType string, not bool) (string
}
return fmt.Sprintf(" AND %s (%s)", notStr, strings.Join(tagOperatorPair, " OR ")), args
}
func (r *ClickHouseReader) GetEntryPointOperations(ctx context.Context, queryParams *model.GetTopOperationsParams) (*[]model.TopOperationsItem, error) {
// Step 1: Get top operations for the given service
topOps, err := r.GetTopOperations(ctx, queryParams)
@@ -1416,7 +1512,6 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
}(ttlPayload)
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
}
func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
// uuid is used as transaction id
uuidWithHyphen := uuid.New()
@@ -2169,7 +2264,6 @@ func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams
return &getNextPrevErrorIDsResponse, nil
}
func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *model.GetErrorParams) (string, time.Time, *model.ApiError) {
var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
@@ -2905,7 +2999,6 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3
return &attributeValues, nil
}
func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, orgID valuer.UUID, metricName, serviceName string) (*v3.MetricMetadataResponse, error) {
unixMilli := common.PastDayRoundOff()
@@ -5180,7 +5273,6 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
return &response, nil
}
func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, req *metrics_explorer.TreeMapMetricsRequest) (*[]metrics_explorer.TreeMapResponseItem, *model.ApiError) {
var args []interface{}
@@ -5933,7 +6025,6 @@ func (r *ClickHouseReader) CheckForLabelsInMetric(ctx context.Context, metricNam
}
return hasLE, nil
}
func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
cachedMetadata := make(map[string]*model.UpdateMetricsMetadata)
var missingMetrics []string


@@ -484,7 +484,7 @@ func parseAggregateAttributeRequest(r *http.Request) (*v3.AggregateAttributeRequ
limit = 50
}
if dataSource != v3.DataSourceMetrics {
if dataSource != v3.DataSourceMetrics && dataSource != v3.DataSourceMeter {
if err := aggregateOperator.Validate(); err != nil {
return nil, err
}
@@ -604,7 +604,7 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
return nil, err
}
if dataSource != v3.DataSourceMetrics {
if dataSource != v3.DataSourceMetrics && dataSource != v3.DataSourceMeter {
if err := aggregateOperator.Validate(); err != nil {
return nil, err
}


@@ -971,7 +971,9 @@ func (m *Manager) TestNotification(ctx context.Context, orgID valuer.UUID, ruleS
RuleStore: m.ruleStore,
MaintenanceStore: m.maintenanceStore,
Logger: m.logger,
SLogger: m.opts.SLogger,
Reader: m.reader,
Querier: m.opts.Querier,
Cache: m.cache,
ManagerOpts: m.opts,
NotifyFunc: m.prepareTestNotifyFunc(),


@@ -543,6 +543,11 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
return resultVector, nil
}
if queryResult == nil {
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
return resultVector, nil
}
for _, series := range queryResult.Series {
smpl, shouldAlert := r.ShouldAlert(*series)
if shouldAlert {


@@ -318,7 +318,7 @@ func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed {
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules)
handlers := signoz.NewHandlers(modules, providerSettings)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,


@@ -498,7 +498,7 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules)
handlers := signoz.NewHandlers(modules, providerSettings)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
LogsParsingPipelineController: controller,


@@ -379,7 +379,7 @@ func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudI
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules)
handlers := signoz.NewHandlers(modules, providerSettings)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,


@@ -594,7 +594,7 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules)
handlers := signoz.NewHandlers(modules, providerSettings)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,


@@ -3,10 +3,12 @@ package querybuilder
import (
"context"
"fmt"
"log/slog"
"strings"
chparser "github.com/AfterShip/clickhouse-sql-parser/parser"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -14,6 +16,7 @@ import (
)
type aggExprRewriter struct {
logger *slog.Logger
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
@@ -24,13 +27,17 @@ type aggExprRewriter struct {
var _ qbtypes.AggExprRewriter = (*aggExprRewriter)(nil)
func NewAggExprRewriter(
settings factory.ProviderSettings,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *aggExprRewriter {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
return &aggExprRewriter{
logger: set.Logger(),
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
@@ -70,7 +77,7 @@ func (r *aggExprRewriter) Rewrite(
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
}
visitor := newExprVisitor(keys,
visitor := newExprVisitor(r.logger, keys,
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
@@ -117,6 +124,7 @@ func (r *aggExprRewriter) RewriteMulti(
// exprVisitor walks FunctionExpr nodes and applies the mappers.
type exprVisitor struct {
chparser.DefaultASTVisitor
logger *slog.Logger
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
@@ -129,6 +137,7 @@ type exprVisitor struct {
}
func newExprVisitor(
logger *slog.Logger,
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
@@ -137,6 +146,7 @@ func newExprVisitor(
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
logger: logger,
fieldKeys: fieldKeys,
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
@@ -183,6 +193,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
FieldKeys: v.fieldKeys,
FieldMapper: v.fieldMapper,
ConditionBuilder: v.conditionBuilder,


@@ -2,7 +2,11 @@ package querybuilder
import (
"context"
"encoding/json"
"fmt"
"math"
"reflect"
"strconv"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
@@ -116,3 +120,58 @@ func GroupByKeys(keys []qbtypes.GroupByKey) []string {
}
return k
}
func FormatValueForContains(value any) string {
if value == nil {
return ""
}
switch v := value.(type) {
case string:
return v
case []byte:
return string(v)
case json.Number:
return v.String()
case float64:
if v == math.Trunc(v) && v >= -1e15 && v <= 1e15 {
return fmt.Sprintf("%.0f", v)
}
return strconv.FormatFloat(v, 'f', -1, 64)
case float32:
return strconv.FormatFloat(float64(v), 'f', -1, 32)
case int, int8, int16, int32, int64:
return fmt.Sprintf("%d", v)
case uint, uint8, uint16, uint32, uint64:
return fmt.Sprintf("%d", v)
case bool:
return strconv.FormatBool(v)
case fmt.Stringer:
return v.String()
default:
// fallback - try to convert through reflection
rv := reflect.ValueOf(value)
switch rv.Kind() {
case reflect.Float32, reflect.Float64:
f := rv.Float()
if f == math.Trunc(f) && f >= -1e15 && f <= 1e15 {
return fmt.Sprintf("%.0f", f)
}
return strconv.FormatFloat(f, 'f', -1, 64)
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return strconv.FormatInt(rv.Int(), 10)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return strconv.FormatUint(rv.Uint(), 10)
default:
return fmt.Sprintf("%v", value)
}
}
}
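Aside: the float64 branch above exists because large integers decoded from JSON arrive as float64, and fmt's default %g formatting renders them in scientific notation, which can never match as a LIKE substring. A quick illustration in this package:

v := float64(521509198310)
fmt.Println(fmt.Sprintf("%v", v))      // 5.2150919831e+11 (useless in a LIKE pattern)
fmt.Println(FormatValueForContains(v)) // 521509198310 (substring-safe)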


@@ -0,0 +1,275 @@
package querybuilder
import (
"encoding/json"
"fmt"
"math"
"testing"
"github.com/stretchr/testify/assert"
)
type customStringer struct {
value string
}
func (c customStringer) String() string {
return c.value
}
type customInt int64
type customFloat float64
type customUint uint64
func TestFormatValueForContains(t *testing.T) {
tests := []struct {
name string
input any
expected string
}{
{
name: "nil value",
input: nil,
expected: "",
},
{
name: "string value",
input: "hello world",
expected: "hello world",
},
{
name: "empty string",
input: "",
expected: "",
},
{
name: "string with special characters",
input: "test@#$%^&*()_+-=",
expected: "test@#$%^&*()_+-=",
},
{
name: "byte slice",
input: []byte("byte slice test"),
expected: "byte slice test",
},
{
name: "empty byte slice",
input: []byte{},
expected: "",
},
{
name: "json.Number integer",
input: json.Number("521509198310"),
expected: "521509198310",
},
{
name: "json.Number float",
input: json.Number("3.14159"),
expected: "3.14159",
},
{
name: "json.Number scientific notation",
input: json.Number("1.23e+10"),
expected: "1.23e+10",
},
{
name: "float64 whole number",
input: float64(42),
expected: "42",
},
{
name: "float64 decimal",
input: float64(3.14159),
expected: "3.14159",
},
{
name: "float64 large whole number",
input: float64(521509198310),
expected: "521509198310",
},
{
name: "float64 at positive threshold",
input: float64(1e15),
expected: "1000000000000000",
},
{
name: "float64 above positive threshold",
input: float64(1e16),
expected: "10000000000000000",
},
{
name: "float64 at negative threshold",
input: float64(-1e15),
expected: "-1000000000000000",
},
{
name: "float64 negative decimal",
input: float64(-123.456),
expected: "-123.456",
},
{
name: "float64 zero",
input: float64(0),
expected: "0",
},
{
name: "float32 whole number",
input: float32(42),
expected: "42",
},
{
name: "float32 decimal",
input: float32(3.14),
expected: "3.14",
},
{
name: "int",
input: int(123),
expected: "123",
},
{
name: "int negative",
input: int(-456),
expected: "-456",
},
{
name: "int8 max",
input: int8(127),
expected: "127",
},
{
name: "int8 min",
input: int8(-128),
expected: "-128",
},
{
name: "int16",
input: int16(32767),
expected: "32767",
},
{
name: "int32",
input: int32(2147483647),
expected: "2147483647",
},
{
name: "int64",
input: int64(9223372036854775807),
expected: "9223372036854775807",
},
{
name: "uint",
input: uint(123),
expected: "123",
},
{
name: "uint8 max",
input: uint8(255),
expected: "255",
},
{
name: "uint16",
input: uint16(65535),
expected: "65535",
},
{
name: "uint32",
input: uint32(4294967295),
expected: "4294967295",
},
{
name: "uint64 large",
input: uint64(18446744073709551615),
expected: "18446744073709551615",
},
{
name: "bool true",
input: true,
expected: "true",
},
{
name: "bool false",
input: false,
expected: "false",
},
{
name: "custom stringer",
input: customStringer{value: "custom string value"},
expected: "custom string value",
},
{
name: "custom int type",
input: customInt(12345),
expected: "12345",
},
{
name: "custom float type whole number",
input: customFloat(67890),
expected: "67890",
},
{
name: "custom float type decimal",
input: customFloat(123.456),
expected: "123.456",
},
{
name: "custom uint type",
input: customUint(99999),
expected: "99999",
},
{
name: "struct fallback",
input: struct{ Name string }{Name: "test"},
expected: "{test}",
},
{
name: "slice fallback",
input: []int{1, 2, 3},
expected: "[1 2 3]",
},
{
name: "map fallback",
input: map[string]int{"a": 1, "b": 2},
expected: fmt.Sprintf("%v", map[string]int{"a": 1, "b": 2}),
},
{
name: "float64 infinity",
input: math.Inf(1),
expected: "+Inf",
},
{
name: "float64 negative infinity",
input: math.Inf(-1),
expected: "-Inf",
},
{
name: "float64 NaN",
input: math.NaN(),
expected: "NaN",
},
{
name: "float64 very small positive",
input: float64(0.000000123),
expected: "0.000000123",
},
{
name: "float64 very small negative",
input: float64(-0.000000123),
expected: "-0.000000123",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := FormatValueForContains(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}
func TestFormatValueForContains_LargeNumberScientificNotation(t *testing.T) {
largeNumber := float64(521509198310)
result := FormatValueForContains(largeNumber)
assert.Equal(t, "521509198310", result)
assert.NotEqual(t, "5.2150919831e+11", result)
}

pkg/querybuilder/init.go (new file)

@@ -0,0 +1,15 @@
package querybuilder
import (
"os"
"strings"
)
var QBV5Enabled = false
func init() {
v := os.Getenv("ENABLE_QB_V5")
if strings.ToLower(v) == "true" || strings.ToLower(v) == "1" {
QBV5Enabled = true
}
}
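Aside: the flag accepts a case-insensitive "true" or "1". For comparison, a hedged sketch of the same check using the standard library's strconv.ParseBool, which recognizes a slightly different set of spellings ("1", "t", "T", "TRUE", "true", "True" and the corresponding false forms):

// Alternative sketch, assuming os and strconv are imported.
func qbV5EnabledFromEnv() bool {
	enabled, err := strconv.ParseBool(os.Getenv("ENABLE_QB_V5"))
	return err == nil && enabled
}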


@@ -42,6 +42,17 @@ func QueryStringToKeysSelectors(query string) []*telemetrytypes.FieldKeySelector
FieldContext: key.FieldContext,
FieldDataType: key.FieldDataType,
})
if key.FieldContext != telemetrytypes.FieldContextUnspecified {
// span.kind in metrics, metric.max_count in spans, etc. should also get the search on span.kind
// see the note in where_clause_visitor.go in VisitKey(...)
keys = append(keys, &telemetrytypes.FieldKeySelector{
Name: key.FieldContext.StringValue() + "." + key.Name,
Signal: key.Signal,
FieldContext: key.FieldContext,
FieldDataType: key.FieldDataType,
})
}
}
}


@@ -32,6 +32,12 @@ func TestQueryToKeys(t *testing.T) {
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
{
Name: "resource.service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{


@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -34,7 +35,8 @@ func valueForIndexFilter(op qbtypes.FilterOperator, key *telemetrytypes.Telemetr
}
return values
}
return value
// the resource table expects a string value
return fmt.Sprintf(`%%%v%%`, value)
}
func keyIndexFilter(key *telemetrytypes.TelemetryFieldKey) any {
@@ -53,6 +55,16 @@ func (b *defaultConditionBuilder) ConditionFor(
return "true", nil
}
switch op {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := b.fm.ColumnFor(ctx, key)
if err != nil {
return "", err


@@ -5,6 +5,7 @@ import (
"testing"
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
@@ -77,6 +78,18 @@ func TestConditionBuilder(t *testing.T) {
expected: "LOWER(simpleJSONExtractString(labels, 'k8s.namespace.name')) LIKE LOWER(?) AND labels LIKE ? AND LOWER(labels) LIKE LOWER(?)",
expectedArgs: []any{"%banana%", "%k8s.namespace.name%", `%k8s.namespace.name%banana%`},
},
{
name: "Contains operator - string attribute number value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "company.id",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
op: qbtypes.FilterOperatorContains,
value: 521509198310,
expected: "LOWER(simpleJSONExtractString(labels, 'company.id')) LIKE LOWER(?) AND labels LIKE ? AND LOWER(labels) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%", "%company.id%", `%company.id%521509198310%`},
},
{
name: "string_not_contains",
key: &telemetrytypes.TelemetryFieldKey{


@@ -3,8 +3,10 @@ package resourcefilter
import (
"context"
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -34,6 +36,7 @@ var signalConfigs = map[telemetrytypes.Signal]signalConfig{
// Generic resource filter statement builder
type resourceFilterStatementBuilder[T any] struct {
logger *slog.Logger
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
metadataStore telemetrytypes.MetadataStore
@@ -52,11 +55,14 @@ var (
// Constructor functions
func NewTraceResourceFilterStatementBuilder(
settings factory.ProviderSettings,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
) *resourceFilterStatementBuilder[qbtypes.TraceAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
return &resourceFilterStatementBuilder[qbtypes.TraceAggregation]{
logger: set.Logger(),
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
metadataStore: metadataStore,
@@ -65,6 +71,7 @@ func NewTraceResourceFilterStatementBuilder(
}
func NewLogResourceFilterStatementBuilder(
settings factory.ProviderSettings,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
@@ -72,7 +79,9 @@ func NewLogResourceFilterStatementBuilder(
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
logger: set.Logger(),
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
metadataStore: metadataStore,
@@ -93,6 +102,7 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
for idx := range keySelectors {
keySelectors[idx].Signal = b.signal
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
}
return keySelectors
@@ -117,7 +127,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
q.From(fmt.Sprintf("%s.%s", config.dbName, config.tableName))
keySelectors := b.getKeySelectors(query)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -147,6 +157,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// warnings would be encountered as part of the main condition already
filterWhereClause, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fieldMapper,
ConditionBuilder: b.conditionBuilder,
FieldKeys: keys,


@@ -33,6 +33,8 @@ func ToNanoSecs(epoch uint64) uint64 {
return temp * uint64(math.Pow(10, float64(19-count)))
}
// TODO(srikanthccv): should these be rounded to the nearest multiple of 60 instead of 5 when step > 60?
// That would make the graph look nice, but "nice" should be less important than usefulness
func RecommendedStepInterval(start, end uint64) uint64 {
start = ToNanoSecs(start)
end = ToNanoSecs(end)
@@ -160,29 +162,6 @@ func AdjustedMetricTimeRange(start, end, step uint64, mq qbtypes.QueryBuilderQue
return start, end
}
func GCD(a, b int64) int64 {
for b != 0 {
a, b = b, a%b
}
return a
}
func LCM(a, b int64) int64 {
return (a * b) / GCD(a, b)
}
// LCMList computes the LCM of a list of int64 numbers.
func LCMList(nums []int64) int64 {
if len(nums) == 0 {
return 1
}
result := nums[0]
for _, num := range nums[1:] {
result = LCM(result, num)
}
return result
}
func AssignReservedVars(vars map[string]any, start, end uint64) {
start = ToNanoSecs(start)
end = ToNanoSecs(end)


@@ -3,6 +3,7 @@ package querybuilder
import (
"context"
"fmt"
"log/slog"
"strconv"
"strings"
@@ -20,6 +21,7 @@ var searchTroubleshootingGuideURL = "https://signoz.io/docs/userguide/search-tro
// filterExpressionVisitor implements the FilterQueryVisitor interface
// to convert the parsed filter expressions into ClickHouse WHERE clause
type filterExpressionVisitor struct {
logger *slog.Logger
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
warnings []string
@@ -41,6 +43,7 @@ type filterExpressionVisitor struct {
}
type FilterExprVisitorOpts struct {
Logger *slog.Logger
FieldMapper qbtypes.FieldMapper
ConditionBuilder qbtypes.ConditionBuilder
FieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
@@ -58,6 +61,7 @@ type FilterExprVisitorOpts struct {
// newFilterExpressionVisitor creates a new filterExpressionVisitor
func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor {
return &filterExpressionVisitor{
logger: opts.Logger,
fieldMapper: opts.FieldMapper,
conditionBuilder: opts.ConditionBuilder,
fieldKeys: opts.FieldKeys,
@@ -744,6 +748,19 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
fieldKeysForName = filteredKeys
}
// if the data type is explicitly provided, filter out the remaining keys
// for example, given level:string = 'value', we don't want to search on
// anything other than the string attributes
if fieldKey.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
filteredKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, item := range fieldKeysForName {
if item.FieldDataType == fieldKey.FieldDataType {
filteredKeys = append(filteredKeys, item)
}
}
fieldKeysForName = filteredKeys
}
// for the body json search, we need to add a search on the body field even
// if there is an attribute/resource attribute with the same name
// Since it will be ORed with the fieldKeysForName, it will not result in an empty set
@@ -773,15 +790,35 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
}
if len(fieldKeysForName) > 1 && !v.keysWithWarnings[keyName] {
v.mainWarnURL = "https://signoz.io/docs/userguide/field-context-data-types/"
// this is a warning state; we must have an unambiguous key
v.warnings = append(v.warnings, fmt.Sprintf(
"key `%s` is ambiguous, found %d different combinations of field context / data type: %v",
warnMsg := fmt.Sprintf(
"Key `%s` is ambiguous, found %d different combinations of field context / data type: %v.",
fieldKey.Name,
len(fieldKeysForName),
fieldKeysForName,
))
)
mixedFieldContext := map[string]bool{}
for _, item := range fieldKeysForName {
mixedFieldContext[item.FieldContext.StringValue()] = true
}
if mixedFieldContext[telemetrytypes.FieldContextResource.StringValue()] &&
mixedFieldContext[telemetrytypes.FieldContextAttribute.StringValue()] {
filteredKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, item := range fieldKeysForName {
if item.FieldContext != telemetrytypes.FieldContextResource {
continue
}
filteredKeys = append(filteredKeys, item)
}
fieldKeysForName = filteredKeys
warnMsg += " " + "Using `resource` context by default. To query attributes explicitly, " +
fmt.Sprintf("use the fully qualified name (e.g., 'attribute.%s')", fieldKey.Name)
}
v.mainWarnURL = "https://signoz.io/docs/userguide/field-context-data-types/"
// this is a warning state; we must have an unambiguous key
v.warnings = append(v.warnings, warnMsg)
v.keysWithWarnings[keyName] = true
v.logger.Warn("ambiguous key", "field_key_name", fieldKey.Name) //nolint:sloglint
}
return fieldKeysForName


@@ -1,6 +1,7 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/apdex"
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
@@ -30,14 +31,14 @@ type Handlers struct {
TraceFunnel tracefunnel.Handler
}
func NewHandlers(modules Modules) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings) Handlers {
return Handlers{
Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter),
Preference: implpreference.NewHandler(modules.Preference),
User: impluser.NewHandler(modules.User),
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
}


@@ -35,7 +35,7 @@ func TestNewHandlers(t *testing.T) {
emailing := emailingtest.New()
modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil)
handlers := NewHandlers(modules)
handlers := NewHandlers(modules, providerSettings)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {


@@ -77,7 +77,12 @@ func NewSQLSchemaProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedMap[
)
}
func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.NamedMap[factory.ProviderFactory[sqlmigration.SQLMigration, sqlmigration.Config]] {
func NewSQLMigrationProviderFactories(
sqlstore sqlstore.SQLStore,
sqlschema sqlschema.SQLSchema,
telemetryStore telemetrystore.TelemetryStore,
providerSettings factory.ProviderSettings,
) factory.NamedMap[factory.ProviderFactory[sqlmigration.SQLMigration, sqlmigration.Config]] {
return factory.MustNewNamedMap(
sqlmigration.NewAddDataMigrationsFactory(),
sqlmigration.NewAddOrganizationFactory(),
@@ -124,13 +129,21 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore, sqlschema sqls
sqlmigration.NewUpdateUserInviteFactory(sqlstore, sqlschema),
sqlmigration.NewUpdateOrgDomainFactory(sqlstore, sqlschema),
sqlmigration.NewAddFactorIndexesFactory(sqlstore, sqlschema),
sqlmigration.NewQueryBuilderV5MigrationFactory(sqlstore, telemetryStore),
sqlmigration.NewAddMeterQuickFiltersFactory(sqlstore, sqlschema),
)
}
func NewTelemetryStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]] {
return factory.MustNewNamedMap(
clickhousetelemetrystore.NewFactory(telemetrystorehook.NewSettingsFactory(), telemetrystorehook.NewLoggingFactory()),
clickhousetelemetrystore.NewFactory(
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return telemetrystorehook.NewSettingsFactory(s)
}),
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return telemetrystorehook.NewLoggingFactory()
}),
),
)
}
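
Editor's note: the hook factories are now supplied through `telemetrystore.TelemetryStoreHookFactoryFunc`, a function type that adapts a plain closure to the hook-factory interface, in the style of `http.HandlerFunc`. A minimal sketch of that adapter pattern with hypothetical names; the real code returns `factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config]`.

package main

import "fmt"

// HookFactory stands in for the real provider-factory interface.
type HookFactory interface {
	New(name string) string
}

// HookFactoryFunc adapts a plain function to HookFactory, the same way
// http.HandlerFunc adapts a function to http.Handler.
type HookFactoryFunc func(name string) string

func (f HookFactoryFunc) New(name string) string { return f(name) }

func main() {
	settings := HookFactoryFunc(func(name string) string {
		return "settings-hook-for-" + name
	})
	var hf HookFactory = settings
	fmt.Println(hf.New("clickhouse")) // settings-hook-for-clickhouse
}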

View File

@@ -40,7 +40,12 @@ func TestNewProviderFactories(t *testing.T) {
})
assert.NotPanics(t, func() {
NewSQLMigrationProviderFactories(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), sqlschematest.New(map[string]*sqlschema.Table{}, map[string][]*sqlschema.UniqueConstraint{}, map[string]sqlschema.Index{}))
NewSQLMigrationProviderFactories(
sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual),
sqlschematest.New(map[string]*sqlschema.Table{}, map[string][]*sqlschema.UniqueConstraint{}, map[string]sqlschema.Index{}),
telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual),
instrumentationtest.New().ToProviderSettings(),
)
})
assert.NotPanics(t, func() {

View File

@@ -201,7 +201,7 @@ func New(
ctx,
providerSettings,
config.SQLMigration,
NewSQLMigrationProviderFactories(sqlstore, sqlschema),
NewSQLMigrationProviderFactories(sqlstore, sqlschema, telemetrystore, providerSettings),
)
if err != nil {
return nil, err
@@ -268,7 +268,7 @@ func New(
modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
// Initialize all handlers for the modules
handlers := NewHandlers(modules)
handlers := NewHandlers(modules, providerSettings)
// Create a list of all stats collectors
statsCollectors := []statsreporter.StatsCollector{

View File

@@ -0,0 +1,300 @@
package sqlmigration
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type queryBuilderV5Migration struct {
store sqlstore.SQLStore
telemetryStore telemetrystore.TelemetryStore
logger *slog.Logger
}
func NewQueryBuilderV5MigrationFactory(
store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("query_builder_v5_migration"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newQueryBuilderV5Migration(ctx, c, store, telemetryStore, ps.Logger)
})
}
func newQueryBuilderV5Migration(
_ context.Context,
_ Config, store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
logger *slog.Logger,
) (SQLMigration, error) {
return &queryBuilderV5Migration{store: store, telemetryStore: telemetryStore, logger: logger}, nil
}
func (migration *queryBuilderV5Migration) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *queryBuilderV5Migration) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT tagKey
FROM signoz_traces.distributed_span_attributes_keys
WHERE tagType IN ('tag', 'resource')
GROUP BY tagKey
HAVING COUNT(DISTINCT tagType) > 1
ORDER BY tagKey
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
return nil, fmt.Errorf("failed to query trace duplicate keys: %w", err)
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
return nil, fmt.Errorf("failed to scan trace duplicate key: %w", err)
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *queryBuilderV5Migration) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT name
FROM (
SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
INTERSECT
SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
)
ORDER BY name
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
return nil, fmt.Errorf("failed to query log duplicate keys: %w", err)
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
return nil, fmt.Errorf("failed to scan log duplicate key: %w", err)
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *queryBuilderV5Migration) Up(ctx context.Context, db *bun.DB) error {
// fetch keys that have both attribute and resource attribute types
logsKeys, err := migration.getLogDuplicateKeys(ctx)
if err != nil {
return err
}
tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
if err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
if err := migration.migrateDashboards(ctx, tx, logsKeys, tracesKeys); err != nil {
return err
}
if err := migration.migrateSavedViews(ctx, tx, logsKeys, tracesKeys); err != nil {
return err
}
if err := migration.migrateRules(ctx, tx, logsKeys, tracesKeys); err != nil {
return err
}
return tx.Commit()
}
func (migration *queryBuilderV5Migration) Down(ctx context.Context, db *bun.DB) error {
// this migration is not reversible as we're transforming the structure
return nil
}
func (migration *queryBuilderV5Migration) migrateDashboards(
ctx context.Context,
tx bun.Tx,
logsKeys []string,
tracesKeys []string,
) error {
var dashboards []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err := tx.NewSelect().
Table("dashboard").
Column("id", "data").
Scan(ctx, &dashboards)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
dashboardMigrator := transition.NewDashboardMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, dashboard := range dashboards {
updated := dashboardMigrator.Migrate(ctx, dashboard.Data)
if updated {
dataJSON, err := json.Marshal(dashboard.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("dashboard").
Set("data = ?", string(dataJSON)).
Where("id = ?", dashboard.ID).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
}
func (migration *queryBuilderV5Migration) migrateSavedViews(
ctx context.Context,
tx bun.Tx,
logsKeys []string,
tracesKeys []string,
) error {
var savedViews []struct {
ID string `bun:"id"`
Data string `bun:"data"`
}
err := tx.NewSelect().
Table("saved_views").
Column("id", "data").
Scan(ctx, &savedViews)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
savedViewsMigrator := transition.NewSavedViewMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, savedView := range savedViews {
var data map[string]any
if err := json.Unmarshal([]byte(savedView.Data), &data); err != nil {
continue // invalid JSON
}
updated := savedViewsMigrator.Migrate(ctx, data)
if updated {
dataJSON, err := json.Marshal(data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("saved_views").
Set("data = ?", string(dataJSON)).
Where("id = ?", savedView.ID).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
}
func (migration *queryBuilderV5Migration) migrateRules(
ctx context.Context,
tx bun.Tx,
logsKeys []string,
tracesKeys []string,
) error {
// Fetch all rules
var rules []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err := tx.NewSelect().
Table("rule").
Column("id", "data").
Scan(ctx, &rules)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, rule := range rules {
migration.logger.InfoContext(ctx, "migrating rule", "rule_id", rule.ID)
updated := alertsMigrator.Migrate(ctx, rule.Data)
if updated {
fmt.Println("updated rule", rule.ID)
dataJSON, err := json.Marshal(rule.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("rule").
Set("data = ?", string(dataJSON)).
Where("id = ?", rule.ID).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
}
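
Editor's note: `Up` above follows the usual bun transaction idiom — a deferred `Rollback` paired with an explicit `Commit`; once `Commit` succeeds, the deferred rollback is a harmless no-op. A condensed sketch of that shape, not code from this PR:

package sqlmigration

import (
	"context"

	"github.com/uptrace/bun"
)

// runInTx condenses the Up method above: one transaction wraps every step,
// so a failure in any migrator leaves dashboards, saved views, and rules
// untouched. Rollback after a successful Commit does nothing.
func runInTx(ctx context.Context, db *bun.DB, steps ...func(context.Context, bun.Tx) error) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer func() { _ = tx.Rollback() }()
	for _, step := range steps {
		if err := step(ctx, tx); err != nil {
			return err
		}
	}
	return tx.Commit()
}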

View File

@@ -106,7 +106,7 @@ func (migration *addMeterQuickFilters) Up(ctx context.Context, db *bun.DB) error
},
OrgID: orgID,
Filter: string(meterJSON),
Signal: signal{valuer.NewString("meter_explorer")},
Signal: signal{valuer.NewString("meter")},
timeAuditable: timeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
@@ -114,13 +114,15 @@ func (migration *addMeterQuickFilters) Up(ctx context.Context, db *bun.DB) error
})
}
_, err = tx.NewInsert().
Model(&meterFiltersToInsert).
On("CONFLICT (org_id, signal) DO UPDATE").
Set("filter = EXCLUDED.filter, updated_at = EXCLUDED.updated_at").
Exec(ctx)
if err != nil {
return err
if len(meterFiltersToInsert) > 0 {
_, err = tx.NewInsert().
Model(&meterFiltersToInsert).
On("CONFLICT (org_id, signal) DO UPDATE").
Set("filter = EXCLUDED.filter, updated_at = EXCLUDED.updated_at").
Exec(ctx)
if err != nil {
return err
}
}
if err := tx.Commit(); err != nil {

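Editor's note: the new `len(meterFiltersToInsert) > 0` guard matters because a bulk INSERT built from an empty model slice has no rows to render into a VALUES clause, so the statement fails instead of silently doing nothing (our reading of the change; the exact failure mode isn't shown in the diff). The guard in isolation:

package sqlmigration

import (
	"context"

	"github.com/uptrace/bun"
)

// insertAll skips the statement entirely for an empty batch instead of
// asking bun to build an INSERT with no rows.
func insertAll[T any](ctx context.Context, tx bun.Tx, rows []T) error {
	if len(rows) == 0 {
		return nil
	}
	_, err := tx.NewInsert().Model(&rows).Exec(ctx)
	return err
}
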
View File

@@ -7,6 +7,7 @@ import (
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"golang.org/x/exp/maps"
@@ -30,6 +31,16 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
return "", err

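Editor's note: `querybuilder.FormatValueForContains` itself isn't shown in this diff. From the tests below (an int operand 521509198310 becoming the LIKE argument "%521509198310%"), it evidently stringifies non-string operands so numeric values work with LIKE. A plausible reconstruction, not the actual implementation:

package querybuilder

import "fmt"

// formatValueForContains is inferred from the tests: LIKE/CONTAINS needs
// string operands, so scalars are rendered with %v before the caller wraps
// them in %...% wildcards; slices are converted element by element.
func formatValueForContains(value any) any {
	switch v := value.(type) {
	case string:
		return v
	case []any:
		out := make([]any, 0, len(v))
		for _, item := range v {
			out = append(out, formatValueForContains(item))
		}
		return out
	default:
		return fmt.Sprintf("%v", v)
	}
}
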
View File

@@ -111,6 +111,30 @@ func TestConditionFor(t *testing.T) {
expectedArgs: []any{"%admin%"},
expectedError: nil,
},
{
name: "Contains operator - string attribute number value",
key: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorContains,
value: 521509198310,
expectedSQL: "LOWER(attributes_string['user.id']) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%", true},
expectedError: nil,
},
{
name: "Contains operator - body",
key: telemetrytypes.TelemetryFieldKey{
Name: "body",
},
operator: qbtypes.FilterOperatorContains,
value: 521509198310,
expectedSQL: "LOWER(body) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%"},
expectedError: nil,
},
{
name: "Contains operator - string attribute",
key: telemetrytypes.TelemetryFieldKey{

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -19,6 +20,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
keys := buildCompleteFieldKeyMap()
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,

View File

@@ -6,8 +6,8 @@ import (
"testing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
@@ -21,14 +21,13 @@ func TestFilterExprLogs(t *testing.T) {
keys := buildCompleteFieldKeyMap()
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
Name: "body",
},
JsonBodyPrefix: "body",
JsonKeyToKey: GetBodyJSONKey,
FullTextColumn: DefaultFullTextColumn,
JsonBodyPrefix: BodyJSONStringSearchPrefix,
JsonKeyToKey: GetBodyJSONKey,
}
testCases := []struct {
@@ -1406,6 +1405,14 @@ func TestFilterExprLogs(t *testing.T) {
expectedArgs: []any{"%error%", true},
expectedErrorContains: "",
},
{
category: "number contains body",
query: "body CONTAINS 521509198310",
shouldPass: true,
expectedQuery: "WHERE LOWER(body) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%"},
expectedErrorContains: "",
},
{
category: "CONTAINS operator",
query: "level CONTAINS \"critical\"",

View File

@@ -26,12 +26,12 @@ func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetryt
var err error
var parsedValue any
if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
valueType = telemetrytypes.FieldDataTypeBool
} else if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeInt64
} else if parsedValue, err = strconv.ParseFloat(valueStr, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeFloat64
} else if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
valueType = telemetrytypes.FieldDataTypeBool
} else {
parsedValue = valueStr
valueType = telemetrytypes.FieldDataTypeString

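Editor's note: the reordering above is load-bearing. `strconv.ParseBool` accepts "1", "0", "t", "f" and friends, so with bool tried first a value like "1" was classified as Bool; trying int, then float, then bool keeps numeric strings numeric. A quick demonstration:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// ParseBool happily accepts "1", so bool-first parsing misclassifies it.
	b, err := strconv.ParseBool("1")
	fmt.Println(b, err) // true <nil>

	// With int tried first, "1" stays an int64, as the reordered code intends.
	i, err := strconv.ParseInt("1", 10, 64)
	fmt.Println(i, err) // 1 <nil>
}
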
View File

@@ -68,7 +68,7 @@ func (b *logQueryStatementBuilder) Build(
end = querybuilder.ToNanoSecs(end)
keySelectors := getKeySelectors(query)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -121,6 +121,7 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []
for idx := range keySelectors {
keySelectors[idx].Signal = telemetrytypes.SignalLogs
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
}
return keySelectors
@@ -552,6 +553,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
if query.Filter != nil && query.Filter.Expression != "" {
// add filter expression
preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,

View File

@@ -27,6 +27,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
mockMetadataStore.KeysMap = keysMap
return resourcefilter.NewLogResourceFilterStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
fm,
cb,
mockMetadataStore,
@@ -119,7 +120,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -212,7 +213,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -321,7 +322,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()

View File

@@ -6,4 +6,6 @@ const (
LogsV2LocalTableName = "logs_v2"
TagAttributesV2TableName = "distributed_tag_attributes_v2"
TagAttributesV2LocalTableName = "tag_attributes_v2"
LogAttributeKeysTblName = "distributed_logs_attribute_keys"
LogResourceKeysTblName = "distributed_logs_resource_keys"
)

View File

@@ -27,6 +27,13 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"body": {
{
Name: "body",
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.status_code": {
{
Name: "http.status_code",

View File

@@ -5,6 +5,7 @@ import (
"fmt"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -25,6 +26,17 @@ func (c *conditionBuilder) ConditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
// if we don't have a column, we can't build a condition for related values

File diff suppressed because it is too large

View File

@@ -40,6 +40,7 @@ func TestGetKeys(t *testing.T) {
mockTelemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
@@ -48,6 +49,8 @@ func TestGetKeys(t *testing.T) {
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
DBName,
AttributesMetadataLocalTableName,
)
@@ -63,14 +66,14 @@ func TestGetKeys(t *testing.T) {
query := `SELECT.*`
mock.ExpectQuery(query).
WithArgs("%http.method%", telemetrytypes.FieldDataTypeString.TagDataType(), 10).
WithArgs("%http.method%", telemetrytypes.FieldDataTypeString.TagDataType(), 11).
WillReturnRows(cmock.NewRows([]cmock.ColumnType{
{Name: "tag_key", Type: "String"},
{Name: "tag_type", Type: "String"},
{Name: "tag_data_type", Type: "String"},
{Name: "priority", Type: "UInt8"},
}, [][]any{{"http.method", "tag", "String", 1}, {"http.method", "tag", "String", 1}}))
keys, err := metadata.GetKeys(context.Background(), &telemetrytypes.FieldKeySelector{
keys, _, err := metadata.GetKeys(context.Background(), &telemetrytypes.FieldKeySelector{
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,

View File

@@ -51,7 +51,7 @@ func (b *meterQueryStatementBuilder) Build(
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
keySelectors := telemetrymetrics.GetKeySelectors(query)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -141,6 +141,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
)
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
@@ -223,6 +224,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
@@ -286,6 +288,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
)
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,

View File

@@ -6,6 +6,7 @@ import (
"slices"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -28,6 +29,16 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
return "", err

View File

@@ -133,6 +133,19 @@ func TestConditionFor(t *testing.T) {
expectedSQL: "",
expectedError: qbtypes.ErrInValues,
},
{
name: "Contains operator - string attribute",
key: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorContains,
value: 521509198310,
expectedSQL: "LOWER(JSONExtractString(labels, 'user.id')) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%"},
expectedError: nil,
},
{
name: "Not In operator - metric_name",
key: telemetrytypes.TelemetryFieldKey{

View File

@@ -67,6 +67,10 @@ func GetKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation])
for idx := range keySelectors {
keySelectors[idx].Signal = telemetrytypes.SignalMetrics
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
keySelectors[idx].MetricContext = &telemetrytypes.MetricContext{
MetricName: query.Aggregations[0].MetricName,
}
}
return keySelectors
}
@@ -80,7 +84,7 @@ func (b *MetricQueryStatementBuilder) Build(
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
keySelectors := GetKeySelectors(query)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -294,6 +298,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
if query.Filter != nil && query.Filter.Expression != "" {
preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,

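Editor's note: the new `MetricContext` on each selector threads the metric name into key lookups, presumably so the metadata store can narrow suggestions to attributes that actually occur on that metric. A minimal sketch with simplified stand-in types for the telemetrytypes structs:

package telemetrymetrics

// metricContext and fieldKeySelector are simplified stand-ins.
type metricContext struct{ MetricName string }

type fieldKeySelector struct {
	Name          string
	MetricContext *metricContext
}

// scopeToMetric mirrors the loop above: every selector for a metrics query
// carries the metric name, narrowing the metadata search to that metric.
func scopeToMetric(selectors []fieldKeySelector, metricName string) []fieldKeySelector {
	for idx := range selectors {
		selectors[idx].MetricContext = &metricContext{MetricName: metricName}
	}
	return selectors
}
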
Some files were not shown because too many files have changed in this diff.