Compare commits

..

38 Commits

Author SHA1 Message Date
Manika Malhotra 15da283d0d Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-10-22 11:32:41 +05:30
Manika Malhotra 455ba0549f Merge branch 'main' into enhancement/cmd-click-stack 2025-10-22 11:32:29 +05:30
manika-signoz 5f2c302551 chore: extract isEventObject utility to separate file 2025-10-13 23:56:49 +05:30
manika-signoz 15c2dc700a chore: use requireActual 2025-10-13 23:41:21 +05:30
manika-signoz 63a4beb737 Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-10-13 23:26:05 +05:30
manika-signoz 02fa0dbc32 Merge branch 'main' into enhancement/cmd-click-stack 2025-10-13 23:25:35 +05:30
manika-signoz e0948033c8 chore: re-export isEventObject utility from mocks 2025-10-13 23:24:28 +05:30
manika-signoz 8e02c73f0a Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-10-13 10:30:10 +05:30
manika-signoz a1115ac65b Merge branch 'main' into enhancement/cmd-click-stack 2025-10-13 10:30:04 +05:30
manika-signoz 640482292d Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-10-09 17:03:19 +05:30
manika-signoz 9bcb88c747 Merge branch 'main' into enhancement/cmd-click-stack 2025-10-09 17:03:12 +05:30
manika-signoz e4711ee79e Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-10-09 00:57:38 +05:30
manika-signoz 367bf7b4b5 Merge branch 'main' into enhancement/cmd-click-stack 2025-10-09 00:57:31 +05:30
manika-signoz 1d67461773 revert: top contributors and new explorer cta changes 2025-10-09 00:56:28 +05:30
manika-signoz 3510411464 Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-10-08 15:38:22 +05:30
manika-signoz 59b68057b8 Merge branch 'main' into enhancement/cmd-click-stack 2025-10-08 15:38:11 +05:30
manika-signoz 1cd2bdd0fc Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-10-07 23:07:30 +05:30
manika-signoz fa1b2ddf7c Merge branch 'main' into enhancement/cmd-click-stack 2025-10-07 23:07:15 +05:30
manika-signoz 642a0e5656 fix: dashboardandalertspopover.test to use usesafenav mock 2025-09-30 19:31:34 +05:30
manika-signoz cb99ee1ac1 fix: failing test cases due to isEventObject, add to mock 2025-09-30 19:16:54 +05:30
manika-signoz 018cab5d7e Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-09-30 17:43:06 +05:30
manika-signoz 7616cb89e4 Merge branch 'enhancement/cmd-click-stack' of github.com:SigNoz/signoz into enhancement/cmd-click-stack 2025-09-30 17:41:07 +05:30
manika-signoz bf780c7445 chore: resolve comments, improve type safety in usesafenav 2025-09-30 17:40:17 +05:30
manika-signoz 61062dfd8d Merge branch 'main' into enhancement/cmd-click-stack 2025-09-30 17:08:38 +05:30
manika-signoz f3f995420e Merge branch 'feat/cmd-click-impl-updated-history' of github.com:SigNoz/signoz into feat/cmd-click-impl-updated-history 2025-09-30 17:07:49 +05:30
manika-signoz 87971e50f8 chore: log details open in explorer 2025-09-30 17:07:14 +05:30
manika-signoz 24d7a72777 Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-09-29 10:40:38 +05:30
manika-signoz 5b7af9651c Merge branch 'main' into enhancement/cmd-click-stack 2025-09-29 10:40:22 +05:30
manika-signoz 50ff558195 Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-09-24 13:12:44 +05:30
manika-signoz b9012f6150 Merge branch 'main' into enhancement/cmd-click-stack 2025-09-24 13:12:36 +05:30
manika-signoz abb3ec3688 Merge branch 'enhancement/cmd-click-stack' into feat/cmd-click-impl-updated-history 2025-09-24 10:13:09 +05:30
manika-signoz 7ab81780b3 Merge branch 'main' into enhancement/cmd-click-stack 2025-09-24 10:06:51 +05:30
manika-signoz 8bf6c23740 feat: cmd click implementation using updated methods 2025-09-23 17:20:21 +05:30
manika-signoz a16f51457f Merge branch 'main' into enhancement/cmd-click-stack 2025-09-23 16:32:51 +05:30
manika-signoz 38a38b5645 test: add tests for history.push 2025-09-23 16:30:16 +05:30
manika-signoz bb04bc5044 Merge branch 'main' into enhancement/cmd-click-stack 2025-09-23 16:16:16 +05:30
manika-signoz 58736f40dc feat: add support for location object in history.push override 2025-09-22 19:05:42 +05:30
manika-signoz 91154249d6 feat: add history.push and safeNavigate method overrides 2025-09-22 18:55:52 +05:30
594 changed files with 51082 additions and 64710 deletions

View File

@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS vendored (2 changes)
View File

@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that change code
# that they own.
/frontend/ @YounixM @aks07
/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv

View File

@@ -69,7 +69,6 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -108,6 +107,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'

View File

@@ -68,7 +68,6 @@ jobs:
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -107,6 +106,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'

View File

@@ -35,7 +35,6 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact

View File

@@ -17,8 +17,6 @@ jobs:
- bootstrap
- passwordauthn
- callbackauthn
- cloudintegrations
- dashboard
- querier
- ttl
sqlstore-provider:

.gitignore vendored (1 change)
View File

@@ -106,6 +106,7 @@ downloads/
eggs/
.eggs/
lib/
!frontend/src/lib/
lib64/
parts/
sdist/

View File

@@ -1,63 +1,39 @@
version: "2"
linters:
default: none
default: standard
enable:
- bodyclose
- depguard
- errcheck
- forbidigo
- govet
- iface
- ineffassign
- misspell
- nilnil
- sloglint
- depguard
- iface
- unparam
- unused
settings:
depguard:
rules:
noerrors:
deny:
- pkg: errors
desc: Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead.
nozap:
deny:
- pkg: go.uber.org/zap
desc: Do not use zap logger. Use slog instead.
forbidigo:
forbid:
- pattern: fmt.Errorf
- pattern: ^(fmt\.Print.*|print|println)$
iface:
enable:
- identical
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
key-naming-case: snake
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- pkg/query-service
- ee/query-service
- scripts/
- tmp/
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$
- forbidigo
linters-settings:
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
msg-style: lowercased
key-naming-case: snake
depguard:
rules:
nozap:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
noerrors:
deny:
- pkg: "errors"
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
iface:
enable:
- identical
issues:
exclude-dirs:
- "pkg/query-service"
- "ee/query-service"
- "scripts/"

View File

@@ -84,9 +84,10 @@ go-run-enterprise: ## Runs the enterprise go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go server
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
--config ./conf/prometheus.yml \
--cluster cluster
.PHONY: go-test
go-test: ## Runs go unit tests
@@ -101,9 +102,10 @@ go-run-community: ## Runs the community go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
--config ./conf/prometheus.yml \
--cluster cluster
.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -206,4 +208,4 @@ py-lint: ## Run lint for integration tests
.PHONY: py-test
py-test: ## Runs integration tests
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/

View File

@@ -5,12 +5,9 @@ import (
"log/slog"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -79,9 +76,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -31,6 +31,7 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
mod_timestamp: "{{ .CommitTimestamp }}"

View File

@@ -8,8 +8,6 @@ import (
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -19,7 +17,6 @@ import (
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -108,9 +105,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -47,10 +47,10 @@ cache:
provider: memory
# memory: Uses in-memory caching.
memory:
# Max items for the in-memory cache (10x the entries)
num_counters: 100000
# Total cost in bytes allocated bounded cache
max_cost: 67108864
# Time-to-live for cache entries in memory. Specify the duration in ns
ttl: 60000000000
# The interval at which the cache will be cleaned up
cleanup_interval: 1m
# redis: Uses Redis as the caching backend.
redis:
# The hostname or IP address of the Redis server.

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.103.0
image: signoz/signoz:v0.97.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.12
image: signoz/signoz-otel-collector:v0.129.7
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
deploy:
restart_policy:
condition: on-failure

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.103.0
image: signoz/signoz:v0.97.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.12
image: signoz/signoz-otel-collector:v0.129.7
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
deploy:
restart_policy:
condition: on-failure

View File

@@ -1,10 +1,3 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -28,10 +21,6 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -77,11 +66,6 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -93,20 +77,16 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces, signozmeter]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
exporters: [clickhouselogsexporter]
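
The `resourcedetection` processor in this config uses the `env` detector, which reads `OTEL_RESOURCE_ATTRIBUTES`. A minimal sketch of supplying custom labels that way, assuming the standard comma-separated `key=value` format (attribute keys are illustrative, borrowed from the dimensions listed above):

```sh
# Hypothetical: export resource attributes before starting the collector so the
# env detector attaches them as labels on emitted telemetry.
export OTEL_RESOURCE_ATTRIBUTES="deployment.environment=staging,host.name=demo-host"
```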

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.103.0}
image: signoz/signoz:${VERSION:-v0.97.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-async
command:
- async
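
These compose files parameterize the image tags with shell-style defaults (`${VERSION:-...}`, `${OTELCOL_TAG:-...}`), so the tags can also be pinned at start-up without editing the file. A minimal sketch, with tag values purely illustrative (taken from one side of the diff above):

```sh
# Hypothetical override of the image tags consumed by the compose file above.
VERSION=v0.103.0 OTELCOL_TAG=v0.129.12 docker compose up -d
```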

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.103.0}
image: signoz/signoz:${VERSION:-v0.97.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-async
command:
- async

View File

@@ -1,10 +1,3 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -28,10 +21,6 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -77,11 +66,6 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -93,20 +77,16 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces, signozmeter]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
exporters: [clickhouselogsexporter]

View File

@@ -103,19 +103,9 @@ Remember to replace the region and ingestion key with proper values as obtained
Both SigNoz and the OTel demo app [the frontend-proxy service, to be accurate] default to port 8080. To prevent port allocation conflicts, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run the docker compose command.
Also, both SigNoz and the OTel Demo App have the same `PROMETHEUS_PORT` configured; by default both try to start at `9090`, which may cause either of them to fail depending on which one acquires the port first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.
```sh
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
ENVOY_PORT=8081 docker compose up -d
```
Alternatively, we can set these values in the `.env` file, which reduces the command to just:
```sh
docker compose up -d
```
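A minimal sketch of what those `.env` overrides could look like, assuming the variable names mentioned above (this snippet is not part of the diff):

```sh
# Hypothetical .env entries: move the OTel demo's Envoy and Prometheus ports off
# SigNoz's defaults (8080 and 9090) to avoid the conflicts described above.
ENVOY_PORT=8081
PROMETHEUS_PORT=9091
```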
This spins up multiple microservices with OpenTelemetry instrumentation enabled. You can verify this by running:
```sh
docker compose ps -a

View File

@@ -48,26 +48,7 @@ func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
if err != nil {
return err
}

View File

@@ -15,18 +15,18 @@ type anonymous
type role
relations
define assignee: [user, anonymous]
define assignee: [user]
define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
type metaresources
type resources
relations
define create: [user, role#assignee]
define list: [user, role#assignee]
type metaresource
type resource
relations
define read: [user, anonymous, role#assignee]
define update: [user, role#assignee]
@@ -35,6 +35,6 @@ type metaresource
define block: [user, role#assignee]
type telemetryresource
type telemetry
relations
define read: [user, role#assignee]
define read: [user, anonymous, role#assignee]

View File

@@ -1,10 +1,10 @@
package licensing
import (
"fmt"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
)
@@ -18,7 +18,7 @@ func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
once.Do(func() {
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
if err := config.Validate(); err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid licensing config"))
panic(fmt.Errorf("invalid licensing config: %w", err))
}
})

View File

@@ -20,10 +20,6 @@ import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -103,39 +99,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)
// dashboards
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.CreatePublic)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.GetPublic)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.UpdatePublic)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.DeletePublic)).Methods(http.MethodDelete)
// public access for dashboards
router.HandleFunc("/api/v1/public/dashboards/{id}", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicData,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}
return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)
router.HandleFunc("/api/v1/public/dashboards/{id}/widgets/{index}/query_range", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicWidgetQueryRange,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}
return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)
// v3
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)

View File

@@ -10,6 +10,7 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -76,7 +77,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
@@ -185,37 +186,48 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
return cloudIntegrationUser, nil
}
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, string, *basemodel.ApiError,
) {
// TODO: remove this struct from here
url := fmt.Sprintf(
"%s%s",
strings.TrimSuffix(constants.ZeusURL, "/"),
"/v2/deployments/me",
)
type deploymentResponse struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
Status string `json:"status"`
Error string `json:"error"`
Data struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
} `json:"data"`
}
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
resp, apiErr := requestAndParseResponse[deploymentResponse](
ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
)
if apiErr != nil {
return "", "", basemodel.WrapApiError(
apiErr, "couldn't query for deployment info",
)
}
if resp.Status != "success" {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: error: %w", err,
"couldn't query for deployment info: status: %s, error: %s",
resp.Status, resp.Error,
))
}
resp := new(deploymentResponse)
err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}
regionDns := resp.ClusterInfo.Region.DNS
deploymentName := resp.Name
regionDns := resp.Data.ClusterInfo.Region.DNS
deploymentName := resp.Data.Name
if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge

View File

@@ -9,7 +9,6 @@ import (
_ "net/http/pprof" // http profiler
"slices"
"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
@@ -75,26 +74,13 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
NumCounters: 10 * 10000,
MaxCost: 1 << 27, // 128 MB
},
})
if err != nil {
return nil, err
}
reader := clickhouseReader.NewReader(
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.TelemetryStore.Cluster(),
config.Querier.FluxInterval,
cacheForTraceDetail,
signoz.Cache,
nil,
)
rm, err := makeRulesManager(
@@ -206,7 +192,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
r.Use(otelmux.Middleware(
"apiserver",

View File

@@ -10,6 +10,9 @@ var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"
func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)
if len(v) == 0 {

View File

@@ -246,9 +246,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
continue
}
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
})
results, err := r.Threshold.ShouldAlert(*series, r.Unit())
if err != nil {
return nil, err
}
@@ -298,9 +296,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
continue
}
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
})
results, err := r.Threshold.ShouldAlert(*series, r.Unit())
if err != nil {
return nil, err
}
@@ -414,7 +410,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
GeneratorURL: r.GeneratorURL(),
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
Missing: smpl.IsMissing,
IsRecovering: smpl.IsRecovering,
}
}
@@ -427,9 +422,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
alert.Value = a.Value
alert.Annotations = a.Annotations
// Update the recovering and missing state of existing alert
alert.IsRecovering = a.IsRecovering
alert.Missing = a.Missing
if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
alert.Receivers = ruleReceiverMap[v]
}
@@ -488,30 +480,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
Value: a.Value,
})
}
// We need to change firing alert to recovering if the returned sample meets recovery threshold
changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
// We need to change recovering alerts to firing if the returned sample meets target threshold
changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
// in any of the above case we need to update the status of alert
if changeFiringToRecovering || changeRecoveringToFiring {
state := model.StateRecovering
if changeRecoveringToFiring {
state = model.StateFiring
}
a.State = state
r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
}
currentState := r.State()

View File

@@ -30,8 +30,6 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
return sqlschema.DataTypeBoolean
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
return sqlschema.DataTypeText
case "BYTEA":
return sqlschema.DataTypeBytea
}
return formatter.Formatter.DataTypeOf(dataType)

View File

@@ -2,7 +2,6 @@ package postgressqlschema
import (
"context"
"database/sql"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
@@ -48,45 +47,50 @@ func (provider *provider) Operator() sqlschema.SQLOperator {
}
func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
columns := []struct {
ColumnName string `bun:"column_name"`
Nullable bool `bun:"nullable"`
SQLDataType string `bun:"udt_name"`
DefaultVal *string `bun:"column_default"`
}{}
err := provider.
rows, err := provider.
sqlstore.
BunDB().
NewRaw(`
QueryContext(ctx, `
SELECT
c.column_name,
c.is_nullable = 'YES' as nullable,
c.is_nullable = 'YES',
c.udt_name,
c.column_default
FROM
information_schema.columns AS c
WHERE
c.table_name = ?`, string(tableName)).
Scan(ctx, &columns)
c.table_name = ?`, string(tableName))
if err != nil {
return nil, nil, err
}
if len(columns) == 0 {
return nil, nil, sql.ErrNoRows
}
sqlschemaColumns := make([]*sqlschema.Column, 0)
for _, column := range columns {
columnDefault := ""
if column.DefaultVal != nil {
columnDefault = *column.DefaultVal
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
columns := make([]*sqlschema.Column, 0)
for rows.Next() {
var (
name string
sqlDataType string
nullable bool
defaultVal *string
)
if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
return nil, nil, err
}
sqlschemaColumns = append(sqlschemaColumns, &sqlschema.Column{
Name: sqlschema.ColumnName(column.ColumnName),
Nullable: column.Nullable,
DataType: provider.fmter.DataTypeOf(column.SQLDataType),
columnDefault := ""
if defaultVal != nil {
columnDefault = *defaultVal
}
columns = append(columns, &sqlschema.Column{
Name: sqlschema.ColumnName(name),
Nullable: nullable,
DataType: provider.fmter.DataTypeOf(sqlDataType),
Default: columnDefault,
})
}
@@ -204,7 +208,7 @@ WHERE
return &sqlschema.Table{
Name: tableName,
Columns: sqlschemaColumns,
Columns: columns,
PrimaryKeyConstraint: primaryKeyConstraint,
ForeignKeyConstraints: foreignKeyConstraints,
}, uniqueConstraints, nil

View File

@@ -1,153 +0,0 @@
package postgressqlstore
import (
"strings"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun/schema"
)
type formatter struct {
bunf schema.Formatter
}
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
return &formatter{bunf: schema.NewFormatter(dialect)}
}
func (f *formatter) JSONExtractString(column, path string) []byte {
var sql []byte
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgres(path)...)
return sql
}
func (f *formatter) JSONType(column, path string) []byte {
var sql []byte
sql = append(sql, "jsonb_typeof("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ')')
return sql
}
func (f *formatter) JSONIsArray(column, path string) []byte {
var sql []byte
sql = append(sql, f.JSONType(column, path)...)
sql = append(sql, " = "...)
sql = schema.Append(f.bunf, sql, "array")
return sql
}
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_array_elements("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, []byte(alias)
}
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_array_elements_text("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, append([]byte(alias), "::text"...)
}
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_each("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, append([]byte(alias), ".key"...)
}
func (f *formatter) JSONArrayAgg(expression string) []byte {
var sql []byte
sql = append(sql, "jsonb_agg("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
var sql []byte
sql = append(sql, "jsonb_build_array("...)
for idx, value := range values {
if idx > 0 {
sql = append(sql, ", "...)
}
sql = schema.Append(f.bunf, sql, value)
}
sql = append(sql, ')')
return sql
}
func (f *formatter) TextToJsonColumn(column string) []byte {
var sql []byte
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, "::jsonb"...)
return sql
}
func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}
func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")
if path == "" {
return nil
}
parts := strings.Split(path, ".")
var validParts []string
for _, part := range parts {
if part != "" {
validParts = append(validParts, part)
}
}
if len(validParts) == 0 {
return nil
}
var result []byte
for idx, part := range validParts {
if idx == len(validParts)-1 {
if asText {
result = append(result, "->>"...)
} else {
result = append(result, "->"...)
}
result = schema.Append(f.bunf, result, part)
return result
}
result = append(result, "->"...)
result = schema.Append(f.bunf, result, part)
}
return result
}
func (f *formatter) LowerExpression(expression string) []byte {
var sql []byte
sql = append(sql, "lower("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}

View File

@@ -1,500 +0,0 @@
package postgressqlstore
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/uptrace/bun/dialect/pgdialect"
)
func TestJSONExtractString(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.field",
expected: `"data"->>'field'`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.name",
expected: `"metadata"->'user'->>'name'`,
},
{
name: "deeply nested path",
column: "json_col",
path: "$.level1.level2.level3",
expected: `"json_col"->'level1'->'level2'->>'level3'`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `"json_col"`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `"data"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONExtractString(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONType(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.field",
expected: `jsonb_typeof("data"->'field')`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.age",
expected: `jsonb_typeof("metadata"->'user'->'age')`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `jsonb_typeof("json_col")`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data")`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONType(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONIsArray(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.items",
expected: `jsonb_typeof("data"->'items') = 'array'`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.tags",
expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `jsonb_typeof("json_col") = 'array'`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data") = 'array'`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONIsArray(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONArrayElements(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "elem",
expected: `jsonb_array_elements("data") AS "elem"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "elem",
expected: `jsonb_array_elements("data") AS "elem"`,
},
{
name: "nested path",
column: "metadata",
path: "$.items",
alias: "item",
expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
},
{
name: "deeply nested path",
column: "json_col",
path: "$.user.tags",
alias: "tag",
expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONArrayOfStrings(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "str",
expected: `jsonb_array_elements_text("data") AS "str"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "str",
expected: `jsonb_array_elements_text("data") AS "str"`,
},
{
name: "nested path",
column: "metadata",
path: "$.strings",
alias: "s",
expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONKeys(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "k",
expected: `jsonb_each("data") AS "k"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "k",
expected: `jsonb_each("data") AS "k"`,
},
{
name: "nested path",
column: "metadata",
path: "$.object",
alias: "key",
expected: `jsonb_each("metadata"->'object') AS "key"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONArrayAgg(t *testing.T) {
tests := []struct {
name string
expression string
expected string
}{
{
name: "simple column",
expression: "id",
expected: "jsonb_agg(id)",
},
{
name: "expression with function",
expression: "DISTINCT name",
expected: "jsonb_agg(DISTINCT name)",
},
{
name: "complex expression",
expression: "data->>'field'",
expected: "jsonb_agg(data->>'field')",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONArrayAgg(tt.expression))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONArrayLiteral(t *testing.T) {
tests := []struct {
name string
values []string
expected string
}{
{
name: "empty array",
values: []string{},
expected: "jsonb_build_array()",
},
{
name: "single value",
values: []string{"value1"},
expected: "jsonb_build_array('value1')",
},
{
name: "multiple values",
values: []string{"value1", "value2", "value3"},
expected: "jsonb_build_array('value1', 'value2', 'value3')",
},
{
name: "values with special characters",
values: []string{"test", "with space", "with-dash"},
expected: "jsonb_build_array('test', 'with space', 'with-dash')",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONArrayLiteral(tt.values...))
assert.Equal(t, tt.expected, got)
})
}
}
func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
tests := []struct {
name string
jsonPath string
asText bool
expected string
}{
{
name: "simple path as text",
jsonPath: "$.field",
asText: true,
expected: "->>'field'",
},
{
name: "simple path as json",
jsonPath: "$.field",
asText: false,
expected: "->'field'",
},
{
name: "nested path as text",
jsonPath: "$.user.name",
asText: true,
expected: "->'user'->>'name'",
},
{
name: "nested path as json",
jsonPath: "$.user.name",
asText: false,
expected: "->'user'->'name'",
},
{
name: "deeply nested as text",
jsonPath: "$.a.b.c.d",
asText: true,
expected: "->'a'->'b'->'c'->>'d'",
},
{
name: "root path",
jsonPath: "$",
asText: true,
expected: "",
},
{
name: "empty path",
jsonPath: "",
asText: true,
expected: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New()).(*formatter)
got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
assert.Equal(t, tt.expected, got)
})
}
}
func TestTextToJsonColumn(t *testing.T) {
tests := []struct {
name string
column string
expected string
}{
{
name: "simple column name",
column: "data",
expected: `"data"::jsonb`,
},
{
name: "column with underscore",
column: "user_data",
expected: `"user_data"::jsonb`,
},
{
name: "column with special characters",
column: "json-col",
expected: `"json-col"::jsonb`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.TextToJsonColumn(tt.column))
assert.Equal(t, tt.expected, got)
})
}
}
func TestLowerExpression(t *testing.T) {
tests := []struct {
name string
expr string
expected string
}{
{
name: "simple column name",
expr: "name",
expected: "lower(name)",
},
{
name: "quoted column identifier",
expr: `"column_name"`,
expected: `lower("column_name")`,
},
{
name: "jsonb text extraction",
expr: "data->>'field'",
expected: "lower(data->>'field')",
},
{
name: "nested jsonb extraction",
expr: "metadata->'user'->>'name'",
expected: "lower(metadata->'user'->>'name')",
},
{
name: "jsonb_typeof expression",
expr: "jsonb_typeof(data->'field')",
expected: "lower(jsonb_typeof(data->'field'))",
},
{
name: "string concatenation",
expr: "first_name || ' ' || last_name",
expected: "lower(first_name || ' ' || last_name)",
},
{
name: "CAST expression",
expr: "CAST(value AS TEXT)",
expected: "lower(CAST(value AS TEXT))",
},
{
name: "COALESCE expression",
expr: "COALESCE(name, 'default')",
expected: "lower(COALESCE(name, 'default'))",
},
{
name: "subquery column",
expr: "users.email",
expected: "lower(users.email)",
},
{
name: "quoted identifier with special chars",
expr: `"user-name"`,
expected: `lower("user-name")`,
},
{
name: "jsonb to text cast",
expr: "data::text",
expected: "lower(data::text)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.LowerExpression(tt.expr))
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -15,11 +15,10 @@ import (
)
type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
formatter sqlstore.SQLFormatter
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
}
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -56,14 +55,11 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
sqldb := stdlib.OpenDBFromPool(pool)
pgDialect := pgdialect.New()
bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
return &provider{
settings: settings,
sqldb: sqldb,
bundb: bunDB,
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
dialect: new(dialect),
}, nil
}
@@ -79,10 +75,6 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}
func (provider *provider) Formatter() sqlstore.SQLFormatter {
return provider.formatter
}
func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
return provider.bundb.BunDBCtx(ctx)
}

View File

@@ -1,10 +1,10 @@
package zeus
import (
"fmt"
neturl "net/url"
"sync"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/zeus"
)
@@ -24,17 +24,17 @@ func Config() zeus.Config {
once.Do(func() {
parsedURL, err := neturl.Parse(url)
if err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus URL"))
panic(fmt.Errorf("invalid zeus URL: %w", err))
}
deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
if err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus deprecated URL"))
panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
}
config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
if err := config.Validate(); err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus config"))
panic(fmt.Errorf("invalid zeus config: %w", err))
}
})

View File

@@ -1,4 +1,6 @@
// Mock for useSafeNavigate hook to avoid React Router version conflicts in tests
export { isEventObject } from '../src/utils/isEventObject';
interface SafeNavigateOptions {
replace?: boolean;
state?: unknown;

View File

@@ -3,6 +3,5 @@ BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
PYLON_APP_ID="pylon-app-id"
APPCUES_APP_ID="appcess-app-id"
PYLON_IDENTITY_SECRET="pylon-identity-secret"
CI="1"

View File

@@ -38,7 +38,7 @@
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
"@monaco-editor/react": "^4.3.1",
"@playwright/test": "1.55.1",
"@playwright/test": "1.54.1",
"@radix-ui/react-tabs": "1.0.4",
"@radix-ui/react-tooltip": "1.0.7",
"@sentry/react": "8.41.0",
@@ -69,7 +69,7 @@
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"antlr4": "4.13.2",
"axios": "1.12.0",
"axios": "1.8.2",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",
@@ -83,7 +83,6 @@
"color": "^4.2.1",
"color-alpha": "1.1.3",
"cross-env": "^7.0.3",
"crypto-js": "4.2.0",
"css-loader": "5.0.0",
"css-minimizer-webpack-plugin": "5.0.1",
"d3-hierarchy": "3.1.2",
@@ -113,7 +112,7 @@
"overlayscrollbars": "^2.8.1",
"overlayscrollbars-react": "^0.5.6",
"papaparse": "5.4.1",
"posthog-js": "1.298.0",
"posthog-js": "1.215.5",
"rc-tween-one": "3.0.6",
"react": "18.2.0",
"react-addons-update": "15.6.3",
@@ -150,6 +149,7 @@
"tsconfig-paths-webpack-plugin": "^3.5.1",
"typescript": "^4.0.5",
"uplot": "1.6.31",
"userpilot": "1.3.9",
"uuid": "^8.3.2",
"web-vitals": "^0.2.4",
"webpack": "5.94.0",
@@ -186,7 +186,6 @@
"@types/color": "^3.0.3",
"@types/compression-webpack-plugin": "^9.0.0",
"@types/copy-webpack-plugin": "^8.0.1",
"@types/crypto-js": "4.2.2",
"@types/dompurify": "^2.4.0",
"@types/event-source-polyfill": "^1.0.0",
"@types/fontfaceobserver": "2.1.0",
@@ -281,7 +280,6 @@
"got": "11.8.5",
"form-data": "4.0.4",
"brace-expansion": "^2.0.2",
"on-headers": "^1.1.0",
"tmp": "0.2.4"
"on-headers": "^1.1.0"
}
}

View File

@@ -7,12 +7,11 @@ import AppLoading from 'components/AppLoading/AppLoading';
import KBarCommandPalette from 'components/KBarCommandPalette/KBarCommandPalette';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import Hex from 'crypto-js/enc-hex';
import HmacSHA256 from 'crypto-js/hmac-sha256';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -34,6 +33,7 @@ import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { LicenseStatus } from 'types/api/licensesV3/getActive';
import { Userpilot } from 'userpilot';
import { extractDomain } from 'utils/app';
import { Home } from './pageComponents';
@@ -84,9 +84,9 @@ function App(): JSX.Element {
email,
name: displayName,
company_name: orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
role,
@@ -94,9 +94,9 @@ function App(): JSX.Element {
const groupTraits = {
name: orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
};
@@ -111,23 +111,37 @@ function App(): JSX.Element {
if (window && window.Appcues) {
window.Appcues.identify(id, {
name: displayName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
companyName: orgName,
email,
paidUser: !!trialInfo?.trialConvertedToSubscription,
});
}
Userpilot.identify(email, {
email,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.identify(id, {
email,
name: displayName,
orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -135,9 +149,9 @@ function App(): JSX.Element {
posthog?.group('company', orgId, {
name: orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -256,20 +270,11 @@ function App(): JSX.Element {
!showAddCreditCardModal &&
(isCloudUser || isEnterpriseSelfHostedUser)
) {
const email = user.email || '';
const secret = process.env.PYLON_IDENTITY_SECRET || '';
let emailHash = '';
if (email && secret) {
emailHash = HmacSHA256(email, Hex.parse(secret)).toString(Hex);
}
window.pylon = {
chat_settings: {
app_id: process.env.PYLON_APP_ID,
email: user.email,
name: user.displayName || user.email,
email_hash: emailHash,
name: user.displayName,
},
};
}
@@ -303,6 +308,10 @@ function App(): JSX.Element {
});
}
if (process.env.USERPILOT_KEY) {
Userpilot.initialize(process.env.USERPILOT_KEY);
}
if (!isSentryInitialized) {
Sentry.init({
dsn: process.env.SENTRY_DSN,
@@ -363,6 +372,7 @@ function App(): JSX.Element {
<Router history={history}>
<CompatRouter>
<KBarCommandPaletteProvider>
<UserpilotRouteTracker />
<KBarCommandPalette />
<NotificationProvider>
<ErrorModalProvider>

View File

@@ -1,8 +1,6 @@
import { LogEventAxiosInstance as axios } from 'api';
import getLocalStorageApi from 'api/browser/localstorage/get';
import { ApiBaseInstance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { EventSuccessPayloadProps } from 'types/api/events/types';
@@ -13,14 +11,9 @@ const logEvent = async (
rateLimited?: boolean,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
try {
// add deployment_url and user_email to attributes
// add tenant_url to attributes
const { hostname } = window.location;
const userEmail = getLocalStorageApi(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
const updatedAttributes = {
...attributes,
deployment_url: hostname,
user_email: userEmail,
};
const updatedAttributes = { ...attributes, tenant_url: hostname };
const response = await axios.post('/event', {
eventName,
attributes: updatedAttributes,
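
Net effect of this hunk: every event posted through logEvent is enriched with the current hostname (tenant_url on one side of the hunk, deployment_url plus user_email on the other). A hypothetical call against the tenant_url variant, purely for illustration:

// Hypothetical usage; the event name, attributes, and import path are made up.
// import logEvent from 'api/common/logEvent';
void logEvent('Panel Viewed', { panelType: 'graph' });
// Request body sketch:
// { eventName: 'Panel Viewed',
//   attributes: { panelType: 'graph', tenant_url: window.location.hostname } }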


@@ -1,11 +1,13 @@
/* eslint-disable sonarjs/no-duplicate-string */
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { getFieldKeys } from '../getFieldKeys';
// Mock the API instance
jest.mock('api', () => ({
get: jest.fn(),
ApiBaseInstance: {
get: jest.fn(),
},
}));
describe('getFieldKeys API', () => {
@@ -29,33 +31,33 @@ describe('getFieldKeys API', () => {
it('should call API with correct parameters when no args provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call function with no parameters
await getFieldKeys();
// Verify API was called correctly with empty params object
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: {},
});
});
it('should call API with signal parameter when provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call function with signal parameter
await getFieldKeys('traces');
// Verify API was called with signal parameter
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'traces' },
});
});
it('should call API with name parameter when provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -70,14 +72,14 @@ describe('getFieldKeys API', () => {
await getFieldKeys(undefined, 'service');
// Verify API was called with name parameter
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { name: 'service' },
});
});
it('should call API with both signal and name when provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -92,14 +94,14 @@ describe('getFieldKeys API', () => {
await getFieldKeys('logs', 'service');
// Verify API was called with both parameters
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'logs', name: 'service' },
});
});
it('should return properly formatted response', async () => {
// Mock API to return our response
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call the function
const result = await getFieldKeys('traces');


@@ -1,11 +1,13 @@
/* eslint-disable sonarjs/no-duplicate-string */
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { getFieldValues } from '../getFieldValues';
// Mock the API instance
jest.mock('api', () => ({
get: jest.fn(),
ApiBaseInstance: {
get: jest.fn(),
},
}));
describe('getFieldValues API', () => {
@@ -15,7 +17,7 @@ describe('getFieldValues API', () => {
it('should call the API with correct parameters (no options)', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -32,14 +34,14 @@ describe('getFieldValues API', () => {
await getFieldValues();
// Verify API was called correctly with empty params
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: {},
});
});
it('should call the API with signal parameter', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -56,14 +58,14 @@ describe('getFieldValues API', () => {
await getFieldValues('traces');
// Verify API was called with signal parameter
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { signal: 'traces' },
});
});
it('should call the API with name parameter', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -80,14 +82,14 @@ describe('getFieldValues API', () => {
await getFieldValues(undefined, 'service.name');
// Verify API was called with name parameter
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name' },
});
});
it('should call the API with value parameter', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -104,14 +106,14 @@ describe('getFieldValues API', () => {
await getFieldValues(undefined, 'service.name', 'front');
// Verify API was called with value parameter
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name', searchText: 'front' },
});
});
it('should call the API with time range parameters', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -136,7 +138,7 @@ describe('getFieldValues API', () => {
);
// Verify API was called with time range parameters (converted to milliseconds)
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: {
signal: 'logs',
name: 'service.name',
@@ -163,7 +165,7 @@ describe('getFieldValues API', () => {
},
};
(axios.get as jest.Mock).mockResolvedValueOnce(mockResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);
// Call the function
const result = await getFieldValues('traces', 'mixed.values');
@@ -194,7 +196,7 @@ describe('getFieldValues API', () => {
};
// Mock API to return our response
(axios.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
// Call the function
const result = await getFieldValues('traces', 'service.name');


@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -24,7 +24,7 @@ export const getFieldKeys = async (
}
try {
const response = await axios.get('/fields/keys', { params });
const response = await ApiBaseInstance.get('/fields/keys', { params });
return {
httpStatusCode: response.status,


@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -47,7 +47,7 @@ export const getFieldValues = async (
}
try {
const response = await axios.get('/fields/values', { params });
const response = await ApiBaseInstance.get('/fields/values', { params });
// Normalize values from different types (stringValues, boolValues, etc.)
if (response.data?.data?.values) {
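
The comment in this hunk mentions normalizing values of different types, but the response buckets themselves are not shown here. A sketch of what such a normalization typically looks like; only stringValues and boolValues are named above, so numberValues and the overall shape are assumptions:

// Sketch (assumption): flatten typed value buckets into one list of strings.
// numberValues stands in for the "etc." and may not match the real response shape.
interface FieldValueBuckets {
  stringValues?: string[];
  boolValues?: boolean[];
  numberValues?: number[];
}

function normalizeFieldValues(values: FieldValueBuckets): string[] {
  return [
    ...(values.stringValues ?? []),
    ...(values.boolValues ?? []).map(String),
    ...(values.numberValues ?? []).map(String),
  ];
}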


@@ -86,9 +86,8 @@ const interceptorRejected = async (
if (
response.status === 401 &&
// if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
response.config.url !== '/sessions/rotate' &&
response.config.url !== '/sessions/email_password' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
)
@@ -200,15 +199,15 @@ ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
//
// axios Base
export const LogEventAxiosInstance = axios.create({
export const ApiBaseInstance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});
LogEventAxiosInstance.interceptors.response.use(
ApiBaseInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejectedBase,
);
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1
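
Two things happen in this file: the base axios instance is renamed between LogEventAxiosInstance and ApiBaseInstance, and the comment on the 401 handling is tightened. The guard both comment variants describe, restated as a standalone predicate for readability (a sketch, not the repository's code; one side of the hunk additionally excludes /sessions/email_password):

// Sketch (assumption): the no-retry guard from the hunk above as a predicate.
// The function name is hypothetical; only the status/URL/method checks come from the diff.
import { AxiosResponse } from 'axios';

function isNonRetryableSessionFailure(response: AxiosResponse): boolean {
  const isRotateCall = response.config.url === '/sessions/rotate';
  const isSessionDelete =
    response.config.url === '/sessions' && response.config.method === 'delete';
  // A 401 from rotating or deleting the session means the session itself is gone,
  // so retrying with a refreshed token would only loop.
  return response.status === 401 && (isRotateCall || isSessionDelete);
}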


@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
@@ -17,7 +17,7 @@ export const getHostAttributeKeys = async (
try {
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await axios.get(
}> = await ApiBaseInstance.get(
`/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
{
params: {


@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -20,7 +20,7 @@ const getOnboardingStatus = async (props: {
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
const { endpointService, ...rest } = props;
try {
const response = await axios.post(
const response = await ApiBaseInstance.post(
`/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
rest,
);


@@ -1,20 +1,13 @@
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import axios from 'api';
import { PayloadProps, Props } from 'types/api/metrics/getService';
const getService = async (props: Props): Promise<PayloadProps> => {
try {
const response = await ApiV2Instance.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
const response = await axios.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data;
};
export default getService;


@@ -1,27 +1,22 @@
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import axios from 'api';
import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';
const getTopOperations = async (props: Props): Promise<PayloadProps> => {
try {
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';
const response = await ApiV2Instance.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
limit: 5000,
});
const response = await axios.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
});
if (props.isEntryPoint) {
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
return response.data;
};
export default getTopOperations;


@@ -9,7 +9,6 @@ export interface UpdateMetricMetadataProps {
metricType: MetricType;
temporality?: Temporality;
isMonotonic?: boolean;
unit?: string;
}
export interface UpdateMetricMetadataResponse {


@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -9,7 +9,7 @@ const getCustomFilters = async (
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const { signal } = props;
try {
const response = await axios.get(`/orgs/me/filters/${signal}`);
const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);
return {
statusCode: 200,


@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
@@ -6,7 +6,7 @@ import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFil
const updateCustomFiltersAPI = async (
props: UpdateCustomFiltersProps,
): Promise<SuccessResponse<void> | AxiosError> =>
axios.put(`/orgs/me/filters`, {
ApiBaseInstance.put(`orgs/me/filters`, {
...props.data,
});


@@ -8,7 +8,7 @@ const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDurationDays,
coldStorageDuration,
ttlConditions,
}: PropsV2): Promise<SuccessResponseV2<PayloadPropsV2>> => {
try {
@@ -16,7 +16,7 @@ const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDurationDays,
coldStorageDuration,
ttlConditions,
});


@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -9,12 +9,15 @@ const listOverview = async (
): Promise<SuccessResponseV2<PayloadProps>> => {
const { start, end, show_ip: showIp, filter } = props;
try {
const response = await axios.post(`/third-party-apis/overview/list`, {
start,
end,
show_ip: showIp,
filter,
});
const response = await ApiBaseInstance.post(
`/third-party-apis/overview/list`,
{
start,
end,
show_ip: showIp,
filter,
},
);
return {
httpStatusCode: response.status,


@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
GetSpanPercentilesProps,
GetSpanPercentilesResponseDataProps,
} from 'types/api/trace/getSpanPercentiles';
const getSpanPercentiles = async (
props: GetSpanPercentilesProps,
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
try {
const response = await axios.post('/span_percentile', {
...props,
});
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};
export default getSpanPercentiles;


@@ -11,7 +11,7 @@ import {
export const getQueryRangeV5 = async (
props: QueryRangePayloadV5,
version: string,
signal?: AbortSignal,
signal: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
try {


@@ -1,30 +1,30 @@
interface ConfigureIconProps {
width?: number;
height?: number;
color?: string;
fill?: string;
}
function ConfigureIcon({
width,
height,
color,
fill,
}: ConfigureIconProps): JSX.Element {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width={width}
height={height}
fill="none"
fill={fill}
>
<path
stroke={color}
stroke="#C0C1C3"
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="1.333"
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
/>
<path
stroke={color}
stroke="#C0C1C3"
strokeLinecap="round"
strokeWidth="1.333"
d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3m5.417 7.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"
@@ -36,6 +36,6 @@ function ConfigureIcon({
ConfigureIcon.defaultProps = {
width: 16,
height: 16,
color: 'currentColor',
fill: 'none',
};
export default ConfigureIcon;


@@ -1,371 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { getYAxisFormattedValue, PrecisionOptionsEnum } from '../yAxisConfig';
const testFullPrecisionGetYAxisFormattedValue = (
value: string,
format: string,
): string => getYAxisFormattedValue(value, format, PrecisionOptionsEnum.FULL);
describe('getYAxisFormattedValue - none (full precision legacy assertions)', () => {
test('large integers and decimals', () => {
expect(testFullPrecisionGetYAxisFormattedValue('250034', 'none')).toBe(
'250034',
);
expect(
testFullPrecisionGetYAxisFormattedValue('250034897.12345', 'none'),
).toBe('250034897.12345');
expect(
testFullPrecisionGetYAxisFormattedValue('250034897.02354', 'none'),
).toBe('250034897.02354');
expect(testFullPrecisionGetYAxisFormattedValue('9999999.9999', 'none')).toBe(
'9999999.9999',
);
});
test('preserves leading zeros after decimal until first non-zero', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1.0000234', 'none')).toBe(
'1.0000234',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.00003', 'none')).toBe(
'0.00003',
);
});
test('trims to three significant decimals and removes trailing zeros', () => {
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'none'),
).toBe('0.000000250034');
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'none')).toBe(
'0.00000025',
);
// Big precision, limiting the javascript precision (~16 digits)
expect(
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'none'),
).toBe('1');
expect(
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'none'),
).toBe('1.005555555595958');
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
'0.000000001',
);
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250000', 'none'),
).toBe('0.00000025');
});
test('whole numbers normalize', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'none')).toBe('1000');
expect(testFullPrecisionGetYAxisFormattedValue('99.5458', 'none')).toBe(
'99.5458',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.234567', 'none')).toBe(
'1.234567',
);
expect(testFullPrecisionGetYAxisFormattedValue('99.998', 'none')).toBe(
'99.998',
);
});
test('strip redundant decimal zeros', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000.000', 'none')).toBe(
'1000',
);
expect(testFullPrecisionGetYAxisFormattedValue('99.500', 'none')).toBe(
'99.5',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.000', 'none')).toBe('1');
});
test('edge values', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'none')).toBe('0');
expect(testFullPrecisionGetYAxisFormattedValue('-0', 'none')).toBe('0');
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'none')).toBe('∞');
expect(testFullPrecisionGetYAxisFormattedValue('-Infinity', 'none')).toBe(
'-∞',
);
expect(testFullPrecisionGetYAxisFormattedValue('invalid', 'none')).toBe(
'NaN',
);
expect(testFullPrecisionGetYAxisFormattedValue('', 'none')).toBe('NaN');
expect(testFullPrecisionGetYAxisFormattedValue('abc123', 'none')).toBe('NaN');
});
test('small decimals keep precision as-is', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'none')).toBe(
'0.0001',
);
expect(testFullPrecisionGetYAxisFormattedValue('-0.0001', 'none')).toBe(
'-0.0001',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
'0.000000001',
);
});
test('simple decimals preserved', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.1', 'none')).toBe('0.1');
expect(testFullPrecisionGetYAxisFormattedValue('0.2', 'none')).toBe('0.2');
expect(testFullPrecisionGetYAxisFormattedValue('0.3', 'none')).toBe('0.3');
expect(testFullPrecisionGetYAxisFormattedValue('1.0000000001', 'none')).toBe(
'1.0000000001',
);
});
});
describe('getYAxisFormattedValue - units (full precision legacy assertions)', () => {
test('ms', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'ms')).toBe('1.5 s');
expect(testFullPrecisionGetYAxisFormattedValue('500', 'ms')).toBe('500 ms');
expect(testFullPrecisionGetYAxisFormattedValue('60000', 'ms')).toBe('1 min');
expect(testFullPrecisionGetYAxisFormattedValue('295.429', 'ms')).toBe(
'295.429 ms',
);
expect(testFullPrecisionGetYAxisFormattedValue('4353.81', 'ms')).toBe(
'4.35381 s',
);
});
test('s', () => {
expect(testFullPrecisionGetYAxisFormattedValue('90', 's')).toBe('1.5 mins');
expect(testFullPrecisionGetYAxisFormattedValue('30', 's')).toBe('30 s');
expect(testFullPrecisionGetYAxisFormattedValue('3600', 's')).toBe('1 hour');
});
test('m', () => {
expect(testFullPrecisionGetYAxisFormattedValue('90', 'm')).toBe('1.5 hours');
expect(testFullPrecisionGetYAxisFormattedValue('30', 'm')).toBe('30 min');
expect(testFullPrecisionGetYAxisFormattedValue('1440', 'm')).toBe('1 day');
});
test('bytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'bytes')).toBe(
'1 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'bytes')).toBe('512 B');
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'bytes')).toBe(
'1.5 KiB',
);
});
test('mbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'mbytes')).toBe(
'1 GiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'mbytes')).toBe(
'512 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'mbytes')).toBe(
'1.5 GiB',
);
});
test('kbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'kbytes')).toBe(
'1 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'kbytes')).toBe(
'512 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'kbytes')).toBe(
'1.5 MiB',
);
});
test('short', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'short')).toBe('1 K');
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'short')).toBe(
'1.5 K',
);
expect(testFullPrecisionGetYAxisFormattedValue('999', 'short')).toBe('999');
expect(testFullPrecisionGetYAxisFormattedValue('1000000', 'short')).toBe(
'1 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1555600', 'short')).toBe(
'1.5556 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999', 'short')).toBe(
'999.999 K',
);
expect(testFullPrecisionGetYAxisFormattedValue('1000000000', 'short')).toBe(
'1 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1500000000', 'short')).toBe(
'1.5 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999999', 'short')).toBe(
'999.999999 Mil',
);
});
test('percent', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.15', 'percent')).toBe(
'0.15%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.1234', 'percent')).toBe(
'0.1234%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.123499', 'percent')).toBe(
'0.123499%',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.5', 'percent')).toBe(
'1.5%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'percent')).toBe(
'0.0001%',
);
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000001', 'percent'),
).toBe('1e-9%');
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'percent'),
).toBe('0.000000250034%');
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'percent')).toBe(
'0.00000025%',
);
// Big precision, limiting the javascript precision (~16 digits)
expect(
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'percent'),
).toBe('1%');
expect(
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'percent'),
).toBe('1.005555555595959%');
});
test('ratio', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.5', 'ratio')).toBe(
'0.5 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.25', 'ratio')).toBe(
'1.25 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('2.0', 'ratio')).toBe(
'2 ratio',
);
});
test('temperature units', () => {
expect(testFullPrecisionGetYAxisFormattedValue('25', 'celsius')).toBe(
'25 °C',
);
expect(testFullPrecisionGetYAxisFormattedValue('0', 'celsius')).toBe('0 °C');
expect(testFullPrecisionGetYAxisFormattedValue('-10', 'celsius')).toBe(
'-10 °C',
);
expect(testFullPrecisionGetYAxisFormattedValue('77', 'fahrenheit')).toBe(
'77 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('32', 'fahrenheit')).toBe(
'32 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('14', 'fahrenheit')).toBe(
'14 °F',
);
});
test('ms edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'ms')).toBe('0 ms');
expect(testFullPrecisionGetYAxisFormattedValue('-1500', 'ms')).toBe('-1.5 s');
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'ms')).toBe('∞');
});
test('bytes edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'bytes')).toBe('0 B');
expect(testFullPrecisionGetYAxisFormattedValue('-1024', 'bytes')).toBe(
'-1 KiB',
);
});
});
describe('getYAxisFormattedValue - precision option tests', () => {
test('precision 0 drops decimal part', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 0)).toBe('1');
expect(getYAxisFormattedValue('0.9999', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('12345.6789', 'none', 0)).toBe('12345');
expect(getYAxisFormattedValue('0.0000123456', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1000.000', 'none', 0)).toBe('1000');
expect(getYAxisFormattedValue('0.000000250034', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 0)).toBe('1');
// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 0)).toBe('4 s');
});
test('precision 1,2,3,4 decimals', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 1)).toBe('1.2');
expect(getYAxisFormattedValue('1.2345', 'none', 2)).toBe('1.23');
expect(getYAxisFormattedValue('1.2345', 'none', 3)).toBe('1.234');
expect(getYAxisFormattedValue('1.2345', 'none', 4)).toBe('1.2345');
expect(getYAxisFormattedValue('0.0000123456', 'none', 1)).toBe('0.00001');
expect(getYAxisFormattedValue('0.0000123456', 'none', 2)).toBe('0.000012');
expect(getYAxisFormattedValue('0.0000123456', 'none', 3)).toBe('0.0000123');
expect(getYAxisFormattedValue('0.0000123456', 'none', 4)).toBe('0.00001234');
expect(getYAxisFormattedValue('1000.000', 'none', 1)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 2)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 3)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 4)).toBe('1000');
expect(getYAxisFormattedValue('0.000000250034', 'none', 1)).toBe('0.0000002');
expect(getYAxisFormattedValue('0.000000250034', 'none', 2)).toBe(
'0.00000025',
); // leading zeros + 2 significant => same trimmed
expect(getYAxisFormattedValue('0.000000250034', 'none', 3)).toBe(
'0.00000025',
);
expect(getYAxisFormattedValue('0.000000250304', 'none', 4)).toBe(
'0.0000002503',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 1)).toBe(
'1.005',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 2)).toBe(
'1.0055',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 3)).toBe(
'1.00555',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 4)).toBe(
'1.005555',
);
// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 1)).toBe('4.4 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 2)).toBe('4.35 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 3)).toBe('4.354 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 4)).toBe('4.3538 s');
// Percentages
expect(getYAxisFormattedValue('0.123456', 'percent', 2)).toBe('0.12%');
expect(getYAxisFormattedValue('0.123456', 'percent', 4)).toBe('0.1235%'); // approximation
});
test('precision full uses up to DEFAULT_SIGNIFICANT_DIGITS significant digits', () => {
expect(
getYAxisFormattedValue(
'0.00002625429914148441',
'none',
PrecisionOptionsEnum.FULL,
),
).toBe('0.000026254299141');
expect(
getYAxisFormattedValue(
'0.000026254299141484417',
's',
PrecisionOptionsEnum.FULL,
),
).toBe('26.254299141484417 µs');
expect(
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
).toBe('4.35381 s');
expect(getYAxisFormattedValue('500', 'ms', PrecisionOptionsEnum.FULL)).toBe(
'500 ms',
);
});
});


@@ -149,7 +149,6 @@ export const getGraphOptions = (
scales: {
x: {
stacked: isStacked,
offset: false,
grid: {
display: true,
color: getGridColor(),


@@ -1,154 +1,58 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { formattedValueToString, getValueFormat } from '@grafana/data';
import * as Sentry from '@sentry/react';
import { isNaN } from 'lodash-es';
const DEFAULT_SIGNIFICANT_DIGITS = 15;
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const MAX_DECIMALS = 15;
export enum PrecisionOptionsEnum {
ZERO = 0,
ONE = 1,
TWO = 2,
THREE = 3,
FOUR = 4,
FULL = 'full',
}
export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
/**
* Formats a number for display, preserving leading zeros after the decimal point
* and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
* It avoids scientific notation and removes unnecessary trailing zeros.
*
* @example
* formatDecimalWithLeadingZeros(1.2345); // "1.2345"
* formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
* formatDecimalWithLeadingZeros(5.0); // "5"
*
* @param value The number to format.
* @returns The formatted string.
*/
const formatDecimalWithLeadingZeros = (
value: number,
precision: PrecisionOption,
): string => {
if (value === 0) {
return '0';
}
// Use toLocaleString to get a full decimal representation without scientific notation.
const numStr = value.toLocaleString('en-US', {
useGrouping: false,
maximumFractionDigits: 20,
});
const [integerPart, decimalPart = ''] = numStr.split('.');
// If there's no decimal part, the integer part is the result.
if (!decimalPart) {
return integerPart;
}
// Find the index of the first non-zero digit in the decimal part.
const firstNonZeroIndex = decimalPart.search(/[^0]/);
// If the decimal part consists only of zeros, return just the integer part.
if (firstNonZeroIndex === -1) {
return integerPart;
}
// Determine the number of decimals to keep: leading zeros + up to N significant digits.
const significantDigits =
precision === PrecisionOptionsEnum.FULL
? DEFAULT_SIGNIFICANT_DIGITS
: precision;
const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
// If precision is 0, we drop the decimal part entirely.
if (precision === 0) {
return integerPart;
}
// Remove any trailing zeros from the result to keep it clean.
const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
// Return the integer part, or the integer and decimal parts combined.
return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
};
/**
* Formats a Y-axis value based on a given format string.
*
* @param value The string value from the axis.
* @param format The format identifier (e.g. 'none', 'ms', 'bytes', 'short').
* @returns A formatted string ready for display.
*/
export const getYAxisFormattedValue = (
value: string,
format: string,
precision: PrecisionOption = 2, // default precision requested
): string => {
const numValue = parseFloat(value);
// Handle non-numeric or special values first.
if (isNaN(numValue)) return 'NaN';
if (numValue === Infinity) return '∞';
if (numValue === -Infinity) return '-∞';
// For all other standard formats, delegate to grafana/data's built-in formatter.
const computeDecimals = (): number | undefined => {
if (precision === PrecisionOptionsEnum.FULL) {
return DEFAULT_SIGNIFICANT_DIGITS;
}
return precision;
};
const fallbackFormat = (): string => {
if (precision === PrecisionOptionsEnum.FULL) return numValue.toString();
if (precision === 0) return Math.round(numValue).toString();
return precision !== undefined
? numValue
.toFixed(precision)
.replace(/(\.[0-9]*[1-9])0+$/, '$1') // trimming zeros
.replace(/\.$/, '')
: numValue.toString();
};
let decimalPrecision: number | undefined;
const parsedValue = getValueFormat(format)(
parseFloat(value),
undefined,
undefined,
undefined,
);
try {
// Use custom formatter for the 'none' format honoring precision
if (format === 'none') {
return formatDecimalWithLeadingZeros(numValue, precision);
const decimalSplitted = parsedValue.text.split('.');
if (decimalSplitted.length === 1) {
decimalPrecision = 0;
} else {
const decimalDigits = decimalSplitted[1].split('');
decimalPrecision = decimalDigits.length;
let nonZeroCtr = 0;
for (let idx = 0; idx < decimalDigits.length; idx += 1) {
if (decimalDigits[idx] !== '0') {
nonZeroCtr += 1;
if (nonZeroCtr >= 2) {
decimalPrecision = idx + 1;
}
} else if (nonZeroCtr) {
decimalPrecision = idx;
break;
}
}
}
const formatter = getValueFormat(format);
const formattedValue = formatter(numValue, computeDecimals(), undefined);
if (formattedValue.text && formattedValue.text.includes('.')) {
formattedValue.text = formatDecimalWithLeadingZeros(
parseFloat(formattedValue.text),
precision,
);
}
return formattedValueToString(formattedValue);
return formattedValueToString(
getValueFormat(format)(
parseFloat(value),
decimalPrecision,
undefined,
undefined,
),
);
} catch (error) {
Sentry.captureEvent({
message: `Error applying formatter: ${
error instanceof Error ? error.message : 'Unknown error'
}`,
level: 'error',
});
return fallbackFormat();
console.error(error);
}
return `${parseFloat(value)}`;
};
export const getToolTipValue = (
value: string | number,
format?: string,
precision?: PrecisionOption,
): string =>
getYAxisFormattedValue(value?.toString(), format || 'none', precision);
export const getToolTipValue = (value: string, format?: string): string => {
try {
return formattedValueToString(
getValueFormat(format)(parseFloat(value), undefined, undefined, undefined),
);
} catch (error) {
console.error(error);
}
return `${value}`;
};
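
The two formatter implementations in this hunk differ mainly in how many decimal digits survive and in how errors are reported (a Sentry event with a fallback formatter versus console.error with the raw value). The precision-aware variant's behaviour is pinned by the test file that appears earlier in this compare; a few of those assertions restated as a quick reference:

// Expected outputs copied from the yAxisConfig test assertions shown earlier in this diff.
getYAxisFormattedValue('0.0000123456', 'none', 2); // '0.000012'
getYAxisFormattedValue('4353.81', 'ms', 0); // '4 s'
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL); // '4.35381 s'
getYAxisFormattedValue('0.00002625429914148441', 'none', PrecisionOptionsEnum.FULL); // '0.000026254299141'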


@@ -60,14 +60,6 @@ function Metrics({
setElement,
} = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const queryPayloads = useMemo(
() =>
getHostQueryPayload(
@@ -155,13 +147,6 @@ function Metrics({
maxTimeScale: graphTimeIntervals[idx].end,
onDragSelect: (start, end) => onDragSelect(start, end, idx),
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
),
[


@@ -37,6 +37,7 @@
border-radius: 2px 0px 0px 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
border-right: none;
border-left: none;
@@ -44,12 +45,6 @@
border-bottom-right-radius: 0px;
border-top-left-radius: 0px;
border-bottom-left-radius: 0px;
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}
.close-btn {


@@ -132,9 +132,9 @@
justify-content: center;
}
.log-detail-drawer__actions {
.json-action-btn {
display: flex;
gap: 4px;
gap: 8px;
}
}


@@ -133,7 +133,7 @@ function LogDetailInner({
};
// Go to logs explorer page with the log data
const handleOpenInExplorer = (): void => {
const handleOpenInExplorer = (event: React.MouseEvent): void => {
const queryParams = {
[QueryParams.activeLogId]: `"${log?.id}"`,
[QueryParams.startTime]: minTime?.toString() || '',
@@ -146,7 +146,10 @@ function LogDetailInner({
),
),
};
safeNavigate(`${ROUTES.LOGS_EXPLORER}?${createQueryParams(queryParams)}`);
safeNavigate(
`${ROUTES.LOGS_EXPLORER}?${createQueryParams(queryParams)}`,
event,
);
};
const handleQueryExpressionChange = useCallback(
@@ -239,7 +242,7 @@ function LogDetailInner({
<Button
className="open-in-explorer-btn"
icon={<Compass size={16} />}
onClick={handleOpenInExplorer}
onClick={(event: React.MouseEvent): void => handleOpenInExplorer(event)}
>
Open in Explorer
</Button>
@@ -319,35 +322,31 @@ function LogDetailInner({
</Radio.Button>
</Radio.Group>
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title="Show Filters"
placement="topLeft"
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
)}
<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
>
{selectedView === VIEW_TYPES.JSON && (
<div className="json-action-btn">
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
onClick={handleJSONCopy}
/>
</Tooltip>
</div>
</div>
)}
{selectedView === VIEW_TYPES.CONTEXT && (
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
)}
<Tooltip title="Copy Log Link" placement="left" aria-label="Copy Log Link">
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={onLogCopy}
/>
</Tooltip>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
@@ -387,8 +386,7 @@ function LogDetailInner({
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
logLineTimestamp={log.timestamp.toString()}
/>
)}
</Drawer>
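
The thread through this file is that handleOpenInExplorer now receives the click event and forwards it to safeNavigate, which is what lets cmd/ctrl-click open the explorer in a new tab. The hook itself is not part of this diff; a minimal sketch of the pattern its call signature implies (assumed, not the actual implementation):

// Sketch (assumption): modifier-clicks open a new tab, plain clicks push history.
// The real useSafeNavigate hook lives elsewhere in the repository.
import { MouseEvent, useCallback } from 'react';
import { useHistory } from 'react-router-dom';

function useSafeNavigateSketch(): (url: string, event?: MouseEvent) => void {
  const history = useHistory();

  return useCallback(
    (url: string, event?: MouseEvent): void => {
      if (event && (event.metaKey || event.ctrlKey)) {
        // Open in a new tab without giving the new page access to window.opener.
        window.open(url, '_blank', 'noopener,noreferrer');
        return;
      }
      history.push(url);
    },
    [history],
  );
}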


@@ -6,7 +6,6 @@ import { useCopyToClipboard } from 'react-use';
function CopyClipboardHOC({
entityKey,
textToCopy,
tooltipText = 'Copy to clipboard',
children,
}: CopyClipboardHOCProps): JSX.Element {
const [value, setCopy] = useCopyToClipboard();
@@ -32,7 +31,7 @@ function CopyClipboardHOC({
<span onClick={onClick} role="presentation" tabIndex={-1}>
<Popover
placement="top"
content={<span style={{ fontSize: '0.9rem' }}>{tooltipText}</span>}
content={<span style={{ fontSize: '0.9rem' }}>Copy to clipboard</span>}
>
{children}
</Popover>
@@ -43,11 +42,7 @@ function CopyClipboardHOC({
interface CopyClipboardHOCProps {
entityKey: string | undefined;
textToCopy: string;
tooltipText?: string;
children: ReactNode;
}
export default CopyClipboardHOC;
CopyClipboardHOC.defaultProps = {
tooltipText: 'Copy to clipboard',
};


@@ -57,8 +57,8 @@ export const RawLogViewContainer = styled(Row)<{
transition: background-color 2s ease-in;`
: ''}
${({ $isCustomHighlighted }): string =>
getCustomHighlightBackground($isCustomHighlighted)}
${({ $isCustomHighlighted, $isDarkMode, $logType }): string =>
getCustomHighlightBackground($isCustomHighlighted, $isDarkMode, $logType)}
`;
export const InfoIconWrapper = styled(Info)`


@@ -153,9 +153,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
children: (
<TableBodyContent
dangerouslySetInnerHTML={{
__html: getSanitizedLogBody(field as string, {
shouldEscapeHtml: true,
}),
__html: getSanitizedLogBody(field as string),
}}
fontSize={fontSize}
linesPerRow={linesPerRow}
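
One side of this hunk passes a shouldEscapeHtml option to getSanitizedLogBody before the result reaches dangerouslySetInnerHTML. That helper is not part of this diff; the usual intent of such a flag, sketched with a generic escape function (an assumption, not the project's implementation):

// Generic escape helper for illustration only; getSanitizedLogBody itself is
// outside this diff and may behave differently.
function escapeHtml(input: string): string {
  return input
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}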


@@ -471,13 +471,11 @@ function LogsFormatOptionsMenu({
rootClassName="format-options-popover"
destroyTooltipOnHide
>
<Tooltip title="Options">
<Button
className="periscope-btn ghost"
icon={<Sliders size={14} />}
data-testid="periscope-btn-format-options"
/>
</Tooltip>
<Button
className="periscope-btn ghost"
icon={<Sliders size={14} />}
data-testid="periscope-btn-format-options"
/>
</Popover>
);
}


@@ -32,7 +32,6 @@ import { popupContainer } from 'utils/selectPopupContainer';
import { CustomMultiSelectProps, CustomTagProps, OptionData } from './types';
import {
ALL_SELECTED_VALUE,
filterOptionsBySearch,
handleScrollToBottom,
prioritizeOrAddOptionForMultiSelect,
@@ -44,6 +43,8 @@ enum ToggleTagValue {
All = 'All',
}
const ALL_SELECTED_VALUE = '__ALL__'; // Constant for the special value
const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
placeholder = 'Search...',
className,


@@ -5,8 +5,6 @@ import { OptionData } from './types';
export const SPACEKEY = ' ';
export const ALL_SELECTED_VALUE = '__ALL__'; // Constant for the special value
export const prioritizeOrAddOptionForSingleSelect = (
options: OptionData[],
value: string,


@@ -251,10 +251,6 @@
.ant-input-group-addon {
border-top-left-radius: 0px !important;
border-top-right-radius: 0px !important;
background: var(--bg-ink-300);
color: var(--bg-vanilla-400);
font-size: 12px;
font-weight: 300;
}
.ant-input {
@@ -300,10 +296,6 @@
}
}
.qb-trace-operator-button-container {
display: flex;
align-items: center;
gap: 8px;
&-text {
display: flex;
align-items: center;
@@ -406,7 +398,7 @@
}
.qb-search-container {
.metrics-container {
.metrics-select-container {
margin-bottom: 12px;
}
}


@@ -22,8 +22,6 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
showOnlyWhereClause = false,
showTraceOperator = false,
version,
onSignalSourceChange,
signalSourceChangeEnabled = false,
}: QueryBuilderProps): JSX.Element {
const {
currentQuery,
@@ -177,9 +175,6 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
queriesCount={1}
/>
) : (
currentQuery.builder.queryData.map((query, index) => (
@@ -198,10 +193,7 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
signalSource={query.source as 'meter' | ''}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
queriesCount={currentQuery.builder.queryData.length}
signalSource={config?.signalSource || ''}
/>
))
)}


@@ -98,13 +98,6 @@
border-radius: 2px;
border: 1.005px solid var(--Slate-400, #1d212d);
background: var(--Ink-300, #16181d);
color: var(--bg-vanilla-400);
font-family: 'Geist Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
}
.input-with-label {


@@ -1,23 +1,5 @@
.metrics-source-select-container {
.metrics-select-container {
margin-bottom: 8px;
display: flex;
flex-direction: row;
align-items: flex-start;
gap: 8px;
width: 100%;
.ant-select-selection-search-input {
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}
.source-selector {
width: 120px;
}
.ant-select-selector {
width: 100%;
@@ -31,11 +13,6 @@
font-weight: 400;
line-height: 20px; /* 142.857% */
min-height: 36px;
.ant-select-selection-placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}
.ant-select-dropdown {
@@ -65,7 +42,7 @@
}
.lightMode {
.metrics-source-select-container {
.metrics-select-container {
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100);


@@ -1,121 +1,34 @@
import './MetricsSelect.styles.scss';
import { Select } from 'antd';
import {
initialQueriesMap,
initialQueryMeterWithType,
PANEL_TYPES,
} from 'constants/queryBuilder';
import { AggregatorFilter } from 'container/QueryBuilder/filters';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { memo, useCallback, useMemo, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { memo } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { SelectOption } from 'types/common/select';
export const SOURCE_OPTIONS: SelectOption<string, string>[] = [
{ value: 'metrics', label: 'Metrics' },
{ value: 'meter', label: 'Meter' },
];
export const MetricsSelect = memo(function MetricsSelect({
query,
index,
version,
signalSource,
onSignalSourceChange,
signalSourceChangeEnabled = false,
}: {
query: IBuilderQuery;
index: number;
version: string;
signalSource: 'meter' | '';
onSignalSourceChange: (value: string) => void;
signalSourceChangeEnabled: boolean;
}): JSX.Element {
const [attributeKeys, setAttributeKeys] = useState<BaseAutocompleteData[]>([]);
const { handleChangeAggregatorAttribute } = useQueryOperations({
index,
query,
entityVersion: version,
});
const handleAggregatorAttributeChange = useCallback(
(value: BaseAutocompleteData, isEditMode?: boolean) => {
handleChangeAggregatorAttribute(value, isEditMode, attributeKeys || []);
},
[handleChangeAggregatorAttribute, attributeKeys],
);
const { updateAllQueriesOperators, handleSetQueryData } = useQueryBuilder();
const source = useMemo(
() => (signalSource === 'meter' ? 'meter' : 'metrics'),
[signalSource],
);
const defaultMeterQuery = useMemo(
() =>
updateAllQueriesOperators(
initialQueryMeterWithType,
PANEL_TYPES.BAR,
DataSource.METRICS,
'meter' as 'meter' | '',
),
[updateAllQueriesOperators],
);
const defaultMetricsQuery = useMemo(
() =>
updateAllQueriesOperators(
initialQueriesMap.metrics,
PANEL_TYPES.BAR,
DataSource.METRICS,
'',
),
[updateAllQueriesOperators],
);
const handleSignalSourceChange = (value: string): void => {
onSignalSourceChange(value);
handleSetQueryData(
index,
value === 'meter'
? {
...defaultMeterQuery.builder.queryData[0],
source: 'meter',
queryName: query.queryName,
}
: {
...defaultMetricsQuery.builder.queryData[0],
source: '',
queryName: query.queryName,
},
);
};
return (
<div className="metrics-source-select-container">
{signalSourceChangeEnabled && (
<Select
className="source-selector"
placeholder="Source"
options={SOURCE_OPTIONS}
value={source}
defaultValue="metrics"
onChange={handleSignalSourceChange}
/>
)}
<div className="metrics-select-container">
<AggregatorFilter
onChange={handleAggregatorAttributeChange}
onChange={handleChangeAggregatorAttribute}
query={query}
index={index}
signalSource={signalSource || ''}
setAttributeKeys={setAttributeKeys}
/>
</div>
);


@@ -236,10 +236,6 @@
background: var(--bg-ink-100) !important;
opacity: 0.5 !important;
}
.cm-activeLine > span {
font-size: 12px !important;
}
}
}
@@ -275,9 +271,6 @@
box-sizing: border-box;
position: relative;
.cm-placeholder {
font-size: 12px !important;
}
}
}


@@ -500,10 +500,7 @@ function QueryAddOns({
}
value={addOn}
>
<div
className="add-on-tab-title"
data-testid={`query-add-on-${addOn.key}`}
>
<div className="add-on-tab-title">
{addOn.icon}
{addOn.label}
</div>


@@ -20,8 +20,6 @@
border-radius: 2px;
flex: 1;
min-width: 0;
font-size: 12px;
color: var(--bg-vanilla-400) !important;
&.error {
.cm-editor {
@@ -233,9 +231,6 @@
.query-aggregation-interval-input {
input {
max-width: 120px;
&::placeholder {
color: var(--bg-vanilla-400);
}
}
}
}


@@ -1,7 +0,0 @@
.add-trace-operator-button,
.add-new-query-button,
.add-formula-button {
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
}


@@ -1,75 +1,7 @@
import './QueryFooter.styles.scss';
/* eslint-disable react/require-default-props */
import { Button, Tooltip, Typography } from 'antd';
import WarningPopover from 'components/WarningPopover/WarningPopover';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { DraftingCompass, Plus, Sigma } from 'lucide-react';
import BetaTag from 'periscope/components/BetaTag/BetaTag';
import { useMemo } from 'react';
function TraceOperatorSection({
addTraceOperator,
}: {
addTraceOperator?: () => void;
}): JSX.Element {
const { currentQuery, panelType } = useQueryBuilder();
const showTraceOperatorWarning = useMemo(() => {
const isListViewPanel =
panelType === PANEL_TYPES.LIST || panelType === PANEL_TYPES.TRACE;
const hasMultipleQueries = currentQuery.builder.queryData.length > 1;
const hasTraceOperator =
currentQuery.builder.queryTraceOperator &&
currentQuery.builder.queryTraceOperator.length > 0;
return isListViewPanel && hasMultipleQueries && !hasTraceOperator;
}, [
currentQuery?.builder?.queryData,
currentQuery?.builder?.queryTraceOperator,
panelType,
]);
const traceOperatorWarning = useMemo(() => {
if (currentQuery.builder.queryData.length === 0) return '';
const firstQuery = currentQuery.builder.queryData[0];
return `Currently, you are only seeing results from query ${firstQuery.queryName}. Add a trace operator to combine results of multiple queries.`;
}, [currentQuery]);
return (
<div className="qb-trace-operator-button-container">
<Tooltip
title={
<div style={{ textAlign: 'center' }}>
Add Trace Matching
<Typography.Link
href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-trace-operators"
target="_blank"
style={{ textDecoration: 'underline' }}
>
{' '}
<br />
Learn more
</Typography.Link>
</div>
}
>
<Button
className="add-trace-operator-button periscope-btn"
icon={<DraftingCompass size={16} />}
onClick={(): void => addTraceOperator?.()}
>
<div className="qb-trace-operator-button-container-text">
Add Trace Matching
<BetaTag />
</div>
</Button>
</Tooltip>
{showTraceOperatorWarning && (
<WarningPopover message={traceOperatorWarning} />
)}
</div>
);
}
export default function QueryFooter({
addNewBuilderQuery,
@@ -90,7 +22,8 @@ export default function QueryFooter({
<div className="qb-add-new-query">
<Tooltip title={<div style={{ textAlign: 'center' }}>Add New Query</div>}>
<Button
className="add-new-query-button periscope-btn "
className="add-new-query-button periscope-btn secondary"
type="text"
icon={<Plus size={16} />}
onClick={addNewBuilderQuery}
/>
@@ -116,7 +49,7 @@ export default function QueryFooter({
}
>
<Button
className="add-formula-button periscope-btn "
className="add-formula-button periscope-btn secondary"
icon={<Sigma size={16} />}
onClick={addNewFormula}
>
@@ -126,7 +59,35 @@ export default function QueryFooter({
</div>
)}
{showAddTraceOperator && (
<TraceOperatorSection addTraceOperator={addTraceOperator} />
<div className="qb-trace-operator-button-container">
<Tooltip
title={
<div style={{ textAlign: 'center' }}>
Add Trace Matching
<Typography.Link
href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-trace-operators"
target="_blank"
style={{ textDecoration: 'underline' }}
>
{' '}
<br />
Learn more
</Typography.Link>
</div>
}
>
<Button
className="add-trace-operator-button periscope-btn secondary"
icon={<DraftingCompass size={16} />}
onClick={(): void => addTraceOperator?.()}
>
<div className="qb-trace-operator-button-container-text">
Add Trace Matching
<BetaTag />
</div>
</Button>
</Tooltip>
</div>
)}
</div>
</div>


@@ -12,7 +12,6 @@ import {
startCompletion,
} from '@codemirror/autocomplete';
import { javascript } from '@codemirror/lang-javascript';
import * as Sentry from '@sentry/react';
import { Color } from '@signozhq/design-tokens';
import { copilot } from '@uiw/codemirror-theme-copilot';
import { githubLight } from '@uiw/codemirror-theme-github';
@@ -80,16 +79,6 @@ const stopEventsExtension = EditorView.domEventHandlers({
},
});
interface QuerySearchProps {
placeholder?: string;
onChange: (value: string) => void;
queryData: IBuilderQuery;
dataSource: DataSource;
signalSource?: string;
hardcodedAttributeKeys?: QueryKeyDataSuggestionsProps[];
onRun?: (query: string) => void;
}
function QuerySearch({
placeholder,
onChange,
@@ -98,8 +87,17 @@ function QuerySearch({
onRun,
signalSource,
hardcodedAttributeKeys,
}: QuerySearchProps): JSX.Element {
}: {
placeholder?: string;
onChange: (value: string) => void;
queryData: IBuilderQuery;
dataSource: DataSource;
signalSource?: string;
hardcodedAttributeKeys?: QueryKeyDataSuggestionsProps[];
onRun?: (query: string) => void;
}): JSX.Element {
const isDarkMode = useIsDarkMode();
const [query, setQuery] = useState<string>(queryData.filter?.expression || '');
const [valueSuggestions, setValueSuggestions] = useState<any[]>([]);
const [activeKey, setActiveKey] = useState<string>('');
const [isLoadingSuggestions, setIsLoadingSuggestions] = useState(false);
@@ -109,12 +107,8 @@ function QuerySearch({
message: '',
errors: [],
});
const isProgrammaticChangeRef = useRef(false);
const [isEditorReady, setIsEditorReady] = useState(false);
const [isFocused, setIsFocused] = useState(false);
const editorRef = useRef<EditorView | null>(null);
const handleQueryValidation = useCallback((newQuery: string): void => {
const handleQueryValidation = (newQuery: string): void => {
try {
const validationResponse = validateQuery(newQuery);
setValidation(validationResponse);
@@ -125,67 +119,29 @@ function QuerySearch({
errors: [error as IDetailedError],
});
}
}, []);
};
const getCurrentQuery = useCallback(
(): string => editorRef.current?.state.doc.toString() || '',
[],
);
// Track if the query was changed externally (from queryData) vs internally (user input)
const [isExternalQueryChange, setIsExternalQueryChange] = useState(false);
const [lastExternalQuery, setLastExternalQuery] = useState<string>('');
const updateEditorValue = useCallback(
(value: string, options: { skipOnChange?: boolean } = {}): void => {
const view = editorRef.current;
if (!view) return;
useEffect(() => {
const newQuery = queryData.filter?.expression || '';
// Only mark as external change if the query actually changed from external source
if (newQuery !== lastExternalQuery) {
setQuery(newQuery);
setIsExternalQueryChange(true);
setLastExternalQuery(newQuery);
}
}, [queryData.filter?.expression, lastExternalQuery]);
const currentValue = view.state.doc.toString();
if (currentValue === value) return;
if (options.skipOnChange) {
isProgrammaticChangeRef.current = true;
}
view.dispatch({
changes: {
from: 0,
to: currentValue.length,
insert: value,
},
selection: {
anchor: value.length,
},
});
},
[],
);
const handleEditorCreate = useCallback((view: EditorView): void => {
editorRef.current = view;
setIsEditorReady(true);
}, []);
useEffect(
() => {
if (!isEditorReady) return;
const newQuery = queryData.filter?.expression || '';
const currentQuery = getCurrentQuery();
/* eslint-disable-next-line sonarjs/no-collapsible-if */
if (newQuery !== currentQuery && !isFocused) {
// Prevent clearing a non-empty editor when queryData becomes empty temporarily
// Only update if newQuery has a value, or if both are empty (initial state)
if (newQuery || !currentQuery) {
updateEditorValue(newQuery, { skipOnChange: true });
if (newQuery) {
handleQueryValidation(newQuery);
}
}
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[isEditorReady, queryData.filter?.expression, isFocused],
);
// Validate query when it changes externally (from queryData)
useEffect(() => {
if (isExternalQueryChange && query) {
handleQueryValidation(query);
setIsExternalQueryChange(false);
}
}, [isExternalQueryChange, query]);
const [keySuggestions, setKeySuggestions] = useState<
QueryKeyDataSuggestionsProps[] | null
@@ -194,6 +150,7 @@ function QuerySearch({
const [showExamples] = useState(false);
const [cursorPos, setCursorPos] = useState({ line: 0, ch: 0 });
const [isFocused, setIsFocused] = useState(false);
const [
isFetchingCompleteValuesList,
@@ -202,6 +159,8 @@ function QuerySearch({
const lastPosRef = useRef<{ line: number; ch: number }>({ line: 0, ch: 0 });
// Reference to the editor view for programmatic autocompletion
const editorRef = useRef<EditorView | null>(null);
const lastKeyRef = useRef<string>('');
const lastFetchedKeyRef = useRef<string>('');
const lastValueRef = useRef<string>('');
@@ -547,7 +506,6 @@ function QuerySearch({
if (!editorRef.current) {
editorRef.current = viewUpdate.view;
setIsEditorReady(true);
}
const selection = viewUpdate.view.state.selection.main;
@@ -563,15 +521,7 @@ function QuerySearch({
const lastPos = lastPosRef.current;
if (newPos.line !== lastPos.line || newPos.ch !== lastPos.ch) {
setCursorPos((lastPos) => {
if (newPos.ch !== lastPos.ch && newPos.ch === 0) {
Sentry.captureEvent({
message: `Cursor jumped to start of line from ${lastPos.ch} to ${newPos.ch}`,
level: 'warning',
});
}
return newPos;
});
setCursorPos(newPos);
lastPosRef.current = newPos;
if (doc) {
@@ -604,17 +554,16 @@ function QuerySearch({
}, []);
const handleChange = (value: string): void => {
if (isProgrammaticChangeRef.current) {
isProgrammaticChangeRef.current = false;
return;
}
setQuery(value);
onChange(value);
// Mark as internal change to avoid triggering external validation
setIsExternalQueryChange(false);
// Update lastExternalQuery to prevent external validation trigger
setLastExternalQuery(value);
};
const handleBlur = (): void => {
const currentQuery = getCurrentQuery();
handleQueryValidation(currentQuery);
handleQueryValidation(query);
setIsFocused(false);
};
@@ -633,11 +582,12 @@ function QuerySearch({
const handleExampleClick = (exampleQuery: string): void => {
// If there's an existing query, append the example with AND
const currentQuery = getCurrentQuery();
const newQuery = currentQuery
? `${currentQuery} AND ${exampleQuery}`
: exampleQuery;
updateEditorValue(newQuery);
const newQuery = query ? `${query} AND ${exampleQuery}` : exampleQuery;
setQuery(newQuery);
// Mark as internal change to avoid triggering external validation
setIsExternalQueryChange(false);
// Update lastExternalQuery to prevent external validation trigger
setLastExternalQuery(newQuery);
};
// Helper function to render a badge for the current context mode
@@ -672,10 +622,8 @@ function QuerySearch({
const word = context.matchBefore(/[a-zA-Z0-9_.:/?&=#%\-\[\]]*/);
if (word?.from === word?.to && !context.explicit) return null;
// Get current query from editor
const currentQuery = editorRef.current?.state.doc.toString() || '';
// Get the query context at the cursor position
const queryContext = getQueryContextAtCursor(currentQuery, cursorPos.ch);
const queryContext = getQueryContextAtCursor(query, cursorPos.ch);
// Define autocomplete options based on the context
let options: {
@@ -1171,8 +1119,7 @@ function QuerySearch({
if (queryContext.isInParenthesis) {
// Different suggestions based on the context within parenthesis or bracket
const currentQuery = editorRef.current?.state.doc.toString() || '';
const curChar = currentQuery.charAt(cursorPos.ch - 1) || '';
const curChar = query.charAt(cursorPos.ch - 1) || '';
if (curChar === '(' || curChar === '[') {
// Right after opening parenthesis/bracket
@@ -1321,7 +1268,7 @@ function QuerySearch({
style={{
position: 'absolute',
top: 8,
right: validation.isValid === false && getCurrentQuery() ? 40 : 8, // Move left when error shown
right: validation.isValid === false && query ? 40 : 8, // Move left when error shown
cursor: 'help',
zIndex: 10,
transition: 'right 0.2s ease',
@@ -1342,10 +1289,10 @@ function QuerySearch({
</Tooltip>
<CodeMirror
value={query}
theme={isDarkMode ? copilot : githubLight}
onChange={handleChange}
onUpdate={handleUpdate}
onCreateEditor={handleEditorCreate}
className={cx('query-where-clause-editor', {
isValid: validation.isValid === true,
hasErrors: validation.errors.length > 0,
@@ -1383,7 +1330,7 @@ function QuerySearch({
// Mod-Enter is usually Ctrl-Enter or Cmd-Enter based on OS
run: (): boolean => {
if (onRun && typeof onRun === 'function') {
onRun(getCurrentQuery());
onRun(query);
} else {
handleRunQuery();
}
@@ -1409,7 +1356,7 @@ function QuerySearch({
onBlur={handleBlur}
/>
{getCurrentQuery() && validation.isValid === false && !isFocused && (
{query && validation.isValid === false && !isFocused && (
<div
className={cx('query-status-container', {
hasErrors: validation.errors.length > 0,
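For context on the hunk above: one of the two variants tracks whether a change to `query` came from the outside (`queryData.filter.expression`) or from the user typing, so that validation only re-runs for external updates. A minimal standalone sketch of that idea, with hypothetical names (`useExternalValueSync`, `markLocalEdit`) rather than the component's actual code:

import { useEffect, useRef } from 'react';

// Calls onExternalChange only when `externalValue` differs from the last
// value seen from the outside; edits made locally (reported via
// markLocalEdit) do not re-trigger it.
function useExternalValueSync(
	externalValue: string,
	onExternalChange: (value: string) => void,
): { markLocalEdit: (value: string) => void } {
	const lastExternalRef = useRef<string>('');

	useEffect(() => {
		if (externalValue !== lastExternalRef.current) {
			lastExternalRef.current = externalValue;
			onExternalChange(externalValue);
		}
	}, [externalValue, onExternalChange]);

	return {
		// Record a user edit so the next render does not treat it as external.
		markLocalEdit: (value: string): void => {
			lastExternalRef.current = value;
		},
	};
}

export default useExternalValueSync;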

View File

@@ -9,13 +9,7 @@ import SpanScopeSelector from 'container/QueryBuilder/filters/QueryBuilderSearch
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { Copy, Ellipsis, Trash } from 'lucide-react';
import {
ForwardedRef,
forwardRef,
useCallback,
useMemo,
useState,
} from 'react';
import { memo, useCallback, useMemo, useState } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { HandleChangeQueryDataV5 } from 'types/common/operations.types';
import { DataSource } from 'types/common/queryBuilder';
@@ -26,29 +20,20 @@ import QueryAddOns from './QueryAddOns/QueryAddOns';
import QueryAggregation from './QueryAggregation/QueryAggregation';
import QuerySearch from './QuerySearch/QuerySearch';
export const QueryV2 = forwardRef(function QueryV2(
{
index,
queryVariant,
query,
filterConfigs,
isListViewPanel = false,
showTraceOperator = false,
hasTraceOperator = false,
version,
showOnlyWhereClause = false,
signalSource = '',
isMultiQueryAllowed = false,
onSignalSourceChange,
signalSourceChangeEnabled = false,
queriesCount = 1,
}: QueryProps & {
onSignalSourceChange: (value: string) => void;
signalSourceChangeEnabled: boolean;
queriesCount: number;
},
ref: ForwardedRef<HTMLDivElement>,
): JSX.Element {
export const QueryV2 = memo(function QueryV2({
ref,
index,
queryVariant,
query,
filterConfigs,
isListViewPanel = false,
showTraceOperator = false,
hasTraceOperator = false,
version,
showOnlyWhereClause = false,
signalSource = '',
isMultiQueryAllowed = false,
}: QueryProps & { ref: React.RefObject<HTMLDivElement> }): JSX.Element {
const { cloneQuery, panelType } = useQueryBuilder();
const showFunctions = query?.functions?.length > 0;
@@ -201,16 +186,12 @@ export const QueryV2 = forwardRef(function QueryV2(
icon: <Copy size={14} />,
onClick: handleCloneEntity,
},
...(queriesCount && queriesCount > 1
? [
{
label: 'Delete',
key: 'delete-query',
icon: <Trash size={14} />,
onClick: handleDeleteQuery,
},
]
: []),
{
label: 'Delete',
key: 'delete-query',
icon: <Trash size={14} />,
onClick: handleDeleteQuery,
},
],
}}
placement="bottomRight"
@@ -226,14 +207,12 @@ export const QueryV2 = forwardRef(function QueryV2(
<div className="qb-elements-container">
<div className="qb-search-container">
{dataSource === DataSource.METRICS && (
<div className="metrics-container">
<div className="metrics-select-container">
<MetricsSelect
query={query}
index={index}
version={ENTITY_VERSION_V5}
signalSource={signalSource as 'meter' | ''}
onSignalSourceChange={onSignalSourceChange}
signalSourceChangeEnabled={signalSourceChangeEnabled}
/>
</div>
)}
@@ -279,7 +258,7 @@ export const QueryV2 = forwardRef(function QueryV2(
panelType={panelType}
query={query}
index={index}
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}-${signalSource}`}
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}`}
version="v4"
signalSource={signalSource as 'meter' | ''}
/>
@@ -302,5 +281,3 @@ export const QueryV2 = forwardRef(function QueryV2(
</div>
);
});
QueryV2.displayName = 'QueryV2';
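Side note on the two component signatures in this hunk: receiving `ref` as an ordinary prop (the `memo` variant) only works on React 19 and later; on earlier versions a function component must use `forwardRef`, with memoization layered on top. A small illustrative sketch of that combination, using a hypothetical `Panel` component:

import { forwardRef, memo } from 'react';

interface PanelProps {
	title: string;
}

// memo(forwardRef(...)) keeps both memoization and the forwarded DOM ref.
const Panel = memo(
	forwardRef<HTMLDivElement, PanelProps>(function Panel(
		{ title },
		ref,
	): JSX.Element {
		return <div ref={ref}>{title}</div>;
	}),
);

export default Panel;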

View File

@@ -92,9 +92,6 @@
.qb-trace-operator-editor-container {
flex: 1;
.cm-activeLine > span {
font-size: 12px;
}
}
&.arrow-left {
@@ -116,8 +113,6 @@
text-overflow: ellipsis;
padding: 0px 8px;
border-right: 1px solid var(--bg-slate-400);
font-size: 12px;
font-weight: 300;
}
}
}

View File

@@ -68,7 +68,7 @@ export default function TraceOperator({
!isListViewPanel && 'qb-trace-operator-arrow',
)}
>
<Typography.Text className="label">Trace Operator</Typography.Text>
<Typography.Text className="label">TRACE OPERATOR</Typography.Text>
<div className="qb-trace-operator-editor-container">
<TraceOperatorEditor
value={traceOperator?.expression || ''}

View File

@@ -5,85 +5,13 @@ import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
import { initialQueriesMap } from 'constants/queryBuilder';
import * as UseQBModule from 'hooks/queryBuilder/useQueryBuilder';
import { fireEvent, render, userEvent, waitFor } from 'tests/test-utils';
import React from 'react';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import type { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
import { DataSource } from 'types/common/queryBuilder';
import QuerySearch from '../QuerySearch/QuerySearch';
const CM_EDITOR_SELECTOR = '.cm-editor .cm-content';
// Mock DOM APIs that CodeMirror needs
beforeAll(() => {
// Mock getClientRects and getBoundingClientRect for Range objects
const mockRect: DOMRect = {
width: 100,
height: 20,
top: 0,
left: 0,
right: 100,
bottom: 20,
x: 0,
y: 0,
toJSON: (): DOMRect => mockRect,
} as DOMRect;
// Create a minimal Range mock with only what CodeMirror actually uses
const createMockRange = (): Range => {
let startContainer: Node = document.createTextNode('');
let endContainer: Node = document.createTextNode('');
let startOffset = 0;
let endOffset = 0;
const mockRange = {
// CodeMirror uses these for text measurement
getClientRects: (): DOMRectList =>
(({
length: 1,
item: (index: number): DOMRect | null => (index === 0 ? mockRect : null),
0: mockRect,
*[Symbol.iterator](): Generator<DOMRect> {
yield mockRect;
},
} as unknown) as DOMRectList),
getBoundingClientRect: (): DOMRect => mockRect,
// CodeMirror calls these to set up text ranges
setStart: (node: Node, offset: number): void => {
startContainer = node;
startOffset = offset;
},
setEnd: (node: Node, offset: number): void => {
endContainer = node;
endOffset = offset;
},
// Minimal Range properties (TypeScript requires these)
get startContainer(): Node {
return startContainer;
},
get endContainer(): Node {
return endContainer;
},
get startOffset(): number {
return startOffset;
},
get endOffset(): number {
return endOffset;
},
get collapsed(): boolean {
return startContainer === endContainer && startOffset === endOffset;
},
commonAncestorContainer: document.body,
};
return (mockRange as unknown) as Range;
};
// Mock document.createRange to return a new Range instance each time
document.createRange = (): Range => createMockRange();
// Mock getBoundingClientRect for elements
Element.prototype.getBoundingClientRect = (): DOMRect => mockRect;
});
jest.mock('hooks/useDarkMode', () => ({
useIsDarkMode: (): boolean => false,
}));
@@ -103,6 +31,24 @@ jest.mock('hooks/queryBuilder/useQueryBuilder', () => {
};
});
jest.mock('@codemirror/autocomplete', () => ({
autocompletion: (): Record<string, unknown> => ({}),
closeCompletion: (): boolean => true,
completionKeymap: [] as unknown[],
startCompletion: (): boolean => true,
}));
jest.mock('@codemirror/lang-javascript', () => ({
javascript: (): Record<string, unknown> => ({}),
}));
jest.mock('@uiw/codemirror-theme-copilot', () => ({
copilot: {},
}));
jest.mock('@uiw/codemirror-theme-github', () => ({
githubLight: {},
}));
jest.mock('api/querySuggestions/getKeySuggestions', () => ({
getKeySuggestions: jest.fn().mockResolvedValue({
data: {
@@ -117,19 +63,153 @@ jest.mock('api/querySuggestions/getValueSuggestion', () => ({
}),
}));
// Note: We're NOT mocking CodeMirror here - using the real component
// This provides integration testing with the actual CodeMirror editor
// Mock CodeMirror to a simple textarea to make it testable and call onUpdate
jest.mock(
'@uiw/react-codemirror',
(): Record<string, unknown> => {
// Minimal EditorView shape used by the component
class EditorViewMock {}
(EditorViewMock as any).domEventHandlers = (): unknown => ({} as unknown);
(EditorViewMock as any).lineWrapping = {} as unknown;
(EditorViewMock as any).editable = { of: () => ({}) } as unknown;
const keymap = { of: (arr: unknown) => arr } as unknown;
const Prec = { highest: (ext: unknown) => ext } as unknown;
type CodeMirrorProps = {
value?: string;
onChange?: (v: string) => void;
onFocus?: () => void;
onBlur?: () => void;
placeholder?: string;
onCreateEditor?: (view: unknown) => unknown;
onUpdate?: (arg: {
view: {
state: {
selection: { main: { head: number } };
doc: {
toString: () => string;
lineAt: (
_pos: number,
) => { number: number; from: number; to: number; text: string };
};
};
};
}) => void;
'data-testid'?: string;
extensions?: unknown[];
};
function CodeMirrorMock({
value,
onChange,
onFocus,
onBlur,
placeholder,
onCreateEditor,
onUpdate,
'data-testid': dataTestId,
extensions,
}: CodeMirrorProps): JSX.Element {
const [localValue, setLocalValue] = React.useState<string>(value ?? '');
// Provide a fake editor instance
React.useEffect(() => {
if (onCreateEditor) {
onCreateEditor(new EditorViewMock() as any);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
// Call onUpdate whenever localValue changes to simulate cursor and doc
React.useEffect(() => {
if (onUpdate) {
const text = String(localValue ?? '');
const head = text.length;
onUpdate({
view: {
state: {
selection: { main: { head } },
doc: {
toString: (): string => text,
lineAt: () => ({
number: 1,
from: 0,
to: text.length,
text,
}),
},
},
},
});
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [localValue]);
const handleKeyDown = (
e: React.KeyboardEvent<HTMLTextAreaElement>,
): void => {
const isModEnter = e.key === 'Enter' && (e.metaKey || e.ctrlKey);
if (!isModEnter) return;
const exts: unknown[] = Array.isArray(extensions) ? extensions : [];
const flat: unknown[] = exts.flatMap((x: unknown) =>
Array.isArray(x) ? x : [x],
);
const keyBindings = flat.filter(
(x) =>
Boolean(x) &&
typeof x === 'object' &&
'key' in (x as Record<string, unknown>),
) as Array<{ key?: string; run?: () => boolean | void }>;
keyBindings
.filter((b) => b.key === 'Mod-Enter' && typeof b.run === 'function')
.forEach((b) => {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
b.run!();
});
};
return (
<textarea
data-testid={dataTestId || 'query-where-clause-editor'}
placeholder={placeholder}
value={localValue}
onChange={(e): void => {
setLocalValue(e.target.value);
if (onChange) {
onChange(e.target.value);
}
}}
onFocus={onFocus}
onBlur={onBlur}
onKeyDown={handleKeyDown}
style={{ width: '100%', minHeight: 80 }}
/>
);
}
return {
__esModule: true,
default: CodeMirrorMock,
EditorView: EditorViewMock,
keymap,
Prec,
};
},
);
const handleRunQueryMock = ((UseQBModule as unknown) as {
handleRunQuery: jest.MockedFunction<() => void>;
}).handleRunQuery;
const PLACEHOLDER_TEXT =
"Enter your filter query (e.g., http.status_code >= 500 AND service.name = 'frontend')";
const TESTID_EDITOR = 'query-where-clause-editor';
const SAMPLE_KEY_TYPING = 'http.';
const SAMPLE_VALUE_TYPING_INCOMPLETE = "service.name = '";
const SAMPLE_VALUE_TYPING_COMPLETE = "service.name = 'frontend'";
const SAMPLE_STATUS_QUERY = "http.status_code = '200'";
const SAMPLE_VALUE_TYPING_INCOMPLETE = " service.name = '";
const SAMPLE_VALUE_TYPING_COMPLETE = " service.name = 'frontend'";
const SAMPLE_STATUS_QUERY = " status_code = '200'";
describe('QuerySearch (Integration with Real CodeMirror)', () => {
describe('QuerySearch', () => {
it('renders with placeholder', () => {
render(
<QuerySearch
@@ -139,19 +219,21 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);
// CodeMirror renders a contenteditable div, so we check for the container
const editorContainer = document.querySelector('.query-where-clause-editor');
expect(editorContainer).toBeInTheDocument();
expect(screen.getByPlaceholderText(PLACEHOLDER_TEXT)).toBeInTheDocument();
});
it('fetches key suggestions when typing a key (debounced)', async () => {
// Use real timers for CodeMirror integration tests
jest.useFakeTimers();
const advance = (ms: number): void => {
jest.advanceTimersByTime(ms);
};
const user = userEvent.setup({
advanceTimers: advance,
pointerEventsCheck: 0,
});
const mockedGetKeys = getKeySuggestions as jest.MockedFunction<
typeof getKeySuggestions
>;
mockedGetKeys.mockClear();
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<QuerySearch
@@ -161,33 +243,28 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);
// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});
// Find the CodeMirror editor contenteditable element
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
// Focus and type into the editor
await user.click(editor);
const editor = screen.getByTestId(TESTID_EDITOR);
await user.type(editor, SAMPLE_KEY_TYPING);
advance(1000);
// Wait for debounced API call (300ms debounce + some buffer)
await waitFor(() => expect(mockedGetKeys).toHaveBeenCalled(), {
timeout: 2000,
timeout: 3000,
});
jest.useRealTimers();
});
it('fetches value suggestions when editing value context', async () => {
// Use real timers for CodeMirror integration tests
jest.useFakeTimers();
const advance = (ms: number): void => {
jest.advanceTimersByTime(ms);
};
const user = userEvent.setup({
advanceTimers: advance,
pointerEventsCheck: 0,
});
const mockedGetValues = getValueSuggestions as jest.MockedFunction<
typeof getValueSuggestions
>;
mockedGetValues.mockClear();
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<QuerySearch
@@ -197,28 +274,21 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);
// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
await user.click(editor);
const editor = screen.getByTestId(TESTID_EDITOR);
await user.type(editor, SAMPLE_VALUE_TYPING_INCOMPLETE);
advance(1000);
// Wait for debounced API call (300ms debounce + some buffer)
await waitFor(() => expect(mockedGetValues).toHaveBeenCalled(), {
timeout: 2000,
timeout: 3000,
});
jest.useRealTimers();
});
it('fetches key suggestions on mount for LOGS', async () => {
// Use real timers for CodeMirror integration tests
jest.useFakeTimers();
const mockedGetKeysOnMount = getKeySuggestions as jest.MockedFunction<
typeof getKeySuggestions
>;
mockedGetKeysOnMount.mockClear();
render(
<QuerySearch
@@ -228,15 +298,17 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);
// Wait for debounced API call (300ms debounce + some buffer)
jest.advanceTimersByTime(1000);
await waitFor(() => expect(mockedGetKeysOnMount).toHaveBeenCalled(), {
timeout: 2000,
timeout: 3000,
});
const lastArgs = mockedGetKeysOnMount.mock.calls[
mockedGetKeysOnMount.mock.calls.length - 1
]?.[0] as { signal: unknown; searchText: string };
expect(lastArgs).toMatchObject({ signal: DataSource.LOGS, searchText: '' });
jest.useRealTimers();
});
it('calls provided onRun on Mod-Enter', async () => {
@@ -252,26 +324,12 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);
// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
const editor = screen.getByTestId(TESTID_EDITOR);
await user.click(editor);
await user.type(editor, SAMPLE_STATUS_QUERY);
await user.keyboard('{Meta>}{Enter}{/Meta}');
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
fireEvent.keyDown(editor, {
key: 'Enter',
code: 'Enter',
[modKey]: true,
keyCode: 13,
});
await waitFor(() => expect(onRun).toHaveBeenCalled(), { timeout: 2000 });
await waitFor(() => expect(onRun).toHaveBeenCalled());
});
it('calls handleRunQuery when Mod-Enter without onRun', async () => {
@@ -290,62 +348,11 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);
// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
const editor = screen.getByTestId(TESTID_EDITOR);
await user.click(editor);
await user.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
await user.keyboard('{Meta>}{Enter}{/Meta}');
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
fireEvent.keyDown(editor, {
key: 'Enter',
code: 'Enter',
[modKey]: true,
keyCode: 13,
});
await waitFor(() => expect(mockedHandleRunQuery).toHaveBeenCalled(), {
timeout: 2000,
});
});
it('initializes CodeMirror with expression from queryData.filter.expression on mount', async () => {
const testExpression =
"http.status_code >= 500 AND service.name = 'frontend'";
const queryDataWithExpression = {
...initialQueriesMap.logs.builder.queryData[0],
filter: {
expression: testExpression,
},
};
render(
<QuerySearch
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
queryData={queryDataWithExpression}
dataSource={DataSource.LOGS}
/>,
);
// Wait for CodeMirror to initialize and the expression to be set
await waitFor(
() => {
// CodeMirror stores content in .cm-content, check the text content
const editorContent = document.querySelector(
CM_EDITOR_SELECTOR,
) as HTMLElement;
expect(editorContent).toBeInTheDocument();
// CodeMirror may render the text in multiple ways, check if it contains our expression
const textContent = editorContent.textContent || '';
expect(textContent).toContain('http.status_code');
expect(textContent).toContain('service.name');
},
{ timeout: 3000 },
);
await waitFor(() => expect(mockedHandleRunQuery).toHaveBeenCalled());
});
});

View File

@@ -224,7 +224,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
const visitedPairs: Set<string> = new Set(); // Set to track visited query pairs
// Map extracted query pairs to key-specific pair information for faster access
let queryPairsMap = getQueryPairsMap(existingQuery);
let queryPairsMap = getQueryPairsMap(existingQuery.trim());
filters?.items?.forEach((filter) => {
const { key, op, value } = filter;
@@ -309,7 +309,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
)}${OPERATORS.IN} ${formattedValue} ${modifiedQuery.slice(
notInPair.position.valueEnd + 1,
)}`;
queryPairsMap = getQueryPairsMap(modifiedQuery);
queryPairsMap = getQueryPairsMap(modifiedQuery.trim());
}
shouldAddToNonExisting = false; // Don't add this to non-existing filters
} else if (

View File

@@ -45,12 +45,6 @@
flex-direction: column;
gap: 8px;
.filter-separator {
height: 1px;
background-color: var(--bg-slate-400);
margin: 4px 0;
}
.value {
display: flex;
align-items: center;
@@ -183,12 +177,6 @@
}
}
}
.values {
.filter-separator {
background-color: var(--bg-vanilla-300);
}
}
}
}

View File

@@ -1,191 +0,0 @@
import { FiltersType, QuickFiltersSource } from 'components/QuickFilters/types';
import { useGetAggregateValues } from 'hooks/queryBuilder/useGetAggregateValues';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQueryKeyValueSuggestions';
import { quickFiltersAttributeValuesResponse } from 'mocks-server/__mockdata__/customQuickFilters';
import { rest, server } from 'mocks-server/server';
import { UseQueryResult } from 'react-query';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { SuccessResponse } from 'types/api';
import { IAttributeValuesResponse } from 'types/api/queryBuilder/getAttributesValues';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';
import CheckboxFilter from './Checkbox';
// Mock the query builder hook
jest.mock('hooks/queryBuilder/useQueryBuilder');
const mockUseQueryBuilder = jest.mocked(useQueryBuilder);
// Mock the aggregate values hook
jest.mock('hooks/queryBuilder/useGetAggregateValues');
const mockUseGetAggregateValues = jest.mocked(useGetAggregateValues);
// Mock the key value suggestions hook
jest.mock('hooks/querySuggestions/useGetQueryKeyValueSuggestions');
const mockUseGetQueryKeyValueSuggestions = jest.mocked(
useGetQueryKeyValueSuggestions,
);
interface MockFilterConfig {
title: string;
attributeKey: {
key: string;
dataType: DataTypes;
type: string;
};
dataSource: DataSource;
defaultOpen: boolean;
type: FiltersType;
}
const createMockFilter = (
overrides: Partial<MockFilterConfig> = {},
): MockFilterConfig => ({
// eslint-disable-next-line sonarjs/no-duplicate-string
title: 'Service Name',
attributeKey: {
key: 'service.name',
dataType: DataTypes.String,
type: 'resource',
},
dataSource: DataSource.LOGS,
defaultOpen: false,
type: FiltersType.CHECKBOX,
...overrides,
});
const createMockQueryBuilderData = (hasActiveFilters = false): any => ({
lastUsedQuery: 0,
currentQuery: {
builder: {
queryData: [
{
filters: {
items: hasActiveFilters
? [
{
key: {
key: 'service.name',
dataType: DataTypes.String,
type: 'resource',
},
op: 'in',
value: ['otel-demo', 'sample-flask'],
},
]
: [],
},
},
],
},
},
redirectWithQueryBuilderData: jest.fn(),
});
describe('CheckboxFilter - User Flows', () => {
beforeEach(() => {
// Reset all mocks
jest.clearAllMocks();
// Default mock implementations using the same structure as existing tests
mockUseGetAggregateValues.mockReturnValue({
data: {
payload: {
stringAttributeValues: [
'mq-kafka',
'otel-demo',
'otlp-python',
'sample-flask',
],
},
},
isLoading: false,
} as UseQueryResult<SuccessResponse<IAttributeValuesResponse>>);
mockUseGetQueryKeyValueSuggestions.mockReturnValue({
data: null,
isLoading: false,
} as any);
// Setup MSW server for API calls
server.use(
rest.get('*/api/v3/autocomplete/attribute_values', (_req, res, ctx) =>
res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
),
);
});
it('should auto-open filter and prioritize checked items with visual separator when user opens page with active filters', async () => {
// Mock query builder with active filters
mockUseQueryBuilder.mockReturnValue(createMockQueryBuilderData(true) as any);
const mockFilter = createMockFilter({ defaultOpen: false });
render(
<CheckboxFilter
filter={mockFilter}
source={QuickFiltersSource.LOGS_EXPLORER}
/>,
);
// User should see the filter is automatically opened (not collapsed)
expect(screen.getByText('Service Name')).toBeInTheDocument();
await waitFor(() => {
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
});
// User should see visual separator between checked and unchecked items
expect(screen.getByTestId('filter-separator')).toBeInTheDocument();
// User should see checked items at the top
await waitFor(() => {
const checkboxes = screen.getAllByRole('checkbox');
expect(checkboxes).toHaveLength(4); // Ensure we have exactly 4 checkboxes
expect(checkboxes[0]).toBeChecked(); // otel-demo should be first and checked
expect(checkboxes[1]).toBeChecked(); // sample-flask should be second and checked
expect(checkboxes[2]).not.toBeChecked(); // mq-kafka should be unchecked
expect(checkboxes[3]).not.toBeChecked(); // otlp-python should be unchecked
});
});
it('should respect user preference when user manually toggles filter over auto-open behavior', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
// Mock query builder with active filters
mockUseQueryBuilder.mockReturnValue(createMockQueryBuilderData(true) as any);
const mockFilter = createMockFilter({ defaultOpen: false });
render(
<CheckboxFilter
filter={mockFilter}
source={QuickFiltersSource.LOGS_EXPLORER}
/>,
);
// Initially auto-opened due to active filters
await waitFor(() => {
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
});
// User manually closes the filter
await user.click(screen.getByText('Service Name'));
// User should see filter is now closed (respecting user preference)
expect(
screen.queryByPlaceholderText('Filter values'),
).not.toBeInTheDocument();
// User manually opens the filter again
await user.click(screen.getByText('Service Name'));
// User should see filter is now open (respecting user preference)
await waitFor(() => {
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
});
});
});

View File

@@ -21,7 +21,7 @@ import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQue
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { cloneDeep, isArray, isEqual, isFunction } from 'lodash-es';
import { ChevronDown, ChevronRight } from 'lucide-react';
import { Fragment, useMemo, useState } from 'react';
import { useMemo, useState } from 'react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -54,8 +54,7 @@ interface ICheckboxProps {
export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
const { source, filter, onFilterChange } = props;
const [searchText, setSearchText] = useState<string>('');
// null = no user action, true = user opened, false = user closed
const [userToggleState, setUserToggleState] = useState<boolean | null>(null);
const [isOpen, setIsOpen] = useState<boolean>(filter.defaultOpen);
const [visibleItemsCount, setVisibleItemsCount] = useState<number>(10);
const {
@@ -64,33 +63,6 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
redirectWithQueryBuilderData,
} = useQueryBuilder();
// Check if this filter has active filters in the query
const isSomeFilterPresentForCurrentAttribute = useMemo(
() =>
currentQuery.builder.queryData?.[
lastUsedQuery || 0
]?.filters?.items?.some((item) =>
isEqual(item.key?.key, filter.attributeKey.key),
),
[currentQuery.builder.queryData, lastUsedQuery, filter.attributeKey.key],
);
// Derive isOpen from filter state + user action
const isOpen = useMemo(() => {
// If user explicitly toggled, respect that
if (userToggleState !== null) return userToggleState;
// Auto-open if this filter has active filters in the query
if (isSomeFilterPresentForCurrentAttribute) return true;
// Otherwise use default behavior (first 2 filters open)
return filter.defaultOpen;
}, [
userToggleState,
isSomeFilterPresentForCurrentAttribute,
filter.defaultOpen,
]);
const { data, isLoading } = useGetAggregateValues(
{
aggregateOperator: filter.aggregateOperator || 'noop',
@@ -156,6 +128,8 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
);
}, [data?.payload, filter.attributeKey.dataType, keyValueSuggestions, source]);
const currentAttributeKeys = attributeValues.slice(0, visibleItemsCount);
const setSearchTextDebounced = useDebouncedFn((...args) => {
setSearchText(args[0] as string);
}, DEBOUNCE_DELAY);
@@ -228,23 +202,6 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
const isMultipleValuesTrueForTheKey =
Object.values(currentFilterState).filter((val) => val).length > 1;
// Sort checked items to the top, then unchecked items
const currentAttributeKeys = useMemo(() => {
const checkedValues = attributeValues.filter(
(val) => currentFilterState[val],
);
const uncheckedValues = attributeValues.filter(
(val) => !currentFilterState[val],
);
return [...checkedValues, ...uncheckedValues].slice(0, visibleItemsCount);
}, [attributeValues, currentFilterState, visibleItemsCount]);
// Count of checked values in the currently visible items
const checkedValuesCount = useMemo(
() => currentAttributeKeys.filter((val) => currentFilterState[val]).length,
[currentAttributeKeys, currentFilterState],
);
const handleClearFilterAttribute = (): void => {
const preparedQuery: Query = {
...currentQuery,
@@ -278,6 +235,12 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
}
};
const isSomeFilterPresentForCurrentAttribute = currentQuery.builder.queryData?.[
lastUsedQuery || 0
]?.filters?.items?.some((item) =>
isEqual(item.key?.key, filter.attributeKey.key),
);
const onChange = (
value: string,
checked: boolean,
@@ -527,10 +490,10 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
className="filter-header-checkbox"
onClick={(): void => {
if (isOpen) {
setUserToggleState(false);
setIsOpen(false);
setVisibleItemsCount(10);
} else {
setUserToggleState(true);
setIsOpen(true);
}
}}
>
@@ -577,59 +540,50 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
)}
{attributeValues.length > 0 ? (
<section className="values">
{currentAttributeKeys.map((value: string, index: number) => (
<Fragment key={value}>
{index === checkedValuesCount && checkedValuesCount > 0 && (
<div
key="separator"
className="filter-separator"
data-testid="filter-separator"
/>
)}
<div className="value">
<Checkbox
onChange={(e): void => onChange(value, e.target.checked, false)}
checked={currentFilterState[value]}
disabled={isFilterDisabled}
rootClassName="check-box"
/>
{currentAttributeKeys.map((value: string) => (
<div key={value} className="value">
<Checkbox
onChange={(e): void => onChange(value, e.target.checked, false)}
checked={currentFilterState[value]}
disabled={isFilterDisabled}
rootClassName="check-box"
/>
<div
className={cx(
'checkbox-value-section',
isFilterDisabled ? 'filter-disabled' : '',
)}
onClick={(): void => {
if (isFilterDisabled) {
return;
}
onChange(value, currentFilterState[value], true);
}}
>
<div className={`${filter.title} label-${value}`} />
{filter.customRendererForValue ? (
filter.customRendererForValue(value)
) : (
<Typography.Text
className="value-string"
ellipsis={{ tooltip: { placement: 'right' } }}
>
{String(value)}
</Typography.Text>
)}
<Button type="text" className="only-btn">
{isSomeFilterPresentForCurrentAttribute
? currentFilterState[value] && !isMultipleValuesTrueForTheKey
? 'All'
: 'Only'
: 'Only'}
</Button>
<Button type="text" className="toggle-btn">
Toggle
</Button>
</div>
<div
className={cx(
'checkbox-value-section',
isFilterDisabled ? 'filter-disabled' : '',
)}
onClick={(): void => {
if (isFilterDisabled) {
return;
}
onChange(value, currentFilterState[value], true);
}}
>
<div className={`${filter.title} label-${value}`} />
{filter.customRendererForValue ? (
filter.customRendererForValue(value)
) : (
<Typography.Text
className="value-string"
ellipsis={{ tooltip: { placement: 'right' } }}
>
{String(value)}
</Typography.Text>
)}
<Button type="text" className="only-btn">
{isSomeFilterPresentForCurrentAttribute
? currentFilterState[value] && !isMultipleValuesTrueForTheKey
? 'All'
: 'Only'
: 'Only'}
</Button>
<Button type="text" className="toggle-btn">
Toggle
</Button>
</div>
</Fragment>
</div>
))}
</section>
) : isEmptyStateWithDocsEnabled ? (
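One side of this hunk floats checked values to the top of the list and draws a `filter-separator` between the checked and unchecked groups. The ordering logic reduces to a small pure helper; the sketch below is illustrative only and uses hypothetical names:

// Puts checked values first, keeps relative order within each group,
// and caps the result at `limit` items.
function sortCheckedFirst(
	values: string[],
	checkedState: Record<string, boolean>,
	limit: number,
): { visible: string[]; checkedCount: number } {
	const checked = values.filter((value) => checkedState[value]);
	const unchecked = values.filter((value) => !checkedState[value]);
	const visible = [...checked, ...unchecked].slice(0, limit);

	return {
		visible,
		// Count of checked values among the visible items; the UI can render
		// a separator right after this index.
		checkedCount: visible.filter((value) => checkedState[value]).length,
	};
}

// Example: checked items float to the top of the visible window.
const { visible, checkedCount } = sortCheckedFirst(
	['mq-kafka', 'otel-demo', 'otlp-python', 'sample-flask'],
	{ 'otel-demo': true, 'sample-flask': true },
	10,
);
// visible => ['otel-demo', 'sample-flask', 'mq-kafka', 'otlp-python']
// checkedCount => 2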

View File

@@ -18,6 +18,11 @@ import UPlot from 'uplot';
import { dataMatch, optionsUpdateState } from './utils';
// Extended uPlot interface with custom properties
interface ExtendedUPlot extends uPlot {
_legendScrollCleanup?: () => void;
}
export interface UplotProps {
options: uPlot.Options;
data: uPlot.AlignedData;
@@ -66,6 +71,12 @@ const Uplot = forwardRef<ToggleGraphProps | undefined, UplotProps>(
const destroy = useCallback((chart: uPlot | null) => {
if (chart) {
// Clean up legend scroll event listener
const extendedChart = chart as ExtendedUPlot;
if (extendedChart._legendScrollCleanup) {
extendedChart._legendScrollCleanup();
}
onDeleteRef.current?.(chart);
chart.destroy();
chartRef.current = null;
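The `destroy` callback above only consumes `_legendScrollCleanup`; the side that attaches it is outside this hunk. A sketch of what that attach step might look like, assuming a wheel listener on uPlot's `.u-legend` element and hypothetical helper names:

import uPlot from 'uplot';

interface ExtendedUPlot extends uPlot {
	_legendScrollCleanup?: () => void;
}

// Illustrative only: attach a wheel listener to the legend and stash a
// cleanup function on the chart instance so destroy() can remove it later.
function attachLegendScroll(chart: uPlot): void {
	const legend = chart.root.querySelector('.u-legend');
	if (!legend) return;

	const onWheel = (event: Event): void => {
		// e.g. translate vertical wheel movement into horizontal legend scroll
		event.preventDefault();
	};

	legend.addEventListener('wheel', onWheel, { passive: false });

	(chart as ExtendedUPlot)._legendScrollCleanup = (): void => {
		legend.removeEventListener('wheel', onWheel);
	};
}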

View File

@@ -0,0 +1,223 @@
import { render } from '@testing-library/react';
import { act } from 'react-dom/test-utils';
import { MemoryRouter } from 'react-router-dom';
import { Userpilot } from 'userpilot';
import UserpilotRouteTracker from './UserpilotRouteTracker';
// Mock constants
const INITIAL_PATH = '/initial';
const TIMER_DELAY = 100;
// Mock the userpilot module
jest.mock('userpilot', () => ({
Userpilot: {
reload: jest.fn(),
},
}));
// Mock location state
let mockLocation = {
pathname: INITIAL_PATH,
search: '',
hash: '',
state: null,
};
// Mock react-router-dom
jest.mock('react-router-dom', () => {
const originalModule = jest.requireActual('react-router-dom');
return {
...originalModule,
useLocation: jest.fn(() => mockLocation),
};
});
describe('UserpilotRouteTracker', () => {
beforeEach(() => {
jest.clearAllMocks();
// Reset timers
jest.useFakeTimers();
// Reset error mock implementation
(Userpilot.reload as jest.Mock).mockImplementation(() => {});
// Reset location to initial state
mockLocation = {
pathname: INITIAL_PATH,
search: '',
hash: '',
state: null,
};
});
afterEach(() => {
jest.useRealTimers();
});
it('calls Userpilot.reload on initial render', () => {
render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
// Fast-forward timer to trigger the setTimeout in reloadUserpilot
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
expect(Userpilot.reload).toHaveBeenCalledTimes(1);
});
it('calls Userpilot.reload when pathname changes', () => {
const { rerender } = render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
// Fast-forward initial render timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
jest.clearAllMocks();
// Create a new location object with different pathname
const newLocation = {
...mockLocation,
pathname: '/new-path',
};
// Update the mock location with new path and trigger re-render
act(() => {
mockLocation = newLocation;
// Force a component update with the new location
rerender(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
});
// Fast-forward timer to allow the setTimeout to execute
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
expect(Userpilot.reload).toHaveBeenCalledTimes(1);
});
it('calls Userpilot.reload when search parameters change', () => {
const { rerender } = render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
// Fast-forward initial render timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
jest.clearAllMocks();
// Create a new location object with different search params
const newLocation = {
...mockLocation,
search: '?param=value',
};
// Update the mock location with new search and trigger re-render
// eslint-disable-next-line sonarjs/no-identical-functions
act(() => {
mockLocation = newLocation;
// Force a component update with the new location
rerender(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
});
// Fast-forward timer to allow the setTimeout to execute
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
expect(Userpilot.reload).toHaveBeenCalledTimes(1);
});
it('handles errors in Userpilot.reload gracefully', () => {
// Mock console.error to prevent test output noise and capture calls
const consoleErrorSpy = jest
.spyOn(console, 'error')
.mockImplementation(() => {});
// Instead of using the component, we test the error handling behavior directly
const errorMsg = 'Error message';
// Set up a function that has the same error handling behavior as in component
const testErrorHandler = (): void => {
try {
if (typeof Userpilot !== 'undefined' && Userpilot.reload) {
Userpilot.reload();
}
} catch (error) {
console.error('[Userpilot] Error reloading on route change:', error);
}
};
// Make Userpilot.reload throw an error
(Userpilot.reload as jest.Mock).mockImplementation(() => {
throw new Error(errorMsg);
});
// Execute the function that should handle errors
testErrorHandler();
// Verify error was logged
expect(consoleErrorSpy).toHaveBeenCalledWith(
'[Userpilot] Error reloading on route change:',
expect.any(Error),
);
// Restore console mock
consoleErrorSpy.mockRestore();
});
it('does not call Userpilot.reload when same route is rendered again', () => {
const { rerender } = render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
// Fast-forward initial render timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
jest.clearAllMocks();
act(() => {
mockLocation = {
pathname: mockLocation.pathname,
search: mockLocation.search,
hash: mockLocation.hash,
state: mockLocation.state,
};
// Force a component update with the same location
rerender(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
});
// Fast-forward timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
// Should not call reload since path and search are the same
expect(Userpilot.reload).not.toHaveBeenCalled();
});
});

View File

@@ -0,0 +1,60 @@
import { useCallback, useEffect, useRef } from 'react';
import { useLocation } from 'react-router-dom';
import { Userpilot } from 'userpilot';
/**
* UserpilotRouteTracker - A component that tracks route changes and calls Userpilot.reload
* on actual page changes (pathname changes or significant query parameter changes).
*
* This component renders nothing and is designed to be placed once high in the component tree.
*/
function UserpilotRouteTracker(): null {
const location = useLocation();
const prevPathRef = useRef<string>(location.pathname);
const prevSearchRef = useRef<string>(location.search);
const isFirstRenderRef = useRef<boolean>(true);
// Function to reload Userpilot safely - using useCallback to avoid dependency issues
const reloadUserpilot = useCallback((): void => {
try {
if (typeof Userpilot !== 'undefined' && Userpilot.reload) {
setTimeout(() => {
Userpilot.reload();
}, 100);
}
} catch (error) {
console.error('[Userpilot] Error reloading on route change:', error);
}
}, []);
// Handle first render
useEffect(() => {
if (isFirstRenderRef.current) {
isFirstRenderRef.current = false;
reloadUserpilot();
}
}, [reloadUserpilot]);
// Handle route/query changes
useEffect(() => {
// Skip first render as it's handled by the effect above
if (isFirstRenderRef.current) {
return;
}
// Check if the path has changed or if significant query params have changed
const pathChanged = location.pathname !== prevPathRef.current;
const searchChanged = location.search !== prevSearchRef.current;
if (pathChanged || searchChanged) {
// Update refs
prevPathRef.current = location.pathname;
prevSearchRef.current = location.search;
reloadUserpilot();
}
}, [location.pathname, location.search, reloadUserpilot]);
return null;
}
export default UserpilotRouteTracker;
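As the JSDoc notes, the tracker renders nothing and is meant to be mounted once, high in the tree and inside the router, so it observes every location change. A hypothetical mount point (not the actual app shell), assuming react-router-dom v5:

import { BrowserRouter, Route, Switch } from 'react-router-dom';

import UserpilotRouteTracker from './UserpilotRouteTracker';

// Hypothetical app shell: the tracker sits next to the routes so it sees
// every location change, but renders nothing itself.
function App(): JSX.Element {
	return (
		<BrowserRouter>
			<UserpilotRouteTracker />
			<Switch>
				<Route path="/" exact>
					<div>Home</div>
				</Route>
			</Switch>
		</BrowserRouter>
	);
}

export default App;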

View File

@@ -7,7 +7,7 @@ import ErrorIcon from 'assets/Error';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { BookOpenText, ChevronsDown, TriangleAlert } from 'lucide-react';
import KeyValueLabel from 'periscope/components/KeyValueLabel';
import { ReactNode, useMemo } from 'react';
import { ReactNode } from 'react';
import { Warning } from 'types/api';
interface WarningContentProps {
@@ -106,51 +106,19 @@ export function WarningContent({ warning }: WarningContentProps): JSX.Element {
);
}
function PopoverMessage({
message,
}: {
message: string | ReactNode;
}): JSX.Element {
return (
<section className="warning-content">
<section className="warning-content__summary-section">
<header className="warning-content__summary">
<div className="warning-content__summary-left">
<div className="warning-content__summary-text">
<p className="warning-content__warning-message">{message}</p>
</div>
</div>
</header>
</section>
</section>
);
}
interface WarningPopoverProps extends PopoverProps {
children?: ReactNode;
warningData?: Warning;
message?: string | ReactNode;
warningData: Warning;
}
function WarningPopover({
children,
warningData,
message = '',
...popoverProps
}: WarningPopoverProps): JSX.Element {
const content = useMemo(() => {
if (message) {
return <PopoverMessage message={message} />;
}
if (warningData) {
return <WarningContent warning={warningData} />;
}
return null;
}, [message, warningData]);
return (
<Popover
content={content}
content={<WarningContent warning={warningData} />}
overlayStyle={{ padding: 0, maxWidth: '600px' }}
overlayInnerStyle={{ padding: 0 }}
autoAdjustOverflow
@@ -169,8 +137,6 @@ function WarningPopover({
WarningPopover.defaultProps = {
children: undefined,
warningData: null,
message: null,
};
export default WarningPopover;
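One side of this hunk lets `WarningPopover` take a plain `message` instead of a structured `Warning`. An illustrative usage of that variant, with a hypothetical wrapper component and import path:

import WarningPopover from './WarningPopover';

// Illustrative only, assuming the variant that accepts a `message` prop:
// a plain string (or ReactNode) is shown when no structured Warning exists.
function PartialDataBadge(): JSX.Element {
	return (
		<WarningPopover message="Results may be partial: one replica timed out.">
			<span className="warning-badge">Partial data</span>
		</WarningPopover>
	);
}

export default PartialDataBadge;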

View File

@@ -12,7 +12,6 @@ function YAxisUnitSelector({
onChange,
placeholder = 'Please select a unit',
loading = false,
'data-testid': dataTestId,
}: YAxisUnitSelectorProps): JSX.Element {
const universalUnit = mapMetricUnitToUniversalUnit(value);
@@ -46,7 +45,6 @@ function YAxisUnitSelector({
placeholder={placeholder}
filterOption={(input, option): boolean => handleSearch(input, option)}
loading={loading}
data-testid={dataTestId}
>
{Y_AXIS_CATEGORIES.map((category) => (
<Select.OptGroup key={category.name} label={category.name}>

View File

@@ -4,7 +4,6 @@ export interface YAxisUnitSelectorProps {
placeholder?: string;
loading?: boolean;
disabled?: boolean;
'data-testid'?: string;
}
export enum UniversalYAxisUnit {

View File

@@ -24,7 +24,6 @@ export const DATE_TIME_FORMATS = {
TIME_SECONDS: 'HH:mm:ss',
TIME_UTC: 'HH:mm:ss (UTC Z)',
TIME_UTC_MS: 'HH:mm:ss.SSS (UTC Z)',
TIME_SPAN_PERCENTILE: 'HH:mm:ss MMM DD',
// Short date formats
DATE_SHORT: 'MM/DD',

View File

@@ -50,5 +50,4 @@ export enum QueryParams {
tab = 'tab',
thresholds = 'thresholds',
selectedExplorerView = 'selectedExplorerView',
variables = 'variables',
}

View File

@@ -86,11 +86,7 @@ export const REACT_QUERY_KEY = {
SPAN_LOGS: 'SPAN_LOGS',
SPAN_BEFORE_LOGS: 'SPAN_BEFORE_LOGS',
SPAN_AFTER_LOGS: 'SPAN_AFTER_LOGS',
TRACE_ONLY_LOGS: 'TRACE_ONLY_LOGS',
// Routing Policies Query Keys
GET_ROUTING_POLICIES: 'GET_ROUTING_POLICIES',
// Span Percentiles Query Keys
GET_SPAN_PERCENTILES: 'GET_SPAN_PERCENTILES',
} as const;

View File

@@ -34,7 +34,7 @@ const themeColors = {
cyan: '#00FFFF',
},
chartcolors: {
radicalRed: '#FF1A66',
robin: '#3F5ECC',
dodgerBlue: '#2F80ED',
mediumOrchid: '#BB6BD9',
seaBuckthorn: '#F2994A',
@@ -58,7 +58,7 @@ const themeColors = {
oliveDrab: '#66991A',
lavenderRose: '#FF99E6',
electricLime: '#CCFF1A',
robin: '#3F5ECC',
radicalRed: '#FF1A66',
harleyOrange: '#E6331A',
turquoise: '#33FFCC',
gladeGreen: '#66994D',
@@ -80,7 +80,7 @@ const themeColors = {
maroon: '#800000',
navy: '#000080',
aquamarine: '#7FFFD4',
darkSeaGreen: '#8FBC8F',
gold: '#FFD700',
gray: '#808080',
skyBlue: '#87CEEB',
indigo: '#4B0082',
@@ -105,7 +105,7 @@ const themeColors = {
lawnGreen: '#7CFC00',
mediumSeaGreen: '#3CB371',
lightCoral: '#F08080',
gold: '#FFD700',
darkSeaGreen: '#8FBC8F',
sandyBrown: '#F4A460',
darkKhaki: '#BDB76B',
cornflowerBlue: '#6495ED',
@@ -113,7 +113,7 @@ const themeColors = {
paleGreen: '#98FB98',
},
lightModeColor: {
radicalRed: '#FF1A66',
robin: '#3F5ECC',
dodgerBlueDark: '#0C6EED',
steelgrey: '#2f4b7c',
steelpurple: '#665191',
@@ -143,7 +143,7 @@ const themeColors = {
oliveDrab: '#66991A',
lavenderRoseDark: '#F024BD',
electricLimeDark: '#84A800',
robin: '#3F5ECC',
radicalRed: '#FF1A66',
harleyOrange: '#E6331A',
gladeGreen: '#66994D',
hemlock: '#66664D',
@@ -181,7 +181,7 @@ const themeColors = {
darkOrchid: '#9932CC',
mediumSeaGreenDark: '#109E50',
lightCoralDark: '#F85959',
gold: '#FFD700',
darkSeaGreenDark: '#509F50',
sandyBrownDark: '#D97117',
darkKhakiDark: '#99900A',
cornflowerBlueDark: '#3371E6',

View File

@@ -3,5 +3,4 @@ export const USER_PREFERENCES = {
NAV_SHORTCUTS: 'nav_shortcuts',
LAST_SEEN_CHANGELOG_VERSION: 'last_seen_changelog_version',
SPAN_DETAILS_PINNED_ATTRIBUTES: 'span_details_pinned_attributes',
SPAN_PERCENTILE_RESOURCE_ATTRIBUTES: 'span_percentile_resource_attributes',
};

View File

@@ -20,13 +20,17 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
const { user } = useAppContext();
const [action] = useComponentPermission(['new_alert_action'], user.role);
const onClickEditHandler = useCallback((id: string) => {
history.push(
generatePath(ROUTES.CHANNELS_EDIT, {
channelId: id,
}),
);
}, []);
const onClickEditHandler = useCallback(
(id: string, event?: React.MouseEvent) => {
history.push(
generatePath(ROUTES.CHANNELS_EDIT, {
channelId: id,
}),
event,
);
},
[],
);
const columns: ColumnsType<Channels> = [
{
@@ -52,7 +56,10 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
width: 80,
render: (id: string): JSX.Element => (
<>
<Button onClick={(): void => onClickEditHandler(id)} type="link">
<Button
onClick={(event: React.MouseEvent): void => onClickEditHandler(id, event)}
type="link"
>
{t('column_channel_edit')}
</Button>
<Delete id={id} notifications={notifications} />
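The edit handler now forwards the click event into `history.push`, presumably so the navigation layer introduced in this PR can detect Cmd/Ctrl-click and open the channel editor in a new tab. A minimal sketch of that detection, as a hypothetical helper rather than the PR's actual `useSafeNavigate` implementation:

import type { MouseEvent } from 'react';

// Hypothetical helper: Cmd-click (macOS), Ctrl-click, or middle-click opens
// the target in a new tab; anything else falls through to the normal
// in-app navigation callback.
function shouldOpenInNewTab(event?: MouseEvent): boolean {
	if (!event) return false;
	return event.metaKey || event.ctrlKey || event.button === 1;
}

export function navigateSafely(
	path: string,
	navigate: (path: string) => void,
	event?: MouseEvent,
): void {
	if (shouldOpenInNewTab(event)) {
		window.open(path, '_blank', 'noopener,noreferrer');
		return;
	}
	navigate(path);
}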

Some files were not shown because too many files have changed in this diff.