Compare commits
55 Commits
chore/filt...SIG-9314
| Author | SHA1 | Date |
|---|---|---|
| | 02e9152ee0 | |
| | 2b79678e63 | |
| | a4f54baf1f | |
| | 4e6c42dd17 | |
| | 39bd169b89 | |
| | c7c2d2a7ef | |
| | 0cfb809605 | |
| | 6a378ed7b4 | |
| | 8e41847523 | |
| | 779df62093 | |
| | 3763794531 | |
| | e9fa68e1f3 | |
| | 7bd3e1c453 | |
| | a48455b2b3 | |
| | fbb66f14ba | |
| | 54b67d9cfd | |
| | 1a193015a7 | |
| | 245179cbf7 | |
| | dbb6b333c8 | |
| | 56f8e53d88 | |
| | 2f4e371dac | |
| | db75ec56bc | |
| | 02755a6527 | |
| | 9f089e0784 | |
| | fb9a7ad3cd | |
| | ad631d70b6 | |
| | c44efeab33 | |
| | e9743fa7ac | |
| | b7ece08d3e | |
| | e5f4f5cc72 | |
| | 4437630127 | |
| | 89639b239e | |
| | 785ae9f0bd | |
| | 8752022cef | |
| | c7e4a9c45d | |
| | bf92c92204 | |
| | bd63633be7 | |
| | 1158e1199b | |
| | 0a60c49314 | |
| | c25e3beb81 | |
| | c9e0f2b9ca | |
| | 6d831849c1 | |
| | 83eeb46f99 | |
| | 287558dc9d | |
| | 83aad793c2 | |
| | 3eff689c85 | |
| | f5bcd65e2e | |
| | e7772d93af | |
| | bbf987ebd7 | |
| | 105c3a3b8c | |
| | c1a4a5b8db | |
| | c9591f4341 | |
| | fd216fdee1 | |
| | f5bf4293a1 | |
| | 155a44a25d | |
@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
container_name: schema-migrator-async
command:
- async
1 .github/workflows/build-enterprise.yaml (vendored)
@@ -107,7 +107,6 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
1 .github/workflows/build-staging.yaml (vendored)
@@ -106,7 +106,6 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
2 .github/workflows/integrationci.yaml (vendored)
@@ -17,6 +17,8 @@ jobs:
- bootstrap
- passwordauthn
- callbackauthn
- cloudintegrations
- dashboard
- querier
- ttl
sqlstore-provider:
12 Makefile
@@ -84,10 +84,9 @@ go-run-enterprise: ## Runs the enterprise go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
--config ./conf/prometheus.yml \
--cluster cluster
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go

.PHONY: go-test
go-test: ## Runs go unit tests
@@ -102,10 +101,9 @@ go-run-community: ## Runs the community go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
--config ./conf/prometheus.yml \
--cluster cluster
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server

.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -208,4 +206,4 @@ py-lint: ## Run lint for integration tests

.PHONY: py-test
py-test: ## Runs integration tests
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@@ -5,9 +5,12 @@ import (
"log/slog"

"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -76,6 +79,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
@@ -31,7 +31,6 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
mod_timestamp: "{{ .CommitTimestamp }}"
@@ -8,6 +8,8 @@ import (
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -17,6 +19,7 @@ import (
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -105,6 +108,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e

return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.99.0
image: signoz/signoz:v0.102.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.11
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
deploy:
restart_policy:
condition: on-failure
@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.99.0
image: signoz/signoz:v0.102.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.11
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
deploy:
restart_policy:
condition: on-failure
@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.99.0}
image: signoz/signoz:${VERSION:-v0.102.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
container_name: schema-migrator-async
command:
- async
@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.99.0}
image: signoz/signoz:${VERSION:-v0.102.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
container_name: schema-migrator-async
command:
- async
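For reference, the `${VERSION:-…}` and `${OTELCOL_TAG:-…}` defaults in the compose hunks above mean the image tags can be overridden per invocation without editing the file. A minimal sketch, assuming the default values shown in the diff:

```sh
# Sketch: override the compose image tags at run time.
# VERSION and OTELCOL_TAG are the variables referenced in the compose files above;
# the values here simply mirror the new defaults introduced by this change.
VERSION=v0.102.0 OTELCOL_TAG=v0.129.11 docker compose up -d
```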
@@ -103,9 +103,19 @@ Remember to replace the region and ingestion key with proper values as obtained

Both SigNoz and the OTel demo app [the frontend-proxy service, to be accurate] share a common port allocation at 8080. To prevent port allocation conflicts, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run the docker compose command.

Also, both SigNoz and the OTel Demo App have the same `PROMETHEUS_PORT` configured; by default both of them try to start at `9090`, which may cause either of them to fail depending upon which one acquires it first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.

```sh
ENVOY_PORT=8081 docker compose up -d
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
```

Alternatively, we can modify these values using the `.env` file too, which reduces the command to just:

```sh
docker compose up -d
```

This spins up multiple microservices, with OpenTelemetry instrumentation enabled. You can verify this by:

```sh
docker compose ps -a
```
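If you go the `.env` route mentioned above, the file only needs the two overrides from the commands shown earlier. A minimal sketch, assuming the file sits next to the OTel demo compose file:

```sh
# .env — assumed location: alongside the OTel demo docker-compose file.
# Variable names are the ones referenced in the commands above.
ENVOY_PORT=8081
PROMETHEUS_PORT=9091
```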
@@ -48,7 +48,26 @@ func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey)
}

func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}

tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}

err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}

return nil
}

func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
@@ -15,18 +15,18 @@ type anonymous

type role
relations
define assignee: [user]
define assignee: [user, anonymous]

define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]

type resources
type metaresources
relations
define create: [user, role#assignee]
define list: [user, role#assignee]

type resource
type metaresource
relations
define read: [user, anonymous, role#assignee]
define update: [user, role#assignee]
@@ -35,6 +35,6 @@ type resource
define block: [user, role#assignee]

type telemetry
type telemetryresource
relations
define read: [user, anonymous, role#assignee]
define read: [user, role#assignee]
@@ -20,6 +20,10 @@ import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -99,6 +103,39 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)

// dashboards
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.CreatePublic)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.GetPublic)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.UpdatePublic)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.DeletePublic)).Methods(http.MethodDelete)

// public access for dashboards
router.HandleFunc("/api/v1/public/dashboards/{id}", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicData,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}

return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)

router.HandleFunc("/api/v1/public/dashboards/{id}/widgets/{index}/query_range", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicWidgetQueryRange,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}

return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)

// v3
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)
@@ -10,7 +10,6 @@ import (
"strings"
"time"

"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -77,7 +76,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}

ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
@@ -186,48 +185,37 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
return cloudIntegrationUser, nil
}

func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, string, *basemodel.ApiError,
) {
url := fmt.Sprintf(
"%s%s",
strings.TrimSuffix(constants.ZeusURL, "/"),
"/v2/deployments/me",
)

// TODO: remove this struct from here
type deploymentResponse struct {
Status string `json:"status"`
Error string `json:"error"`
Data struct {
Name string `json:"name"`

ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
} `json:"data"`
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}

resp, apiErr := requestAndParseResponse[deploymentResponse](
ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
)

if apiErr != nil {
return "", "", basemodel.WrapApiError(
apiErr, "couldn't query for deployment info",
)
}

if resp.Status != "success" {
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: status: %s, error: %s",
resp.Status, resp.Error,
"couldn't query for deployment info: error: %w", err,
))
}

regionDns := resp.Data.ClusterInfo.Region.DNS
deploymentName := resp.Data.Name
resp := new(deploymentResponse)

err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}

regionDns := resp.ClusterInfo.Region.DNS
deploymentName := resp.Name

if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge
@@ -192,7 +192,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {

func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)

r.Use(otelmux.Middleware(
"apiserver",
@@ -10,9 +10,6 @@ var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")

// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"

func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)
if len(v) == 0 {
@@ -30,6 +30,8 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
return sqlschema.DataTypeBoolean
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
return sqlschema.DataTypeText
case "BYTEA":
return sqlschema.DataTypeBytea
}

return formatter.Formatter.DataTypeOf(dataType)
@@ -34,7 +34,8 @@ func (f *formatter) JSONType(column, path string) []byte {
func (f *formatter) JSONIsArray(column, path string) []byte {
var sql []byte
sql = append(sql, f.JSONType(column, path)...)
sql = append(sql, " = 'array'"...)
sql = append(sql, " = "...)
sql = schema.Append(f.bunf, sql, "array")
return sql
}

@@ -42,9 +43,7 @@ func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byt
var sql []byte
sql = append(sql, "jsonb_array_elements("...)
sql = f.bunf.AppendIdent(sql, column)
if path != "$" && path != "" {
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
}
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)

@@ -55,26 +54,22 @@ func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []by
var sql []byte
sql = append(sql, "jsonb_array_elements_text("...)
sql = f.bunf.AppendIdent(sql, column)
if path != "$" && path != "" {
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
}
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)

return sql, []byte(alias + "::text")
return sql, append([]byte(alias), "::text"...)
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_each("...)
sql = f.bunf.AppendIdent(sql, column)
if path != "$" && path != "" {
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
}
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)

return sql, []byte(alias + ".key")
return sql, append([]byte(alias), ".key"...)
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
@@ -86,18 +81,13 @@ func (f *formatter) JSONArrayAgg(expression string) []byte {
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
if len(values) == 0 {
return []byte("jsonb_build_array()")
}
var sql []byte
sql = append(sql, "jsonb_build_array("...)
for i, v := range values {
if i > 0 {
for idx, value := range values {
if idx > 0 {
sql = append(sql, ", "...)
}
sql = append(sql, '\'')
sql = append(sql, v...)
sql = append(sql, '\'')
sql = schema.Append(f.bunf, sql, value)
}
sql = append(sql, ')')
return sql
@@ -110,42 +100,48 @@ func (f *formatter) TextToJsonColumn(column string) []byte {
return sql
}

func (f *formatter) convertJSONPathToPostgres(jsonPath string) string {
func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}

func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) string {
path := strings.TrimPrefix(jsonPath, "$")
if path == "" || path == "." {
return ""
func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")

if path == "" {
return nil
}

parts := strings.Split(strings.TrimPrefix(path, "."), ".")
if len(parts) == 0 {
return ""
}
parts := strings.Split(path, ".")

var result strings.Builder

for i, part := range parts {
if i < len(parts)-1 {
result.WriteString("->")
result.WriteString("'")
result.WriteString(part)
result.WriteString("'")
} else {
if asText {
result.WriteString("->>")
} else {
result.WriteString("->")
}
result.WriteString("'")
result.WriteString(part)
result.WriteString("'")
var validParts []string
for _, part := range parts {
if part != "" {
validParts = append(validParts, part)
}
}

return result.String()
if len(validParts) == 0 {
return nil
}

var result []byte

for idx, part := range validParts {
if idx == len(validParts)-1 {
if asText {
result = append(result, "->>"...)
} else {
result = append(result, "->"...)
}
result = schema.Append(f.bunf, result, part)
return result
}

result = append(result, "->"...)
result = schema.Append(f.bunf, result, part)
}

return result
}

func (f *formatter) LowerExpression(expression string) []byte {
@@ -2,7 +2,7 @@ package postgressqlstore

import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/uptrace/bun/dialect/pgdialect"
)
@@ -80,6 +80,12 @@ func TestJSONType(t *testing.T) {
path: "$",
expected: `jsonb_typeof("json_col")`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data")`,
},
}

for _, tt := range tests {
@@ -116,6 +122,12 @@ func TestJSONIsArray(t *testing.T) {
path: "$",
expected: `jsonb_typeof("json_col") = 'array'`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data") = 'array'`,
},
}

for _, tt := range tests {
@@ -377,7 +389,7 @@ func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New()).(*formatter)
got := f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText)
got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
assert.Equal(t, tt.expected, got)
})
}
@@ -3,6 +3,7 @@ package postgressqlstore
import (
"context"
"database/sql"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -60,7 +61,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
return &provider{
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks),
bundb: bunDB,
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
}, nil
@@ -280,6 +280,7 @@
"got": "11.8.5",
"form-data": "4.0.4",
"brace-expansion": "^2.0.2",
"on-headers": "^1.1.0"
"on-headers": "^1.1.0",
"tmp": "0.2.4"
}
}
@@ -274,7 +274,7 @@ function App(): JSX.Element {
chat_settings: {
app_id: process.env.PYLON_APP_ID,
email: user.email,
name: user.displayName,
name: user.displayName || user.email,
},
};
}
@@ -1,4 +1,4 @@
import { ApiBaseInstance as axios } from 'api';
import { LogEventAxiosInstance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -1,13 +1,11 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { ApiBaseInstance } from 'api';
import axios from 'api';

import { getFieldKeys } from '../getFieldKeys';

// Mock the API instance
jest.mock('api', () => ({
ApiBaseInstance: {
get: jest.fn(),
},
get: jest.fn(),
}));

describe('getFieldKeys API', () => {
@@ -31,33 +29,33 @@ describe('getFieldKeys API', () => {

it('should call API with correct parameters when no args provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

// Call function with no parameters
await getFieldKeys();

// Verify API was called correctly with empty params object
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
params: {},
});
});

it('should call API with signal parameter when provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

// Call function with signal parameter
await getFieldKeys('traces');

// Verify API was called with signal parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'traces' },
});
});

it('should call API with name parameter when provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
(axios.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -72,14 +70,14 @@ describe('getFieldKeys API', () => {
await getFieldKeys(undefined, 'service');

// Verify API was called with name parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
params: { name: 'service' },
});
});

it('should call API with both signal and name when provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
(axios.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -94,14 +92,14 @@ describe('getFieldKeys API', () => {
await getFieldKeys('logs', 'service');

// Verify API was called with both parameters
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'logs', name: 'service' },
});
});

it('should return properly formatted response', async () => {
// Mock API to return our response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

// Call the function
const result = await getFieldKeys('traces');
@@ -1,13 +1,11 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { ApiBaseInstance } from 'api';
import axios from 'api';

import { getFieldValues } from '../getFieldValues';

// Mock the API instance
jest.mock('api', () => ({
ApiBaseInstance: {
get: jest.fn(),
},
get: jest.fn(),
}));

describe('getFieldValues API', () => {
@@ -17,7 +15,7 @@ describe('getFieldValues API', () => {

it('should call the API with correct parameters (no options)', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
(axios.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -34,14 +32,14 @@ describe('getFieldValues API', () => {
await getFieldValues();

// Verify API was called correctly with empty params
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
params: {},
});
});

it('should call the API with signal parameter', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
(axios.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -58,14 +56,14 @@ describe('getFieldValues API', () => {
await getFieldValues('traces');

// Verify API was called with signal parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
params: { signal: 'traces' },
});
});

it('should call the API with name parameter', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
(axios.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -82,14 +80,14 @@ describe('getFieldValues API', () => {
await getFieldValues(undefined, 'service.name');

// Verify API was called with name parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name' },
});
});

it('should call the API with value parameter', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
(axios.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -106,14 +104,14 @@ describe('getFieldValues API', () => {
await getFieldValues(undefined, 'service.name', 'front');

// Verify API was called with value parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name', searchText: 'front' },
});
});

it('should call the API with time range parameters', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
(axios.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -138,7 +136,7 @@ describe('getFieldValues API', () => {
);

// Verify API was called with time range parameters (converted to milliseconds)
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
params: {
signal: 'logs',
name: 'service.name',
@@ -165,7 +163,7 @@ describe('getFieldValues API', () => {
},
};

(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);
(axios.get as jest.Mock).mockResolvedValueOnce(mockResponse);

// Call the function
const result = await getFieldValues('traces', 'mixed.values');
@@ -196,7 +194,7 @@ describe('getFieldValues API', () => {
};

// Mock API to return our response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
(axios.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);

// Call the function
const result = await getFieldValues('traces', 'service.name');
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -24,7 +24,7 @@ export const getFieldKeys = async (
}

try {
const response = await ApiBaseInstance.get('/fields/keys', { params });
const response = await axios.get('/fields/keys', { params });

return {
httpStatusCode: response.status,
@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -47,7 +47,7 @@ export const getFieldValues = async (
}

try {
const response = await ApiBaseInstance.get('/fields/values', { params });
const response = await axios.get('/fields/values', { params });

// Normalize values from different types (stringValues, boolValues, etc.)
if (response.data?.data?.values) {
@@ -86,8 +86,9 @@ const interceptorRejected = async (

if (
response.status === 401 &&
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
// if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
response.config.url !== '/sessions/rotate' &&
response.config.url !== '/sessions/email_password' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
)
@@ -199,15 +200,15 @@ ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
//

// axios Base
export const ApiBaseInstance = axios.create({
export const LogEventAxiosInstance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});

ApiBaseInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejectedBase,
);
ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V1
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
@@ -17,7 +17,7 @@ export const getHostAttributeKeys = async (
try {
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await ApiBaseInstance.get(
}> = await axios.get(
`/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
{
params: {
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -20,7 +20,7 @@ const getOnboardingStatus = async (props: {
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
const { endpointService, ...rest } = props;
try {
const response = await ApiBaseInstance.post(
const response = await axios.post(
`/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
rest,
);
@@ -1,13 +1,20 @@
import axios from 'api';
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import { PayloadProps, Props } from 'types/api/metrics/getService';

const getService = async (props: Props): Promise<PayloadProps> => {
const response = await axios.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data;
try {
const response = await ApiV2Instance.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default getService;
@@ -1,22 +1,27 @@
import axios from 'api';
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';

const getTopOperations = async (props: Props): Promise<PayloadProps> => {
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';
try {
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';

const response = await axios.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
});
const response = await ApiV2Instance.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
limit: 5000,
});

if (props.isEntryPoint) {
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
return response.data;
};

export default getTopOperations;
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -9,7 +9,7 @@ const getCustomFilters = async (
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const { signal } = props;
try {
const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);
const response = await axios.get(`/orgs/me/filters/${signal}`);

return {
statusCode: 200,
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
@@ -6,7 +6,7 @@ import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFil
const updateCustomFiltersAPI = async (
props: UpdateCustomFiltersProps,
): Promise<SuccessResponse<void> | AxiosError> =>
ApiBaseInstance.put(`orgs/me/filters`, {
axios.put(`/orgs/me/filters`, {
...props.data,
});
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -9,15 +9,12 @@ const listOverview = async (
): Promise<SuccessResponseV2<PayloadProps>> => {
const { start, end, show_ip: showIp, filter } = props;
try {
const response = await ApiBaseInstance.post(
`/third-party-apis/overview/list`,
{
start,
end,
show_ip: showIp,
filter,
},
);
const response = await axios.post(`/third-party-apis/overview/list`, {
start,
end,
show_ip: showIp,
filter,
});

return {
httpStatusCode: response.status,
28 frontend/src/api/trace/getSpanPercentiles.ts (Normal file)
@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
GetSpanPercentilesProps,
GetSpanPercentilesResponseDataProps,
} from 'types/api/trace/getSpanPercentiles';

const getSpanPercentiles = async (
props: GetSpanPercentilesProps,
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
try {
const response = await axios.post('/span_percentile', {
...props,
});

return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

export default getSpanPercentiles;
@@ -11,7 +11,7 @@ import {
export const getQueryRangeV5 = async (
props: QueryRangePayloadV5,
version: string,
signal: AbortSignal,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
try {
@@ -37,7 +37,6 @@

border-radius: 2px 0px 0px 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);

border-right: none;
border-left: none;
@@ -45,6 +44,12 @@
border-bottom-right-radius: 0px;
border-top-left-radius: 0px;
border-bottom-left-radius: 0px;
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}

.close-btn {
@@ -132,9 +132,9 @@
justify-content: center;
}

.json-action-btn {
.log-detail-drawer__actions {
display: flex;
gap: 8px;
gap: 4px;
}
}
@@ -319,31 +319,35 @@ function LogDetailInner({
</Radio.Button>
</Radio.Group>

{selectedView === VIEW_TYPES.JSON && (
<div className="json-action-btn">
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title="Show Filters"
placement="topLeft"
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
)}

<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
>
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={handleJSONCopy}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
/>
</div>
)}

{selectedView === VIEW_TYPES.CONTEXT && (
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
)}

<Tooltip title="Copy Log Link" placement="left" aria-label="Copy Log Link">
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={onLogCopy}
/>
</Tooltip>
</Tooltip>
</div>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
@@ -383,7 +387,8 @@ function LogDetailInner({
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
logLineTimestamp={log.timestamp.toString()}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
/>
)}
</Drawer>
@@ -6,6 +6,7 @@ import { useCopyToClipboard } from 'react-use';
function CopyClipboardHOC({
entityKey,
textToCopy,
tooltipText = 'Copy to clipboard',
children,
}: CopyClipboardHOCProps): JSX.Element {
const [value, setCopy] = useCopyToClipboard();
@@ -31,7 +32,7 @@ function CopyClipboardHOC({
<span onClick={onClick} role="presentation" tabIndex={-1}>
<Popover
placement="top"
content={<span style={{ fontSize: '0.9rem' }}>Copy to clipboard</span>}
content={<span style={{ fontSize: '0.9rem' }}>{tooltipText}</span>}
>
{children}
</Popover>
@@ -42,7 +43,11 @@ function CopyClipboardHOC({
interface CopyClipboardHOCProps {
entityKey: string | undefined;
textToCopy: string;
tooltipText?: string;
children: ReactNode;
}

export default CopyClipboardHOC;
CopyClipboardHOC.defaultProps = {
tooltipText: 'Copy to clipboard',
};
@@ -251,6 +251,10 @@
.ant-input-group-addon {
border-top-left-radius: 0px !important;
border-top-right-radius: 0px !important;
background: var(--bg-ink-300);
color: var(--bg-vanilla-400);
font-size: 12px;
font-weight: 300;
}

.ant-input {
@@ -398,7 +402,7 @@
}

.qb-search-container {
.metrics-select-container {
.metrics-container {
margin-bottom: 12px;
}
}
@@ -22,6 +22,8 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
showOnlyWhereClause = false,
showTraceOperator = false,
version,
onSignalSourceChange,
signalSourceChangeEnabled = false,
}: QueryBuilderProps): JSX.Element {
const {
currentQuery,
@@ -175,6 +177,9 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
queriesCount={1}
/>
) : (
currentQuery.builder.queryData.map((query, index) => (
@@ -193,7 +198,10 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
signalSource={config?.signalSource || ''}
signalSource={query.source as 'meter' | ''}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
queriesCount={currentQuery.builder.queryData.length}
/>
))
)}
@@ -98,6 +98,13 @@
border-radius: 2px;
border: 1.005px solid var(--Slate-400, #1d212d);
background: var(--Ink-300, #16181d);
color: var(--bg-vanilla-400);
font-family: 'Geist Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
}

.input-with-label {
@@ -1,5 +1,23 @@
.metrics-select-container {
.metrics-source-select-container {
margin-bottom: 8px;
display: flex;
flex-direction: row;
align-items: flex-start;
gap: 8px;
width: 100%;

.ant-select-selection-search-input {
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}

.source-selector {
width: 120px;
}

.ant-select-selector {
width: 100%;
@@ -13,6 +31,11 @@
font-weight: 400;
line-height: 20px; /* 142.857% */
min-height: 36px;

.ant-select-selection-placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}

.ant-select-dropdown {
@@ -42,7 +65,7 @@
}

.lightMode {
.metrics-select-container {
.metrics-source-select-container {
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300) !important;
background: var(--bg-vanilla-100);
@@ -1,21 +1,39 @@
|
||||
import './MetricsSelect.styles.scss';
|
||||
|
||||
import { Select } from 'antd';
|
||||
import {
|
||||
initialQueriesMap,
|
||||
initialQueryMeterWithType,
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { AggregatorFilter } from 'container/QueryBuilder/filters';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { memo, useCallback, useState } from 'react';
|
||||
import { memo, useCallback, useMemo, useState } from 'react';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { SelectOption } from 'types/common/select';
|
||||
|
||||
export const SOURCE_OPTIONS: SelectOption<string, string>[] = [
|
||||
{ value: 'metrics', label: 'Metrics' },
|
||||
{ value: 'meter', label: 'Meter' },
|
||||
];
|
||||
|
||||
export const MetricsSelect = memo(function MetricsSelect({
|
||||
query,
|
||||
index,
|
||||
version,
|
||||
signalSource,
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
}: {
|
||||
query: IBuilderQuery;
|
||||
index: number;
|
||||
version: string;
|
||||
signalSource: 'meter' | '';
|
||||
onSignalSourceChange: (value: string) => void;
|
||||
signalSourceChangeEnabled: boolean;
|
||||
}): JSX.Element {
|
||||
const [attributeKeys, setAttributeKeys] = useState<BaseAutocompleteData[]>([]);
|
||||
|
||||
@@ -31,8 +49,67 @@ export const MetricsSelect = memo(function MetricsSelect({
|
||||
},
|
||||
[handleChangeAggregatorAttribute, attributeKeys],
|
||||
);
|
||||
|
||||
const { updateAllQueriesOperators, handleSetQueryData } = useQueryBuilder();
|
||||
|
||||
const source = useMemo(
|
||||
() => (signalSource === 'meter' ? 'meter' : 'metrics'),
|
||||
[signalSource],
|
||||
);
|
||||
|
||||
const defaultMeterQuery = useMemo(
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
initialQueryMeterWithType,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'meter' as 'meter' | '',
|
||||
),
|
||||
[updateAllQueriesOperators],
|
||||
);
|
||||
|
||||
const defaultMetricsQuery = useMemo(
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
initialQueriesMap.metrics,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'',
|
||||
),
|
||||
[updateAllQueriesOperators],
|
||||
);
|
||||
|
||||
const handleSignalSourceChange = (value: string): void => {
|
||||
onSignalSourceChange(value);
|
||||
handleSetQueryData(
|
||||
index,
|
||||
value === 'meter'
|
||||
? {
|
||||
...defaultMeterQuery.builder.queryData[0],
|
||||
source: 'meter',
|
||||
queryName: query.queryName,
|
||||
}
|
||||
: {
|
||||
...defaultMetricsQuery.builder.queryData[0],
|
||||
source: '',
|
||||
queryName: query.queryName,
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="metrics-select-container">
|
||||
<div className="metrics-source-select-container">
|
||||
{signalSourceChangeEnabled && (
|
||||
<Select
|
||||
className="source-selector"
|
||||
placeholder="Source"
|
||||
options={SOURCE_OPTIONS}
|
||||
value={source}
|
||||
defaultValue="metrics"
|
||||
onChange={handleSignalSourceChange}
|
||||
/>
|
||||
)}
|
||||
|
||||
<AggregatorFilter
|
||||
onChange={handleAggregatorAttributeChange}
|
||||
query={query}
|
||||
|
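The new `handleSignalSourceChange` handler in the hunk above resets the query at `index` whenever the source toggles between Metrics and Meter. Below is a minimal sketch of that reset with simplified types; `buildQueryForSource` is an illustrative name and not part of the change — the real handler passes the same object to `handleSetQueryData(index, …)`, and the default query objects stand in for the memoized `defaultMeterQuery` / `defaultMetricsQuery` built via `updateAllQueriesOperators`.

```ts
// Illustrative sketch only, not the component code from the diff.
type SignalSource = 'meter' | '';

function buildQueryForSource(
	source: SignalSource,
	queryName: string,
	defaultMeterQueryData: Record<string, unknown>,
	defaultMetricsQueryData: Record<string, unknown>,
): Record<string, unknown> {
	// Keep the query name stable (A, B, ...) so references to it survive the switch,
	// but replace every other field with the defaults for the selected source.
	return source === 'meter'
		? { ...defaultMeterQueryData, source: 'meter', queryName }
		: { ...defaultMetricsQueryData, source: '', queryName };
}
```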
||||
@@ -236,6 +236,10 @@
|
||||
background: var(--bg-ink-100) !important;
|
||||
opacity: 0.5 !important;
|
||||
}
|
||||
|
||||
.cm-activeLine > span {
|
||||
font-size: 12px !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -271,6 +275,9 @@
|
||||
|
||||
box-sizing: border-box;
|
||||
position: relative;
|
||||
.cm-placeholder {
|
||||
font-size: 12px !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -20,6 +20,8 @@
|
||||
border-radius: 2px;
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
font-size: 12px;
|
||||
color: var(--bg-vanilla-400) !important;
|
||||
|
||||
&.error {
|
||||
.cm-editor {
|
||||
@@ -231,6 +233,9 @@
|
||||
.query-aggregation-interval-input {
|
||||
input {
|
||||
max-width: 120px;
|
||||
&::placeholder {
|
||||
color: var(--bg-vanilla-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,7 @@
.add-trace-operator-button,
.add-new-query-button,
.add-formula-button {
	border: 1px solid var(--bg-slate-400);
	background: var(--bg-ink-300);
	box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
}
@@ -1,3 +1,5 @@
|
||||
import './QueryFooter.styles.scss';
|
||||
|
||||
/* eslint-disable react/require-default-props */
|
||||
import { Button, Tooltip, Typography } from 'antd';
|
||||
import { DraftingCompass, Plus, Sigma } from 'lucide-react';
|
||||
@@ -22,8 +24,7 @@ export default function QueryFooter({
|
||||
<div className="qb-add-new-query">
|
||||
<Tooltip title={<div style={{ textAlign: 'center' }}>Add New Query</div>}>
|
||||
<Button
|
||||
className="add-new-query-button periscope-btn secondary"
|
||||
type="text"
|
||||
className="add-new-query-button periscope-btn "
|
||||
icon={<Plus size={16} />}
|
||||
onClick={addNewBuilderQuery}
|
||||
/>
|
||||
@@ -49,7 +50,7 @@ export default function QueryFooter({
|
||||
}
|
||||
>
|
||||
<Button
|
||||
className="add-formula-button periscope-btn secondary"
|
||||
className="add-formula-button periscope-btn "
|
||||
icon={<Sigma size={16} />}
|
||||
onClick={addNewFormula}
|
||||
>
|
||||
@@ -77,7 +78,7 @@ export default function QueryFooter({
|
||||
}
|
||||
>
|
||||
<Button
|
||||
className="add-trace-operator-button periscope-btn secondary"
|
||||
className="add-trace-operator-button periscope-btn "
|
||||
icon={<DraftingCompass size={16} />}
|
||||
onClick={(): void => addTraceOperator?.()}
|
||||
>
|
||||
|
||||
@@ -33,7 +33,15 @@ export const QueryV2 = memo(function QueryV2({
|
||||
showOnlyWhereClause = false,
|
||||
signalSource = '',
|
||||
isMultiQueryAllowed = false,
|
||||
}: QueryProps & { ref: React.RefObject<HTMLDivElement> }): JSX.Element {
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
queriesCount = 1,
|
||||
}: QueryProps & {
|
||||
ref: React.RefObject<HTMLDivElement>;
|
||||
onSignalSourceChange: (value: string) => void;
|
||||
signalSourceChangeEnabled: boolean;
|
||||
queriesCount: number;
|
||||
}): JSX.Element {
|
||||
const { cloneQuery, panelType } = useQueryBuilder();
|
||||
|
||||
const showFunctions = query?.functions?.length > 0;
|
||||
@@ -186,12 +194,16 @@ export const QueryV2 = memo(function QueryV2({
|
||||
icon: <Copy size={14} />,
|
||||
onClick: handleCloneEntity,
|
||||
},
|
||||
{
|
||||
label: 'Delete',
|
||||
key: 'delete-query',
|
||||
icon: <Trash size={14} />,
|
||||
onClick: handleDeleteQuery,
|
||||
},
|
||||
...(queriesCount && queriesCount > 1
|
||||
? [
|
||||
{
|
||||
label: 'Delete',
|
||||
key: 'delete-query',
|
||||
icon: <Trash size={14} />,
|
||||
onClick: handleDeleteQuery,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
],
|
||||
}}
|
||||
placement="bottomRight"
|
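The conditional spread in the hunk above is what keeps the Delete entry out of the dropdown when only one query is left. A small sketch of the pattern follows; the item shape is simplified and the helper name is illustrative — the real entries also carry lucide-react icons and onClick handlers.

```ts
// Sketch of the conditional menu-item pattern used above; not the actual component code.
type MenuItem = { label: string; key: string };

function deleteItemIfAllowed(queriesCount: number): MenuItem[] {
	// Spreading a conditional array keeps the items list flat, with no null entries.
	return queriesCount > 1 ? [{ label: 'Delete', key: 'delete-query' }] : [];
}

// Usage inside a menu definition:
// items: [...baseItems, ...deleteItemIfAllowed(queriesCount)]
```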
||||
@@ -207,12 +219,14 @@ export const QueryV2 = memo(function QueryV2({
|
||||
<div className="qb-elements-container">
|
||||
<div className="qb-search-container">
|
||||
{dataSource === DataSource.METRICS && (
|
||||
<div className="metrics-select-container">
|
||||
<div className="metrics-container">
|
||||
<MetricsSelect
|
||||
query={query}
|
||||
index={index}
|
||||
version={ENTITY_VERSION_V5}
|
||||
signalSource={signalSource as 'meter' | ''}
|
||||
onSignalSourceChange={onSignalSourceChange}
|
||||
signalSourceChangeEnabled={signalSourceChangeEnabled}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
@@ -258,7 +272,7 @@ export const QueryV2 = memo(function QueryV2({
|
||||
panelType={panelType}
|
||||
query={query}
|
||||
index={index}
|
||||
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}`}
|
||||
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}-${signalSource}`}
|
||||
version="v4"
|
||||
signalSource={signalSource as 'meter' | ''}
|
||||
/>
|
||||
|
||||
@@ -92,6 +92,9 @@
|
||||
|
||||
.qb-trace-operator-editor-container {
|
||||
flex: 1;
|
||||
.cm-activeLine > span {
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
&.arrow-left {
|
||||
@@ -113,6 +116,8 @@
|
||||
text-overflow: ellipsis;
|
||||
padding: 0px 8px;
|
||||
border-right: 1px solid var(--bg-slate-400);
|
||||
font-size: 12px;
|
||||
font-weight: 300;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -68,7 +68,7 @@ export default function TraceOperator({
|
||||
!isListViewPanel && 'qb-trace-operator-arrow',
|
||||
)}
|
||||
>
|
||||
<Typography.Text className="label">TRACE OPERATOR</Typography.Text>
|
||||
<Typography.Text className="label">Trace Operator</Typography.Text>
|
||||
<div className="qb-trace-operator-editor-container">
|
||||
<TraceOperatorEditor
|
||||
value={traceOperator?.expression || ''}
|
||||
|
||||
@@ -224,7 +224,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
|
||||
const visitedPairs: Set<string> = new Set(); // Set to track visited query pairs
|
||||
|
||||
// Map extracted query pairs to key-specific pair information for faster access
|
||||
let queryPairsMap = getQueryPairsMap(existingQuery.trim());
|
||||
let queryPairsMap = getQueryPairsMap(existingQuery);
|
||||
|
||||
filters?.items?.forEach((filter) => {
|
||||
const { key, op, value } = filter;
|
||||
@@ -309,7 +309,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
|
||||
)}${OPERATORS.IN} ${formattedValue} ${modifiedQuery.slice(
|
||||
notInPair.position.valueEnd + 1,
|
||||
)}`;
|
||||
queryPairsMap = getQueryPairsMap(modifiedQuery.trim());
|
||||
queryPairsMap = getQueryPairsMap(modifiedQuery);
|
||||
}
|
||||
shouldAddToNonExisting = false; // Don't add this to non-existing filters
|
||||
} else if (
|
||||
|
||||
@@ -24,6 +24,7 @@ export const DATE_TIME_FORMATS = {
	TIME_SECONDS: 'HH:mm:ss',
	TIME_UTC: 'HH:mm:ss (UTC Z)',
	TIME_UTC_MS: 'HH:mm:ss.SSS (UTC Z)',
	TIME_SPAN_PERCENTILE: 'HH:mm:ss MMM DD',

	// Short date formats
	DATE_SHORT: 'MM/DD',

@@ -68,8 +68,8 @@ export const metricQueryFunctionOptions: SelectOption<string, string>[] = [
		label: 'Time Shift',
	},
	{
		value: QueryFunctionsTypes.TIME_SHIFT,
		label: 'Time Shift',
		value: QueryFunctionsTypes.FILL_ZERO,
		label: 'Fill Zero',
	},
];

@@ -156,4 +157,7 @@ export const queryFunctionsTypesConfig: QueryFunctionConfigType = {
		showInput: true,
		inputType: 'text',
	},
	fillZero: {
		showInput: false,
	},
};

@@ -90,4 +90,7 @@ export const REACT_QUERY_KEY = {

	// Routing Policies Query Keys
	GET_ROUTING_POLICIES: 'GET_ROUTING_POLICIES',

	// Span Percentiles Query Keys
	GET_SPAN_PERCENTILES: 'GET_SPAN_PERCENTILES',
} as const;
@@ -34,7 +34,7 @@ const themeColors = {
|
||||
cyan: '#00FFFF',
|
||||
},
|
||||
chartcolors: {
|
||||
robin: '#3F5ECC',
|
||||
radicalRed: '#FF1A66',
|
||||
dodgerBlue: '#2F80ED',
|
||||
mediumOrchid: '#BB6BD9',
|
||||
seaBuckthorn: '#F2994A',
|
||||
@@ -58,7 +58,7 @@ const themeColors = {
|
||||
oliveDrab: '#66991A',
|
||||
lavenderRose: '#FF99E6',
|
||||
electricLime: '#CCFF1A',
|
||||
radicalRed: '#FF1A66',
|
||||
robin: '#3F5ECC',
|
||||
harleyOrange: '#E6331A',
|
||||
turquoise: '#33FFCC',
|
||||
gladeGreen: '#66994D',
|
||||
@@ -80,7 +80,7 @@ const themeColors = {
|
||||
maroon: '#800000',
|
||||
navy: '#000080',
|
||||
aquamarine: '#7FFFD4',
|
||||
gold: '#FFD700',
|
||||
darkSeaGreen: '#8FBC8F',
|
||||
gray: '#808080',
|
||||
skyBlue: '#87CEEB',
|
||||
indigo: '#4B0082',
|
||||
@@ -105,7 +105,7 @@ const themeColors = {
|
||||
lawnGreen: '#7CFC00',
|
||||
mediumSeaGreen: '#3CB371',
|
||||
lightCoral: '#F08080',
|
||||
darkSeaGreen: '#8FBC8F',
|
||||
gold: '#FFD700',
|
||||
sandyBrown: '#F4A460',
|
||||
darkKhaki: '#BDB76B',
|
||||
cornflowerBlue: '#6495ED',
|
||||
@@ -113,7 +113,7 @@ const themeColors = {
|
||||
paleGreen: '#98FB98',
|
||||
},
|
||||
lightModeColor: {
|
||||
robin: '#3F5ECC',
|
||||
radicalRed: '#FF1A66',
|
||||
dodgerBlueDark: '#0C6EED',
|
||||
steelgrey: '#2f4b7c',
|
||||
steelpurple: '#665191',
|
||||
@@ -143,7 +143,7 @@ const themeColors = {
|
||||
oliveDrab: '#66991A',
|
||||
lavenderRoseDark: '#F024BD',
|
||||
electricLimeDark: '#84A800',
|
||||
radicalRed: '#FF1A66',
|
||||
robin: '#3F5ECC',
|
||||
harleyOrange: '#E6331A',
|
||||
gladeGreen: '#66994D',
|
||||
hemlock: '#66664D',
|
||||
@@ -181,7 +181,7 @@ const themeColors = {
|
||||
darkOrchid: '#9932CC',
|
||||
mediumSeaGreenDark: '#109E50',
|
||||
lightCoralDark: '#F85959',
|
||||
darkSeaGreenDark: '#509F50',
|
||||
gold: '#FFD700',
|
||||
sandyBrownDark: '#D97117',
|
||||
darkKhakiDark: '#99900A',
|
||||
cornflowerBlueDark: '#3371E6',
|
||||
|
||||
@@ -3,4 +3,5 @@ export const USER_PREFERENCES = {
	NAV_SHORTCUTS: 'nav_shortcuts',
	LAST_SEEN_CHANGELOG_VERSION: 'last_seen_changelog_version',
	SPAN_DETAILS_PINNED_ATTRIBUTES: 'span_details_pinned_attributes',
	SPAN_PERCENTILE_RESOURCE_ATTRIBUTES: 'span_percentile_resource_attributes',
};

@@ -1,4 +1,5 @@
|
||||
import { Select } from 'antd';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import {
|
||||
getAllEndpointsWidgetData,
|
||||
@@ -264,6 +265,7 @@ function AllEndPoints({
|
||||
customOnDragSelect={(): void => {}}
|
||||
customTimeRange={timeRange}
|
||||
customOnRowClick={onRowClick}
|
||||
version={ENTITY_VERSION_V5}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,5 +1,6 @@
import { ENTITY_VERSION_V4 } from 'constants/app';
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useApiMonitoringParams } from 'container/ApiMonitoring/queryParams';
import {
	END_POINT_DETAILS_QUERY_KEYS_ARRAY,
@@ -178,18 +179,33 @@ function EndPointDetails({
		[domainName, filters, minTime, maxTime],
	);

	const V5_QUERIES = [
		REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_DATA,
		REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA,
		REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA,
		REACT_QUERY_KEY.GET_ENDPOINT_METRICS_DATA,
		REACT_QUERY_KEY.GET_ENDPOINT_DEPENDENT_SERVICES_DATA,
		REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
	] as const;

	const endPointDetailsDataQueries = useQueries(
		endPointDetailsQueryPayload.map((payload, index) => ({
			queryKey: [
				END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
				payload,
				filters?.items, // Include filters.items in queryKey for better caching
				ENTITY_VERSION_V4,
			],
			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
				GetMetricQueryRange(payload, ENTITY_VERSION_V4),
			enabled: !!payload,
		})),
		endPointDetailsQueryPayload.map((payload, index) => {
			const queryKey = END_POINT_DETAILS_QUERY_KEYS_ARRAY[index];
			const version = (V5_QUERIES as readonly string[]).includes(queryKey)
				? ENTITY_VERSION_V5
				: ENTITY_VERSION_V4;
			return {
				queryKey: [
					END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
					payload,
					...(filters?.items?.length ? filters.items : []), // Include filters.items in queryKey for better caching
					version,
				],
				queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
					GetMetricQueryRange(payload, version),
				enabled: !!payload,
			};
		}),
	);

	const [
||||
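The change above keeps the existing payload array but decides the API version per query key: keys listed in `V5_QUERIES` use the v5 query range, everything else stays on v4. A self-contained sketch of that selection follows; the string values 'v4'/'v5' are placeholders for the real `ENTITY_VERSION_V4` / `ENTITY_VERSION_V5` constants from 'constants/app', and only a subset of keys is shown.

```ts
// Placeholder constants; the real values come from 'constants/app'.
const ENTITY_VERSION_V4 = 'v4';
const ENTITY_VERSION_V5 = 'v5';

// Subset of the keys listed in V5_QUERIES above, for illustration only.
const V5_QUERIES = [
	'GET_ENDPOINT_STATUS_CODE_DATA',
	'GET_ENDPOINT_METRICS_DATA',
] as const;

function versionForQueryKey(queryKey: string): string {
	// Same membership check as the diff: being listed in V5_QUERIES selects v5.
	return (V5_QUERIES as readonly string[]).includes(queryKey)
		? ENTITY_VERSION_V5
		: ENTITY_VERSION_V4;
}

// versionForQueryKey('GET_ENDPOINT_METRICS_DATA') === 'v5'
// versionForQueryKey('SOME_OTHER_QUERY_KEY') === 'v4'
```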
@@ -1,8 +1,10 @@
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Spin, Switch, Table, Tooltip, Typography } from 'antd';
|
||||
import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
|
||||
import { MetricRangePayloadV5, ScalarData } from 'api/v5/v5';
|
||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||
import { withErrorBoundary } from 'components/ErrorBoundaryHOC';
|
||||
import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
@@ -11,13 +13,12 @@ import {
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
TopErrorsResponseRow,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { Info } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { QueryFunctionContext, useQueries, useQuery } from 'react-query';
|
||||
import { SuccessResponse, SuccessResponseV2 } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -46,7 +47,7 @@ function TopErrors({
|
||||
true,
|
||||
);
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
const queryPayload = useMemo(
|
||||
() =>
|
||||
getTopErrorsQueryPayload(
|
||||
domainName,
|
||||
@@ -55,6 +56,10 @@ function TopErrors({
|
||||
{
|
||||
items: endPointName
|
||||
? [
|
||||
// Remove any existing http.url filters from initialFilters to avoid duplicates
|
||||
...(initialFilters?.items?.filter(
|
||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
|
||||
) || []),
|
||||
{
|
||||
id: '92b8a1c1',
|
||||
key: {
|
||||
@@ -65,7 +70,6 @@ function TopErrors({
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...(initialFilters?.items || []),
|
||||
]
|
||||
: [...(initialFilters?.items || [])],
|
||||
op: 'AND',
|
||||
@@ -82,37 +86,34 @@ function TopErrors({
|
||||
],
|
||||
);
|
||||
|
||||
const topErrorsDataQueries = useQueries(
|
||||
queryPayloads.map((payload) => ({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
payload,
|
||||
DEFAULT_ENTITY_VERSION,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, DEFAULT_ENTITY_VERSION),
|
||||
enabled: !!payload,
|
||||
staleTime: 0,
|
||||
cacheTime: 0,
|
||||
})),
|
||||
);
|
||||
|
||||
const topErrorsDataQuery = topErrorsDataQueries[0];
|
||||
const {
|
||||
data: topErrorsData,
|
||||
isLoading,
|
||||
isRefetching,
|
||||
isError,
|
||||
refetch,
|
||||
} = topErrorsDataQuery;
|
||||
} = useQuery({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
queryPayload,
|
||||
ENTITY_VERSION_V5,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
queryFn: ({
|
||||
signal,
|
||||
}: QueryFunctionContext): Promise<SuccessResponseV2<MetricRangePayloadV5>> =>
|
||||
getQueryRangeV5(queryPayload, ENTITY_VERSION_V5, signal),
|
||||
enabled: !!queryPayload,
|
||||
staleTime: 0,
|
||||
cacheTime: 0,
|
||||
});
|
||||
|
||||
const topErrorsColumnsConfig = useMemo(() => getTopErrorsColumnsConfig(), []);
|
||||
|
||||
const formattedTopErrorsData = useMemo(
|
||||
() =>
|
||||
formatTopErrorsDataForTable(
|
||||
topErrorsData?.payload?.data?.result as TopErrorsResponseRow[],
|
||||
topErrorsData?.data?.data?.data?.results[0] as ScalarData,
|
||||
),
|
||||
[topErrorsData],
|
||||
);
|
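`TopErrors` now issues a single `useQuery` against the v5 query-range API instead of a one-element `useQueries` array. The sketch below isolates how the abort signal is threaded through; `getQueryRangeV5` is declared here only as a stub with the argument order used at the call site above, and the constant and payload values are placeholders.

```ts
import { QueryFunctionContext } from 'react-query';

// Stub mirroring the argument order at the call site above; the real function is
// imported from 'api/v5/queryRange/getQueryRange'.
declare function getQueryRangeV5(
	payload: unknown,
	version: string,
	signal?: AbortSignal,
): Promise<unknown>;

const ENTITY_VERSION_V5 = 'v5'; // placeholder value
const queryPayload = {}; // built by getTopErrorsQueryPayload in the real code

// react-query passes an AbortSignal to the queryFn; forwarding it lets the request
// be cancelled when the query key changes or the component unmounts, assuming the
// underlying HTTP client honours the signal.
const queryFn = ({ signal }: QueryFunctionContext): Promise<unknown> =>
	getQueryRangeV5(queryPayload, ENTITY_VERSION_V5, signal);
```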
||||
@@ -130,12 +131,12 @@ function TopErrors({
|
||||
const endPointDropDownDataQueries = useQueries(
|
||||
endPointDropDownQueryPayload.map((payload) => ({
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[4],
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[2],
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
ENTITY_VERSION_V5,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V5),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000,
|
||||
})),
|
||||
|
||||
@@ -0,0 +1,337 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable prefer-destructuring */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { TraceAggregation } from 'api/v5/v5';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import DomainMetrics from './DomainMetrics';
|
||||
|
||||
// Mock the API call
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
GetMetricQueryRange: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock ErrorState component
|
||||
jest.mock('./ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(({ refetch }) => (
|
||||
<div data-testid="error-state">
|
||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const mockProps = {
|
||||
domainName: '0.0.0.0',
|
||||
timeRange: {
|
||||
startTime: 1758259531000,
|
||||
endTime: 1758261331000,
|
||||
},
|
||||
domainListFilters: {
|
||||
items: [],
|
||||
op: 'AND' as const,
|
||||
} as IBuilderQuery['filters'],
|
||||
};
|
||||
|
||||
const mockSuccessResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
A: '150',
|
||||
B: '125000000',
|
||||
D: '2021-01-01T23:00:00Z',
|
||||
F1: '5.5',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
cacheTime: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
const renderComponent = (props = mockProps): ReturnType<typeof render> =>
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<DomainMetrics {...props} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
describe('1. V5 Query Payload with Filters', () => {
|
||||
it('sends correct V5 payload structure with domain name filters', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(GetMetricQueryRange).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
const [payload, version] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
||||
|
||||
// Verify it's using V5
|
||||
expect(version).toBe(ENTITY_VERSION_V5);
|
||||
|
||||
// Verify time range
|
||||
expect(payload.start).toBe(1758259531000);
|
||||
expect(payload.end).toBe(1758261331000);
|
||||
|
||||
// Verify V3 payload structure (getDomainMetricsQueryPayload returns V3 format)
|
||||
expect(payload.query).toBeDefined();
|
||||
expect(payload.query.builder).toBeDefined();
|
||||
expect(payload.query.builder.queryData).toBeDefined();
|
||||
|
||||
const queryData = payload.query.builder.queryData;
|
||||
|
||||
// Verify Query A - count with URL filter
|
||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
||||
expect(queryA).toBeDefined();
|
||||
expect(queryA.dataSource).toBe('traces');
|
||||
expect(queryA.aggregations?.[0]).toBeDefined();
|
||||
expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain(
|
||||
'url.full EXISTS OR http.url EXISTS',
|
||||
);
|
||||
|
||||
// Verify Query B - p99 latency
|
||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
||||
expect(queryB).toBeDefined();
|
||||
expect(queryB.aggregateOperator).toBe('p99');
|
||||
expect(queryB.aggregations?.[0]).toBeDefined();
|
||||
expect((queryB.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'p99(duration_nano)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryC.aggregations?.[0]).toBeDefined();
|
||||
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
);
|
||||
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
|
||||
// Verify Query D - max timestamp
|
||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
||||
expect(queryD).toBeDefined();
|
||||
expect(queryD.aggregateOperator).toBe('max');
|
||||
expect(queryD.aggregations?.[0]).toBeDefined();
|
||||
expect((queryD.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'max(timestamp)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = payload.query.builder.queryFormulas;
|
||||
expect(formulas).toBeDefined();
|
||||
expect(formulas.length).toBeGreaterThan(0);
|
||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
||||
expect(formulaF1).toBeDefined();
|
||||
expect(formulaF1.expression).toBe('(C/A)*100');
|
||||
});
|
||||
|
||||
it('includes custom filters in filter expressions', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const customFilters: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-1',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'my-service',
|
||||
},
|
||||
{
|
||||
id: 'test-2',
|
||||
key: {
|
||||
key: 'deployment.environment',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'production',
|
||||
},
|
||||
],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
renderComponent({
|
||||
...mockProps,
|
||||
domainListFilters: customFilters,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(GetMetricQueryRange).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
const [payload] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
||||
const queryData = payload.query.builder.queryData;
|
||||
|
||||
// Verify all queries include the custom filters
|
||||
queryData.forEach((query: any) => {
|
||||
if (query.filter && query.filter.expression) {
|
||||
expect(query.filter.expression).toContain('service.name');
|
||||
expect(query.filter.expression).toContain('my-service');
|
||||
expect(query.filter.expression).toContain('deployment.environment');
|
||||
expect(query.filter.expression).toContain('production');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. Data Display State', () => {
|
||||
it('displays metrics when data is successfully loaded', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
// Wait for skeletons to disappear
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// Verify all metric labels are displayed
|
||||
expect(screen.getByText('EXTERNAL API')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
|
||||
// Verify metric values are displayed
|
||||
expect(screen.getByText('150')).toBeInTheDocument();
|
||||
expect(screen.getByText('0.125s')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. Empty/Missing Data State', () => {
|
||||
it('displays "-" for missing data values', async () => {
|
||||
const emptyResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(emptyResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// When no data, all values should show "-"
|
||||
const dashValues = screen.getAllByText('-');
|
||||
expect(dashValues.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. Error State', () => {
|
||||
it('displays error state when API call fails', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockRejectedValue(new Error('API Error'));
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('retries API call when retry button is clicked', async () => {
|
||||
let callCount = 0;
|
||||
(GetMetricQueryRange as jest.Mock).mockImplementation(() => {
|
||||
callCount += 1;
|
||||
if (callCount === 1) {
|
||||
return Promise.reject(new Error('API Error'));
|
||||
}
|
||||
return Promise.resolve(mockSuccessResponse);
|
||||
});
|
||||
|
||||
renderComponent();
|
||||
|
||||
// Wait for error state
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click retry
|
||||
const retryButton = screen.getByTestId('retry-button');
|
||||
retryButton.click();
|
||||
|
||||
// Wait for successful load
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('150')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(callCount).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
DomainMetricsResponseRow,
|
||||
@@ -44,10 +44,10 @@ function DomainMetrics({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_DOMAIN_METRICS_DATA,
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
ENTITY_VERSION_V5,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V5),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000, // 1 minute stale time : optimize this part
|
||||
})),
|
||||
@@ -132,7 +132,9 @@ function DomainMetrics({
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.latency}>
|
||||
<span className="round-metric-tag">
|
||||
{(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s
|
||||
{formattedDomainMetricsData.latency !== '-'
|
||||
? `${(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s`
|
||||
: '-'}
|
||||
</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
@@ -143,23 +145,27 @@ function DomainMetrics({
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
)}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
{formattedDomainMetricsData.errorRate !== '-' ? (
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
)}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
) : (
|
||||
'-'
|
||||
)}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
|
||||
@@ -0,0 +1,419 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable prefer-destructuring */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { QueryClient, QueryClientProvider, UseQueryResult } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import EndPointMetrics from './EndPointMetrics';
|
||||
|
||||
// Mock the API call
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
GetMetricQueryRange: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock ErrorState component
|
||||
jest.mock('./ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(({ refetch }) => (
|
||||
<div data-testid="error-state">
|
||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const mockSuccessResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
A: '85.5',
|
||||
B: '245000000',
|
||||
D: '2021-01-01T22:30:00Z',
|
||||
F1: '3.2',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
cacheTime: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
// Helper to create mock query result
|
||||
const createMockQueryResult = (
|
||||
response: any,
|
||||
overrides?: Partial<UseQueryResult<SuccessResponse<any>, unknown>>,
|
||||
): UseQueryResult<SuccessResponse<any>, unknown> =>
|
||||
({
|
||||
data: response,
|
||||
error: null,
|
||||
isError: false,
|
||||
isIdle: false,
|
||||
isLoading: false,
|
||||
isLoadingError: false,
|
||||
isRefetchError: false,
|
||||
isRefetching: false,
|
||||
isStale: true,
|
||||
isSuccess: true,
|
||||
status: 'success' as const,
|
||||
dataUpdatedAt: Date.now(),
|
||||
errorUpdateCount: 0,
|
||||
errorUpdatedAt: 0,
|
||||
failureCount: 0,
|
||||
isFetched: true,
|
||||
isFetchedAfterMount: true,
|
||||
isFetching: false,
|
||||
isPlaceholderData: false,
|
||||
isPreviousData: false,
|
||||
refetch: jest.fn(),
|
||||
remove: jest.fn(),
|
||||
...overrides,
|
||||
} as UseQueryResult<SuccessResponse<any>, unknown>);
|
||||
|
||||
const renderComponent = (
|
||||
endPointMetricsDataQuery: UseQueryResult<SuccessResponse<any>, unknown>,
|
||||
): ReturnType<typeof render> =>
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
describe('1. V5 Query Payload with Filters', () => {
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
it('sends correct V5 payload structure with domain and endpoint filters', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const domainName = 'api.example.com';
|
||||
const startTime = 1758259531000;
|
||||
const endTime = 1758261331000;
|
||||
const filters = {
|
||||
items: [],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
// Get the actual payload that would be generated
|
||||
const payloads = getEndPointDetailsQueryPayload(
|
||||
domainName,
|
||||
startTime,
|
||||
endTime,
|
||||
filters,
|
||||
);
|
||||
|
||||
// First payload is for endpoint metrics
|
||||
const metricsPayload = payloads[0];
|
||||
|
||||
// Verify it's using the correct structure (V3 format for V5 API)
|
||||
expect(metricsPayload.query).toBeDefined();
|
||||
expect(metricsPayload.query.builder).toBeDefined();
|
||||
expect(metricsPayload.query.builder.queryData).toBeDefined();
|
||||
|
||||
const queryData = metricsPayload.query.builder.queryData;
|
||||
|
||||
// Verify Query A - rate with domain and client kind filters
|
||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
||||
expect(queryA).toBeDefined();
|
||||
if (queryA) {
|
||||
expect(queryA.dataSource).toBe('traces');
|
||||
expect(queryA.aggregateOperator).toBe('rate');
|
||||
expect(queryA.timeAggregation).toBe('rate');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryA.filter) {
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query B - p99 latency with duration_nano
|
||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
||||
expect(queryB).toBeDefined();
|
||||
if (queryB) {
|
||||
expect(queryB.aggregateOperator).toBe('p99');
|
||||
if (queryB.aggregateAttribute) {
|
||||
expect(queryB.aggregateAttribute.key).toBe('duration_nano');
|
||||
}
|
||||
expect(queryB.timeAggregation).toBe('p99');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryB.filter) {
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
if (queryC) {
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.aggregateOperator).toBe('count');
|
||||
if (queryC.filter) {
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query D - max timestamp for last used
|
||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
||||
expect(queryD).toBeDefined();
|
||||
if (queryD) {
|
||||
expect(queryD.aggregateOperator).toBe('max');
|
||||
if (queryD.aggregateAttribute) {
|
||||
expect(queryD.aggregateAttribute.key).toBe('timestamp');
|
||||
}
|
||||
expect(queryD.timeAggregation).toBe('max');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryD.filter) {
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query E - total count (disabled)
|
||||
const queryE = queryData.find((q: any) => q.queryName === 'E');
|
||||
expect(queryE).toBeDefined();
|
||||
if (queryE) {
|
||||
expect(queryE.disabled).toBe(true);
|
||||
expect(queryE.aggregateOperator).toBe('count');
|
||||
if (queryE.aggregateAttribute) {
|
||||
expect(queryE.aggregateAttribute.key).toBe('span_id');
|
||||
}
|
||||
if (queryE.filter) {
|
||||
expect(queryE.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = metricsPayload.query.builder.queryFormulas;
|
||||
expect(formulas).toBeDefined();
|
||||
expect(formulas.length).toBeGreaterThan(0);
|
||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
||||
expect(formulaF1).toBeDefined();
|
||||
if (formulaF1) {
|
||||
expect(formulaF1.expression).toBe('(C/E)*100');
|
||||
expect(formulaF1.disabled).toBe(false);
|
||||
expect(formulaF1.legend).toBe('error percentage');
|
||||
}
|
||||
});
|
||||
|
||||
it('includes custom domainListFilters in all query expressions', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const customFilters = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-1',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'payment-service',
|
||||
},
|
||||
{
|
||||
id: 'test-2',
|
||||
key: {
|
||||
key: 'deployment.environment',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'staging',
|
||||
},
|
||||
],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
const payloads = getEndPointDetailsQueryPayload(
|
||||
'api.internal.com',
|
||||
1758259531000,
|
||||
1758261331000,
|
||||
customFilters,
|
||||
);
|
||||
|
||||
const queryData = payloads[0].query.builder.queryData;
|
||||
|
||||
// Verify ALL queries (A, B, C, D, E) include the custom filters
|
||||
const allQueryNames = ['A', 'B', 'C', 'D', 'E'];
|
||||
allQueryNames.forEach((queryName) => {
|
||||
const query = queryData.find((q: any) => q.queryName === queryName);
|
||||
expect(query).toBeDefined();
|
||||
if (query && query.filter && query.filter.expression) {
|
||||
// Check for exact filter inclusion
|
||||
expect(query.filter.expression).toContain('service.name');
|
||||
expect(query.filter.expression).toContain('payment-service');
|
||||
expect(query.filter.expression).toContain('deployment.environment');
|
||||
expect(query.filter.expression).toContain('staging');
|
||||
// Also verify domain filter is still present
|
||||
expect(query.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
|
||||
);
|
||||
// Verify client kind filter is present
|
||||
expect(query.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. Data Display State', () => {
|
||||
it('displays metrics when data is successfully loaded', async () => {
|
||||
const mockQuery = createMockQueryResult(mockSuccessResponse);
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
// Wait for skeletons to disappear
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// Verify all metric labels are displayed
|
||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
|
||||
// Verify metric values are displayed
|
||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
||||
expect(screen.getByText('245ms')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. Empty/Missing Data State', () => {
|
||||
it("displays '-' for missing data values", async () => {
|
||||
const emptyResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const mockQuery = createMockQueryResult(emptyResponse);
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// When no data, all values should show "-"
|
||||
const dashValues = screen.getAllByText('-');
|
||||
// Should have at least 2 dashes (rate and last used - latency shows "-", error % shows progress bar)
|
||||
expect(dashValues.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. Error State', () => {
|
||||
it('displays error state when API call fails', async () => {
|
||||
const mockQuery = createMockQueryResult(null, {
|
||||
isError: true,
|
||||
isSuccess: false,
|
||||
status: 'error',
|
||||
error: new Error('API Error'),
|
||||
});
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('retries API call when retry button is clicked', async () => {
|
||||
const refetch = jest.fn().mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
// Start with error state
|
||||
const mockQuery = createMockQueryResult(null, {
|
||||
isError: true,
|
||||
isSuccess: false,
|
||||
status: 'error',
|
||||
error: new Error('API Error'),
|
||||
refetch,
|
||||
});
|
||||
|
||||
const { rerender } = renderComponent(mockQuery);
|
||||
|
||||
// Wait for error state
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click retry
|
||||
const retryButton = screen.getByTestId('retry-button');
|
||||
retryButton.click();
|
||||
|
||||
// Verify refetch was called
|
||||
expect(refetch).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Simulate successful refetch by rerendering with success state
|
||||
const successQuery = createMockQueryResult(mockSuccessResponse);
|
||||
rerender(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<EndPointMetrics endPointMetricsDataQuery={successQuery} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
// Wait for successful load
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,12 +1,16 @@
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
||||
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
|
||||
import {
|
||||
getDisplayValue,
|
||||
getFormattedEndPointMetricsData,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { useMemo } from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import ErrorState from './ErrorState';
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function EndPointMetrics({
|
||||
endPointMetricsDataQuery,
|
||||
}: {
|
||||
@@ -70,7 +74,9 @@ function EndPointMetrics({
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={metricsData?.rate}>
|
||||
<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
|
||||
<span className="round-metric-tag">
|
||||
{metricsData?.rate !== '-' ? `${metricsData?.rate} ops/sec` : '-'}
|
||||
</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
@@ -79,7 +85,7 @@ function EndPointMetrics({
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={metricsData?.latency}>
|
||||
<span className="round-metric-tag">{metricsData?.latency}ms</span>
|
||||
{metricsData?.latency !== '-' ? `${metricsData?.latency}ms` : '-'}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
@@ -88,21 +94,25 @@ function EndPointMetrics({
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={metricsData?.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
Number(metricsData?.errorRate ?? 0).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
{metricsData?.errorRate !== '-' ? (
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
Number(metricsData?.errorRate ?? 0).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
) : (
|
||||
'-'
|
||||
)}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
@@ -110,7 +120,9 @@ function EndPointMetrics({
|
||||
{isLoading || isRefetching ? (
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={metricsData?.lastUsed}>{metricsData?.lastUsed}</Tooltip>
|
||||
<Tooltip title={metricsData?.lastUsed}>
|
||||
{getDisplayValue(metricsData?.lastUsed)}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
|
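`getDisplayValue` is imported from container/ApiMonitoring/utils, but its body is not part of this diff. Judging from the usage above (render '-' when the value is missing), a plausible minimal implementation might look like the following; this is an assumption, not the actual utility.

```ts
// Hypothetical sketch; the real getDisplayValue lives in container/ApiMonitoring/utils.
function getDisplayValue(value: string | number | null | undefined): string {
	if (value === undefined || value === null || value === '') {
		return '-';
	}
	return String(value);
}

// getDisplayValue(undefined) === '-'
// getDisplayValue('2021-01-01T22:30:00Z') === '2021-01-01T22:30:00Z'
```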
||||
@@ -1,4 +1,5 @@
|
||||
import { Card } from 'antd';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
|
||||
@@ -22,6 +23,7 @@ function MetricOverTimeGraph({
|
||||
customOnDragSelect={(): void => {}}
|
||||
customTimeRange={timeRange}
|
||||
customTimeRangeWindowForCoRelation="5m"
|
||||
version={ENTITY_VERSION_V5}
|
||||
/>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
@@ -8,23 +8,14 @@ import {
|
||||
endPointStatusCodeColumns,
|
||||
extractPortAndEndpoint,
|
||||
formatDataForTable,
|
||||
formatTopErrorsDataForTable,
|
||||
getAllEndpointsWidgetData,
|
||||
getCustomFiltersForBarChart,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getFormattedDependentServicesData,
|
||||
getFormattedEndPointDropDownData,
|
||||
getFormattedEndPointMetricsData,
|
||||
getFormattedEndPointStatusCodeChartData,
|
||||
getFormattedEndPointStatusCodeData,
|
||||
getGroupByFiltersFromGroupByValues,
|
||||
getLatencyOverTimeWidgetData,
|
||||
getRateOverTimeWidgetData,
|
||||
getStatusCodeBarChartWidgetData,
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
TopErrorsResponseRow,
|
||||
} from '../utils';
|
||||
import { APIMonitoringColumnsMock } from './mock';
|
||||
|
||||
@@ -52,119 +43,13 @@ jest.mock('../utils', () => {
|
||||
});
|
||||
|
||||
describe('API Monitoring Utils', () => {
|
||||
describe('getAllEndpointsWidgetData', () => {
|
||||
it('should create a widget with correct configuration', () => {
|
||||
// Arrange
|
||||
const groupBy = [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
key: 'http.method',
|
||||
type: '',
|
||||
},
|
||||
];
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
const domainName = 'test-domain';
|
||||
const filters = {
|
||||
items: [
|
||||
{
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: 'test-key',
|
||||
type: '',
|
||||
},
|
||||
op: '=',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = getAllEndpointsWidgetData(
|
||||
groupBy as BaseAutocompleteData[],
|
||||
domainName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.id).toBeDefined();
|
||||
// Title is a React component, not a string
|
||||
expect(result.title).toBeDefined();
|
||||
expect(result.panelTypes).toBe(PANEL_TYPES.TABLE);
|
||||
|
||||
// Check that each query includes the domainName filter
|
||||
result.query.builder.queryData.forEach((query) => {
|
||||
const serverNameFilter = query.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(serverNameFilter).toBeDefined();
|
||||
expect(serverNameFilter?.value).toBe(domainName);
|
||||
|
||||
// Check that the custom filters were included
|
||||
const testFilter = query.filters?.items?.find(
|
||||
(item) => item.id === 'test-filter',
|
||||
);
|
||||
expect(testFilter).toBeDefined();
|
||||
});
|
||||
|
||||
// Verify groupBy was included in queries
|
||||
if (result.query.builder.queryData[0].groupBy) {
|
||||
const hasCustomGroupBy = result.query.builder.queryData[0].groupBy.some(
|
||||
(item) => item && item.key === 'http.method',
|
||||
);
|
||||
expect(hasCustomGroupBy).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle empty groupBy correctly', () => {
|
||||
// Arrange
|
||||
const groupBy: any[] = [];
|
||||
const domainName = 'test-domain';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getAllEndpointsWidgetData(groupBy, domainName, filters);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
// Should only include default groupBy
|
||||
if (result.query.builder.queryData[0].groupBy) {
|
||||
expect(result.query.builder.queryData[0].groupBy.length).toBeGreaterThan(0);
|
||||
// Check that it doesn't have extra group by fields (only defaults)
|
||||
const defaultGroupByLength =
|
||||
result.query.builder.queryData[0].groupBy.length;
|
||||
const resultWithCustomGroupBy = getAllEndpointsWidgetData(
|
||||
[
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
key: 'custom.field',
|
||||
type: '',
|
||||
},
|
||||
] as BaseAutocompleteData[],
|
||||
domainName,
|
||||
filters,
|
||||
);
|
||||
// Custom groupBy should have more fields than default
|
||||
if (resultWithCustomGroupBy.query.builder.queryData[0].groupBy) {
|
||||
expect(
|
||||
resultWithCustomGroupBy.query.builder.queryData[0].groupBy.length,
|
||||
).toBeGreaterThan(defaultGroupByLength);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// New tests for formatDataForTable
describe('formatDataForTable', () => {
	it('should format rows correctly with valid data', () => {
		const columns = APIMonitoringColumnsMock;
		const data = [
			[
				// eslint-disable-next-line sonarjs/no-duplicate-string
				'test-domain', // domainName
				'10', // endpoints
				'25', // rps
@@ -222,6 +107,7 @@ describe('API Monitoring Utils', () => {
		const groupBy = [
			{
				id: 'group-by-1',
				// eslint-disable-next-line sonarjs/no-duplicate-string
				key: 'http.method',
				dataType: DataTypes.String,
				type: '',
@@ -344,49 +230,6 @@ describe('API Monitoring Utils', () => {
	});
});

describe('formatTopErrorsDataForTable', () => {
	it('should format top errors data correctly', () => {
		// Arrange
		const inputData = [
			{
				metric: {
					[SPAN_ATTRIBUTES.URL_PATH]: '/api/test',
					[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]: '500',
					status_message: 'Internal Server Error',
				},
				values: [[1000000100, '10']],
				queryName: 'A',
				legend: 'Test Legend',
			},
		];

		// Act
		const result = formatTopErrorsDataForTable(
			inputData as TopErrorsResponseRow[],
		);

		// Assert
		expect(result).toBeDefined();
		expect(result.length).toBe(1);

		// Check first item is formatted correctly
		expect(result[0].endpointName).toBe('/api/test');
		expect(result[0].statusCode).toBe('500');
		expect(result[0].statusMessage).toBe('Internal Server Error');
		expect(result[0].count).toBe('10');
		expect(result[0].key).toBeDefined();
	});

	it('should handle empty input', () => {
		// Act
		const result = formatTopErrorsDataForTable(undefined);

		// Assert
		expect(result).toBeDefined();
		expect(result).toEqual([]);
	});
});

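// Illustrative sketch only — an assumed shape inferred from the assertions above,
// not the actual implementation in container/ApiMonitoring/utils. The row-key
// scheme is a hypothetical placeholder; the tests only assert that a key exists.
function formatTopErrorsDataForTableSketch(
	data?: TopErrorsResponseRow[],
): Array<{
	key: string;
	endpointName: string;
	statusCode: string;
	statusMessage: string;
	count: string;
}> {
	if (!data) return [];
	return data.map((row, index) => ({
		key: `${row.queryName}-${index}`, // assumed key scheme
		endpointName: row.metric[SPAN_ATTRIBUTES.URL_PATH],
		statusCode: row.metric[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE],
		statusMessage: row.metric.status_message,
		count: row.values[0]?.[1] ?? '-',
	}));
}
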
describe('getTopErrorsColumnsConfig', () => {
	it('should return column configuration with expected fields', () => {
		// Act
@@ -453,72 +296,6 @@ describe('API Monitoring Utils', () => {
	});
});

describe('getTopErrorsQueryPayload', () => {
	it('should create correct query payload with filters', () => {
		// Arrange
		const domainName = 'test-domain';
		const start = 1000000000;
		const end = 1000010000;
		const filters = {
			items: [
				{
					id: 'test-filter',
					key: {
						dataType: DataTypes.String,
						key: 'test-key',
						type: '',
					},
					op: '=',
					value: 'test-value',
				},
			],
			op: 'AND',
		};

		// Act
		const result = getTopErrorsQueryPayload(
			domainName,
			start,
			end,
			filters as IBuilderQuery['filters'],
		);

		// Assert
		expect(result).toBeDefined();
		expect(result.length).toBeGreaterThan(0);

		// Verify query params
		expect(result[0].start).toBe(start);
		expect(result[0].end).toBe(end);

		// Verify correct structure
		expect(result[0].graphType).toBeDefined();
		expect(result[0].query).toBeDefined();
		expect(result[0].query.builder).toBeDefined();
		expect(result[0].query.builder.queryData).toBeDefined();

		// Verify domain filter is included
		const queryData = result[0].query.builder.queryData[0];
		expect(queryData.filters).toBeDefined();

		// Check for domain filter
		const domainFilter = queryData.filters?.items?.find(
			// eslint-disable-next-line sonarjs/no-identical-functions
			(item) =>
				item.key &&
				item.key.key === SPAN_ATTRIBUTES.SERVER_NAME &&
				item.value === domainName,
		);
		expect(domainFilter).toBeDefined();

		// Check that custom filters were included
		const testFilter = queryData.filters?.items?.find(
			(item) => item.id === 'test-filter',
		);
		expect(testFilter).toBeDefined();
	});
});

// Add new tests for EndPointDetails utility functions
describe('extractPortAndEndpoint', () => {
	it('should extract port and endpoint from a valid URL', () => {
@@ -564,243 +341,6 @@ describe('API Monitoring Utils', () => {
	});
});

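// Illustrative sketch only — assumed behaviour, not the actual implementation.
// The tests in this file only rely on `port` being '-' when the URL carries no
// explicit port, and on a port/endpoint pair being extracted otherwise.
function extractPortAndEndpointSketch(
	url: string,
): { port: string; endpoint: string } {
	try {
		const parsed = new URL(url);
		return {
			port: parsed.port || '-',
			endpoint: parsed.pathname || '-',
		};
	} catch {
		// Not an absolute URL: treat the whole string as the endpoint.
		return { port: '-', endpoint: url || '-' };
	}
}
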
describe('getEndPointDetailsQueryPayload', () => {
	it('should generate proper query payload with all parameters', () => {
		// Arrange
		const domainName = 'test-domain';
		const startTime = 1609459200000; // 2021-01-01
		const endTime = 1609545600000; // 2021-01-02
		const filters = {
			items: [
				{
					id: 'test-filter',
					key: {
						dataType: 'string',
						key: 'test.key',
						type: '',
					},
					op: '=',
					value: 'test-value',
				},
			],
			op: 'AND',
		};

		// Act
		const result = getEndPointDetailsQueryPayload(
			domainName,
			startTime,
			endTime,
			filters as IBuilderQuery['filters'],
		);

		// Assert
		expect(result).toHaveLength(6); // Should return 6 queries

		// Check that each query includes proper parameters
		result.forEach((query) => {
			expect(query).toHaveProperty('start', startTime);
			expect(query).toHaveProperty('end', endTime);

			// Should have query property with builder data
			expect(query).toHaveProperty('query');
			expect(query.query).toHaveProperty('builder');

			// All queries should include the domain filter
			const {
				query: {
					builder: { queryData },
				},
			} = query;
			queryData.forEach((qd) => {
				if (qd.filters && qd.filters.items) {
					const serverNameFilter = qd.filters?.items?.find(
						(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
					);
					expect(serverNameFilter).toBeDefined();
					// Only check if the serverNameFilter exists, as the actual value might vary
					// depending on implementation details or domain defaults
					if (serverNameFilter) {
						expect(typeof serverNameFilter.value).toBe('string');
					}
				}

				// Should include our custom filter
				const customFilter = qd.filters?.items?.find(
					(item) => item.id === 'test-filter',
				);
				expect(customFilter).toBeDefined();
			});
		});
	});
});

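// For orientation: the V5 migration tests later in this diff index into the same
// six-element payload. Only the indices they exercise are listed here; the
// remaining slots are not described in this section.
const payloadIndexInThisSuite = {
	statusCodeTable: 1,
	endpointDropdown: 2,
	callsBarChart: 4,
	latencyBarChart: 5,
} as const;
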
describe('getRateOverTimeWidgetData', () => {
	it('should generate widget configuration for rate over time', () => {
		// Arrange
		const domainName = 'test-domain';
		const endPointName = '/api/test';
		const filters = { items: [], op: 'AND' };

		// Act
		const result = getRateOverTimeWidgetData(
			domainName,
			endPointName,
			filters as IBuilderQuery['filters'],
		);

		// Assert
		expect(result).toBeDefined();
		expect(result).toHaveProperty('title', 'Rate Over Time');
		// Check only title since description might vary

		// Check query configuration
		expect(result).toHaveProperty('query');
		// eslint-disable-next-line sonarjs/no-duplicate-string
		expect(result).toHaveProperty('query.builder.queryData');

		const queryData = result.query.builder.queryData[0];

		// Should have domain filter
		const domainFilter = queryData.filters?.items?.find(
			(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
		);
		expect(domainFilter).toBeDefined();
		if (domainFilter) {
			expect(typeof domainFilter.value).toBe('string');
		}

		// Should have 'rate' time aggregation
		expect(queryData).toHaveProperty('timeAggregation', 'rate');

		// Should have proper legend that includes endpoint info
		expect(queryData).toHaveProperty('legend');
		expect(
			typeof queryData.legend === 'string' ? queryData.legend : '',
		).toContain('/api/test');
	});

	it('should handle case without endpoint name', () => {
		// Arrange
		const domainName = 'test-domain';
		const endPointName = '';
		const filters = { items: [], op: 'AND' };

		// Act
		const result = getRateOverTimeWidgetData(
			domainName,
			endPointName,
			filters as IBuilderQuery['filters'],
		);

		// Assert
		expect(result).toBeDefined();

		const queryData = result.query.builder.queryData[0];

		// Legend should be domain name only
		expect(queryData).toHaveProperty('legend', domainName);
	});
});

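// Illustrative sketch only — assumed legend logic, not the actual implementation.
// The tests above and below pin down the observable behaviour: the legend falls
// back to the domain name when no endpoint is given, and otherwise incorporates
// the port/endpoint pair extracted from the endpoint string.
function buildWidgetLegendSketch(
	domainName: string,
	endPointName: string,
): string {
	if (!endPointName) {
		return domainName;
	}
	const { port, endpoint } = extractPortAndEndpoint(endPointName);
	return port !== '-' ? `${endpoint}:${port}` : endpoint;
}
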
describe('getLatencyOverTimeWidgetData', () => {
	it('should generate widget configuration for latency over time', () => {
		// Arrange
		const domainName = 'test-domain';
		const endPointName = '/api/test';
		const filters = { items: [], op: 'AND' };

		// Act
		const result = getLatencyOverTimeWidgetData(
			domainName,
			endPointName,
			filters as IBuilderQuery['filters'],
		);

		// Assert
		expect(result).toBeDefined();
		expect(result).toHaveProperty('title', 'Latency Over Time');
		// Check only title since description might vary

		// Check query configuration
		expect(result).toHaveProperty('query');
		expect(result).toHaveProperty('query.builder.queryData');

		const queryData = result.query.builder.queryData[0];

		// Should have domain filter
		const domainFilter = queryData.filters?.items?.find(
			(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
		);
		expect(domainFilter).toBeDefined();
		if (domainFilter) {
			expect(typeof domainFilter.value).toBe('string');
		}

		// Should use duration_nano as the aggregate attribute
		expect(queryData.aggregateAttribute).toHaveProperty('key', 'duration_nano');

		// Should have 'p99' time aggregation
		expect(queryData).toHaveProperty('timeAggregation', 'p99');
	});

	it('should handle case without endpoint name', () => {
		// Arrange
		const domainName = 'test-domain';
		const endPointName = '';
		const filters = { items: [], op: 'AND' };

		// Act
		const result = getLatencyOverTimeWidgetData(
			domainName,
			endPointName,
			filters as IBuilderQuery['filters'],
		);

		// Assert
		expect(result).toBeDefined();

		const queryData = result.query.builder.queryData[0];

		// Legend should be domain name only
		expect(queryData).toHaveProperty('legend', domainName);
	});

	// Changed approach to verify end-to-end behavior for URL with port
	it('should format legends appropriately for complete URLs with ports', () => {
		// Arrange
		const domainName = 'test-domain';
		const endPointName = 'http://example.com:8080/api/test';
		const filters = { items: [], op: 'AND' };

		// Extract what we expect the function to extract
		const expectedParts = extractPortAndEndpoint(endPointName);

		// Act
		const result = getLatencyOverTimeWidgetData(
			domainName,
			endPointName,
			filters as IBuilderQuery['filters'],
		);

		// Assert
		const queryData = result.query.builder.queryData[0];

		// Check that legend is present and is a string
		expect(queryData).toHaveProperty('legend');
		expect(typeof queryData.legend).toBe('string');

		// If the URL has a port and endpoint, the legend should reflect that appropriately
		// (Testing the integration rather than the exact formatting)
		if (expectedParts.port !== '-') {
			// Verify that both components are incorporated into the legend in some way
			// This tests the behavior without relying on the exact implementation details
			const legendStr = queryData.legend as string;
			expect(legendStr).not.toBe(domainName); // Legend should be different when URL has port/endpoint
		}
	});
});

describe('getFormattedEndPointDropDownData', () => {
	it('should format endpoint dropdown data correctly', () => {
		// Arrange
@@ -810,6 +350,7 @@ describe('API Monitoring Utils', () => {
				data: {
					// eslint-disable-next-line sonarjs/no-duplicate-string
					[URL_PATH_KEY]: '/api/users',
					'url.full': 'http://example.com/api/users',
					A: 150, // count or other metric
				},
			},
@@ -817,6 +358,7 @@ describe('API Monitoring Utils', () => {
				data: {
					// eslint-disable-next-line sonarjs/no-duplicate-string
					[URL_PATH_KEY]: '/api/orders',
					'url.full': 'http://example.com/api/orders',
					A: 75,
				},
			},
@@ -900,87 +442,6 @@ describe('API Monitoring Utils', () => {
	});
});

describe('getFormattedEndPointMetricsData', () => {
	it('should format endpoint metrics data correctly', () => {
		// Arrange
		const mockData = [
			{
				data: {
					A: '50', // rate
					B: '15000000', // latency in nanoseconds
					C: '5', // required by type
					D: '1640995200000000', // timestamp in nanoseconds
					F1: '5.5', // error rate
				},
			},
		];

		// Act
		const result = getFormattedEndPointMetricsData(mockData as any);

		// Assert
		expect(result).toBeDefined();
		expect(result.key).toBeDefined();
		expect(result.rate).toBe('50');
		expect(result.latency).toBe(15); // Should be converted from ns to ms
		expect(result.errorRate).toBe(5.5);
		expect(typeof result.lastUsed).toBe('string'); // Time formatting is tested elsewhere
	});

	// eslint-disable-next-line sonarjs/no-duplicate-string
	it('should handle undefined values in data', () => {
		// Arrange
		const mockData = [
			{
				data: {
					A: undefined,
					B: 'n/a',
					C: '', // required by type
					D: undefined,
					F1: 'n/a',
				},
			},
		];

		// Act
		const result = getFormattedEndPointMetricsData(mockData as any);

		// Assert
		expect(result).toBeDefined();
		expect(result.rate).toBe('-');
		expect(result.latency).toBe('-');
		expect(result.errorRate).toBe(0);
		expect(result.lastUsed).toBe('-');
	});

	it('should handle empty input array', () => {
		// Act
		const result = getFormattedEndPointMetricsData([]);

		// Assert
		expect(result).toBeDefined();
		expect(result.rate).toBe('-');
		expect(result.latency).toBe('-');
		expect(result.errorRate).toBe(0);
		expect(result.lastUsed).toBe('-');
	});

	it('should handle undefined input', () => {
		// Arrange
		const undefinedInput = undefined as any;

		// Act
		const result = getFormattedEndPointMetricsData(undefinedInput);

		// Assert
		expect(result).toBeDefined();
		expect(result.rate).toBe('-');
		expect(result.latency).toBe('-');
		expect(result.errorRate).toBe(0);
		expect(result.lastUsed).toBe('-');
	});
});

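// Illustrative sketch only — an assumed shape inferred from the assertions above,
// not the actual implementation. The key scheme and the lastUsed formatting are
// hypothetical; the tests only pin the ns→ms conversion and the '-' / 0 fallbacks.
function formatEndPointMetricsSketch(
	rows?: Array<{ data: Record<string, string | undefined> }>,
): {
	key: string;
	rate: string | number;
	latency: string | number;
	errorRate: number;
	lastUsed: string;
} {
	const data = rows?.[0]?.data;
	const isMissing = (v?: string): boolean =>
		v === undefined || v === 'n/a' || v === '';
	return {
		key: 'endpoint-metrics', // assumed; the real key scheme is not asserted
		rate: isMissing(data?.A) ? '-' : (data?.A as string),
		latency: isMissing(data?.B) ? '-' : Number(data?.B) / 1_000_000,
		errorRate: isMissing(data?.F1) ? 0 : Number(data?.F1),
		lastUsed: isMissing(data?.D)
			? '-'
			: new Date(Number(data?.D) / 1_000_000).toLocaleString(),
	};
}
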
describe('getFormattedEndPointStatusCodeData', () => {
	it('should format status code data correctly', () => {
		// Arrange
@@ -1117,139 +578,6 @@ describe('API Monitoring Utils', () => {
	});
});

describe('getFormattedDependentServicesData', () => {
	it('should format dependent services data correctly', () => {
		// Arrange
		const mockData = [
			{
				data: {
					// eslint-disable-next-line sonarjs/no-duplicate-string
					'service.name': 'auth-service',
					A: '500', // count
					B: '120000000', // latency in nanoseconds
					C: '15', // rate
					F1: '2.5', // error percentage
				},
			},
			{
				data: {
					'service.name': 'db-service',
					A: '300',
					B: '80000000',
					C: '10',
					F1: '1.2',
				},
			},
		];

		// Act
		const result = getFormattedDependentServicesData(mockData as any);

		// Assert
		expect(result).toBeDefined();
		expect(result.length).toBe(2);

		// Check first service
		expect(result[0].key).toBeDefined();
		expect(result[0].serviceData.serviceName).toBe('auth-service');
		expect(result[0].serviceData.count).toBe(500);
		expect(typeof result[0].serviceData.percentage).toBe('number');
		expect(result[0].latency).toBe(120); // Should be converted from ns to ms
		expect(result[0].rate).toBe('15');
		expect(result[0].errorPercentage).toBe('2.5');

		// Check second service
		expect(result[1].serviceData.serviceName).toBe('db-service');
		expect(result[1].serviceData.count).toBe(300);
		expect(result[1].latency).toBe(80);
		expect(result[1].rate).toBe('10');
		expect(result[1].errorPercentage).toBe('1.2');

		// Verify percentage calculation
		const totalCount = 500 + 300;
		expect(result[0].serviceData.percentage).toBeCloseTo(
			(500 / totalCount) * 100,
			2,
		);
		expect(result[1].serviceData.percentage).toBeCloseTo(
			(300 / totalCount) * 100,
			2,
		);
	});

	it('should handle undefined values in data', () => {
		// Arrange
		const mockData = [
			{
				data: {
					'service.name': 'auth-service',
					A: 'n/a',
					B: undefined,
					C: 'n/a',
					F1: undefined,
				},
			},
		];

		// Act
		const result = getFormattedDependentServicesData(mockData as any);

		// Assert
		expect(result).toBeDefined();
		expect(result.length).toBe(1);
		expect(result[0].serviceData.serviceName).toBe('auth-service');
		expect(result[0].serviceData.count).toBe('-');
		expect(result[0].serviceData.percentage).toBe(0);
		expect(result[0].latency).toBe('-');
		expect(result[0].rate).toBe('-');
		expect(result[0].errorPercentage).toBe(0);
	});

	it('should handle empty input array', () => {
		// Act
		const result = getFormattedDependentServicesData([]);

		// Assert
		expect(result).toBeDefined();
		expect(result).toEqual([]);
	});

	it('should handle undefined input', () => {
		// Arrange
		const undefinedInput = undefined as any;

		// Act
		const result = getFormattedDependentServicesData(undefinedInput);

		// Assert
		expect(result).toBeDefined();
		expect(result).toEqual([]);
	});

	it('should handle missing service name', () => {
		// Arrange
		const mockData = [
			{
				data: {
					// Missing service.name
					A: '200',
					B: '50000000',
					C: '8',
					F1: '0.5',
				},
			},
		];

		// Act
		const result = getFormattedDependentServicesData(mockData as any);

		// Assert
		expect(result).toBeDefined();
		expect(result.length).toBe(1);
		expect(result[0].serviceData.serviceName).toBe('-');
	});
});

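// Illustrative sketch only — an assumed shape inferred from the assertions above,
// not the actual implementation. The key scheme is a hypothetical placeholder;
// the tests fix the summed-count percentage, the ns→ms latency conversion, and
// the '-' / 0 fallbacks for missing values.
function formatDependentServicesSketch(
	rows?: Array<{ data: Record<string, string | undefined> }>,
): Array<{
	key: string;
	serviceData: { serviceName: string; count: number | string; percentage: number };
	latency: number | string;
	rate: string;
	errorPercentage: number | string;
}> {
	if (!rows?.length) return [];
	const counts = rows.map((row) => Number(row.data.A));
	const total = counts.reduce((sum, c) => sum + (Number.isFinite(c) ? c : 0), 0);
	return rows.map((row, index) => {
		const count = Number(row.data.A);
		const hasCount = Number.isFinite(count);
		return {
			key: `dependent-service-${index}`, // assumed key scheme
			serviceData: {
				serviceName: row.data['service.name'] ?? '-',
				count: hasCount ? count : '-',
				percentage: hasCount && total > 0 ? (count / total) * 100 : 0,
			},
			latency: row.data.B !== undefined ? Number(row.data.B) / 1_000_000 : '-',
			rate: row.data.C && row.data.C !== 'n/a' ? row.data.C : '-',
			errorPercentage: row.data.F1 ?? 0,
		};
	});
}
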
describe('getFormattedEndPointStatusCodeChartData', () => {
	afterEach(() => {
		jest.resetAllMocks();

@@ -0,0 +1,221 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
 * V5 Migration Tests for All Endpoints Widget (Endpoint Overview)
 *
 * These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
 * - Filter format change: filters.items[] → filter.expression
 * - Aggregation format: aggregateAttribute → aggregations[] array
 * - Domain filter: (net.peer.name OR server.address)
 * - Kind filter: kind_string = 'Client'
 * - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
 * - GroupBy: Both http.url AND url.full with type 'attribute'
 */
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
	BaseAutocompleteData,
	DataTypes,
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('AllEndpointsWidget - V5 Migration Validation', () => {
	const mockDomainName = 'api.example.com';
	const emptyFilters: IBuilderQuery['filters'] = {
		items: [],
		op: 'AND',
	};
	const emptyGroupBy: BaseAutocompleteData[] = [];

	describe('1. V5 Format Migration - All Four Queries', () => {
		it('all queries use filter.expression format (not filters.items)', () => {
			const widget = getAllEndpointsWidgetData(
				emptyGroupBy,
				mockDomainName,
				emptyFilters,
			);

			const { queryData } = widget.query.builder;

			// All 4 queries must use V5 filter.expression format
			queryData.forEach((query) => {
				expect(query.filter).toBeDefined();
				expect(query.filter?.expression).toBeDefined();
				expect(typeof query.filter?.expression).toBe('string');
				// OLD V4 format should NOT exist
				expect(query).not.toHaveProperty('filters');
			});

			// Verify we have exactly 4 queries
			expect(queryData).toHaveLength(4);
		});

		it('all queries use aggregations array format (not aggregateAttribute)', () => {
			const widget = getAllEndpointsWidgetData(
				emptyGroupBy,
				mockDomainName,
				emptyFilters,
			);

			const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;

			// Query A: count()
			expect(queryA.aggregations).toBeDefined();
			expect(Array.isArray(queryA.aggregations)).toBe(true);
			expect(queryA.aggregations).toEqual([{ expression: 'count()' }]);
			expect(queryA).not.toHaveProperty('aggregateAttribute');

			// Query B: p99(duration_nano)
			expect(queryB.aggregations).toBeDefined();
			expect(Array.isArray(queryB.aggregations)).toBe(true);
			expect(queryB.aggregations).toEqual([{ expression: 'p99(duration_nano)' }]);
			expect(queryB).not.toHaveProperty('aggregateAttribute');

			// Query C: max(timestamp)
			expect(queryC.aggregations).toBeDefined();
			expect(Array.isArray(queryC.aggregations)).toBe(true);
			expect(queryC.aggregations).toEqual([{ expression: 'max(timestamp)' }]);
			expect(queryC).not.toHaveProperty('aggregateAttribute');

			// Query D: count() (disabled, for errors)
			expect(queryD.aggregations).toBeDefined();
			expect(Array.isArray(queryD.aggregations)).toBe(true);
			expect(queryD.aggregations).toEqual([{ expression: 'count()' }]);
			expect(queryD).not.toHaveProperty('aggregateAttribute');
		});

		it('all queries have correct base filter expressions', () => {
			const widget = getAllEndpointsWidgetData(
				emptyGroupBy,
				mockDomainName,
				emptyFilters,
			);

			const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;

			const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;

			// Queries A, B, C have identical base filter
			expect(queryA.filter?.expression).toBe(
				`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
			);
			expect(queryB.filter?.expression).toBe(
				`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
			);
			expect(queryC.filter?.expression).toBe(
				`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
			);

			// Query D has additional has_error filter
			expect(queryD.filter?.expression).toBe(
				`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
			);
		});
	});

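	// Illustrative sketch only — an assumed construction of the base expression
	// that the assertions above fully determine; the real helper lives in
	// container/ApiMonitoring/utils and may compose it differently.
	function buildBaseFilterExpressionSketch(
		domainName: string,
		withError = false,
	): string {
		const parts = [
			`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
			"kind_string = 'Client'",
		];
		if (withError) {
			parts.push('has_error = true');
		}
		parts.push('(http.url EXISTS OR url.full EXISTS)');
		return parts.join(' AND ');
	}
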
	describe('2. GroupBy Structure', () => {
		it('default groupBy includes both http.url and url.full with type attribute', () => {
			const widget = getAllEndpointsWidgetData(
				emptyGroupBy,
				mockDomainName,
				emptyFilters,
			);

			const { queryData } = widget.query.builder;

			// All queries should have the same default groupBy
			queryData.forEach((query) => {
				expect(query.groupBy).toHaveLength(2);

				// http.url
				expect(query.groupBy).toContainEqual({
					dataType: DataTypes.String,
					isColumn: false,
					isJSON: false,
					key: 'http.url',
					type: 'attribute',
				});

				// url.full
				expect(query.groupBy).toContainEqual({
					dataType: DataTypes.String,
					isColumn: false,
					isJSON: false,
					key: 'url.full',
					type: 'attribute',
				});
			});
		});

		it('custom groupBy is appended after defaults', () => {
			const customGroupBy: BaseAutocompleteData[] = [
				{
					dataType: DataTypes.String,
					key: 'service.name',
					type: 'resource',
				},
				{
					dataType: DataTypes.String,
					key: 'deployment.environment',
					type: 'resource',
				},
			];

			const widget = getAllEndpointsWidgetData(
				customGroupBy,
				mockDomainName,
				emptyFilters,
			);

			const { queryData } = widget.query.builder;

			// All queries should have defaults + custom groupBy
			queryData.forEach((query) => {
				expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom

				// First two should be defaults (http.url, url.full)
				expect(query.groupBy[0].key).toBe('http.url');
				expect(query.groupBy[1].key).toBe('url.full');

				// Last two should be custom (matching subset of properties)
				expect(query.groupBy[2]).toMatchObject({
					dataType: DataTypes.String,
					key: 'service.name',
					type: 'resource',
				});
				expect(query.groupBy[3]).toMatchObject({
					dataType: DataTypes.String,
					key: 'deployment.environment',
					type: 'resource',
				});
			});
		});
	});

	describe('3. Query-Specific Validations', () => {
		it('query D has has_error filter and is disabled', () => {
			const widget = getAllEndpointsWidgetData(
				emptyGroupBy,
				mockDomainName,
				emptyFilters,
			);

			const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;

			// Query D should be disabled
			expect(queryD.disabled).toBe(true);

			// Queries A, B, C should NOT be disabled
			expect(queryA.disabled).toBe(false);
			expect(queryB.disabled).toBe(false);
			expect(queryC.disabled).toBe(false);

			// Query D should have has_error in filter
			expect(queryD.filter?.expression).toContain('has_error = true');

			// Queries A, B, C should NOT have has_error
			expect(queryA.filter?.expression).not.toContain('has_error');
			expect(queryB.filter?.expression).not.toContain('has_error');
			expect(queryC.filter?.expression).not.toContain('has_error');
		});
	});
});
@@ -1,211 +0,0 @@
import { render, screen } from '@testing-library/react';
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
import { SuccessResponse } from 'types/api';

import EndPointMetrics from '../Explorer/Domains/DomainDetails/components/EndPointMetrics';
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';

// Create a partial mock of the UseQueryResult interface for testing
interface MockQueryResult {
	isLoading: boolean;
	isRefetching: boolean;
	isError: boolean;
	data?: any;
	refetch: () => void;
}

// Mock the utils function
jest.mock('container/ApiMonitoring/utils', () => ({
	getFormattedEndPointMetricsData: jest.fn(),
}));

// Mock the ErrorState component
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
	__esModule: true,
	default: jest.fn().mockImplementation(({ refetch }) => (
		<div data-testid="error-state-mock">
			<button type="button" data-testid="refetch-button" onClick={refetch}>
				Retry
			</button>
		</div>
	)),
}));

// Mock antd components
jest.mock('antd', () => {
	const originalModule = jest.requireActual('antd');
	return {
		...originalModule,
		Progress: jest
			.fn()
			.mockImplementation(() => <div data-testid="progress-bar-mock" />),
		Skeleton: {
			Button: jest
				.fn()
				.mockImplementation(() => <div data-testid="skeleton-button-mock" />),
		},
		Tooltip: jest
			.fn()
			.mockImplementation(({ children }) => (
				<div data-testid="tooltip-mock">{children}</div>
			)),
		Typography: {
			Text: jest.fn().mockImplementation(({ children, className }) => (
				<div data-testid={`typography-${className}`} className={className}>
					{children}
				</div>
			)),
		},
	};
});

describe('EndPointMetrics', () => {
	// Common metric data to use in tests
	const mockMetricsData = {
		key: 'test-key',
		rate: '42',
		latency: 99,
		errorRate: 5.5,
		lastUsed: '5 minutes ago',
	};

	// Basic props for tests
	const refetchFn = jest.fn();

	beforeEach(() => {
		jest.clearAllMocks();
		(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(
			mockMetricsData,
		);
	});

	it('renders loading state correctly', () => {
		const mockQuery: MockQueryResult = {
			isLoading: true,
			isRefetching: false,
			isError: false,
			data: undefined,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify skeleton loaders are visible
		const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
		expect(skeletonElements.length).toBe(4);

		// Verify labels are visible even during loading
		expect(screen.getByText('Rate')).toBeInTheDocument();
		expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
		expect(screen.getByText('ERROR %')).toBeInTheDocument();
		expect(screen.getByText('LAST USED')).toBeInTheDocument();
	});

	it('renders error state correctly', () => {
		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: false,
			isError: true,
			data: undefined,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify error state is shown
		expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
		expect(ErrorState).toHaveBeenCalledWith(
			{ refetch: expect.any(Function) },
			expect.anything(),
		);
	});

	it('renders data correctly when loaded', () => {
		const mockData = {
			payload: {
				data: {
					result: [
						{
							table: {
								rows: [
									{ data: { A: '42', B: '99000000', D: '1609459200000000', F1: '5.5' } },
								],
							},
						},
					],
				},
			},
		} as SuccessResponse<any>;

		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: false,
			isError: false,
			data: mockData,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify the utils function was called with the data
		expect(getFormattedEndPointMetricsData).toHaveBeenCalledWith(
			mockData.payload.data.result[0].table.rows,
		);

		// Verify data is displayed
		expect(
			screen.getByText(`${mockMetricsData.rate} ops/sec`),
		).toBeInTheDocument();
		expect(screen.getByText(`${mockMetricsData.latency}ms`)).toBeInTheDocument();
		expect(screen.getByText(mockMetricsData.lastUsed)).toBeInTheDocument();
		expect(screen.getByTestId('progress-bar-mock')).toBeInTheDocument(); // For error rate
	});

	it('handles refetching state correctly', () => {
		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: true,
			isError: false,
			data: undefined,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Verify skeleton loaders are visible during refetching
		const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
		expect(skeletonElements.length).toBe(4);
	});

	it('handles null metrics data gracefully', () => {
		// Mock the utils function to return null to simulate missing data
		(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(null);

		const mockData = {
			payload: {
				data: {
					result: [
						{
							table: {
								rows: [],
							},
						},
					],
				},
			},
		} as SuccessResponse<any>;

		const mockQuery: MockQueryResult = {
			isLoading: false,
			isRefetching: false,
			isError: false,
			data: mockData,
			refetch: refetchFn,
		};

		render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

		// Even with null data, the component should render without crashing
		expect(screen.getByText('Rate')).toBeInTheDocument();
	});
});
@@ -0,0 +1,173 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
 * V5 Migration Tests for Endpoint Dropdown Query
 *
 * These tests validate the migration from V4 to V5 format for the third payload
 * in getEndPointDetailsQueryPayload (endpoint dropdown data):
 * - Filter format change: filters.items[] → filter.expression
 * - Domain handling: (net.peer.name OR server.address)
 * - Kind filter: kind_string = 'Client'
 * - Existence check: (http.url EXISTS OR url.full EXISTS)
 * - Aggregation: count() expression
 * - GroupBy: Both http.url AND url.full with type 'attribute'
 */
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('EndpointDropdown - V5 Migration Validation', () => {
	const mockDomainName = 'api.example.com';
	const mockStartTime = 1000;
	const mockEndTime = 2000;
	const emptyFilters: IBuilderQuery['filters'] = {
		items: [],
		op: 'AND',
	};

	describe('1. V5 Format Migration - Structure and Base Filters', () => {
		it('migrates to V5 format with correct filter expression structure, aggregations, and groupBy', () => {
			const payload = getEndPointDetailsQueryPayload(
				mockDomainName,
				mockStartTime,
				mockEndTime,
				emptyFilters,
			);

			// Third payload is the endpoint dropdown query (index 2)
			const dropdownQuery = payload[2];
			const queryA = dropdownQuery.query.builder.queryData[0];

			// CRITICAL V5 MIGRATION: filter.expression (not filters.items)
			expect(queryA.filter).toBeDefined();
			expect(queryA.filter?.expression).toBeDefined();
			expect(typeof queryA.filter?.expression).toBe('string');
			expect(queryA).not.toHaveProperty('filters');

			// Base filter 1: Domain (net.peer.name OR server.address)
			expect(queryA.filter?.expression).toContain(
				`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
			);

			// Base filter 2: Kind
			expect(queryA.filter?.expression).toContain("kind_string = 'Client'");

			// Base filter 3: Existence check
			expect(queryA.filter?.expression).toContain(
				'(http.url EXISTS OR url.full EXISTS)',
			);

			// V5 Aggregation format: aggregations array (not aggregateAttribute)
			expect(queryA.aggregations).toBeDefined();
			expect(Array.isArray(queryA.aggregations)).toBe(true);
			expect(queryA.aggregations?.[0]).toEqual({
				expression: 'count()',
			});
			expect(queryA).not.toHaveProperty('aggregateAttribute');

			// GroupBy: Both http.url and url.full
			expect(queryA.groupBy).toHaveLength(2);
			expect(queryA.groupBy).toContainEqual({
				key: 'http.url',
				dataType: 'string',
				type: 'attribute',
			});
			expect(queryA.groupBy).toContainEqual({
				key: 'url.full',
				dataType: 'string',
				type: 'attribute',
			});
		});
	});

	describe('2. Custom Filters Integration', () => {
		it('merges custom filters into filter expression with AND logic', () => {
			const customFilters: IBuilderQuery['filters'] = {
				items: [
					{
						id: 'test-1',
						key: {
							key: 'service.name',
							dataType: 'string' as any,
							type: 'resource',
						},
						op: '=',
						value: 'user-service',
					},
					{
						id: 'test-2',
						key: {
							key: 'deployment.environment',
							dataType: 'string' as any,
							type: 'resource',
						},
						op: '=',
						value: 'production',
					},
				],
				op: 'AND',
			};

			const payload = getEndPointDetailsQueryPayload(
				mockDomainName,
				mockStartTime,
				mockEndTime,
				customFilters,
			);

			const dropdownQuery = payload[2];
			const expression =
				dropdownQuery.query.builder.queryData[0].filter?.expression;

			// Exact filter expression with custom filters merged
			expect(expression).toBe(
				"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
			);
		});
	});

	describe('3. HTTP URL Filter Special Handling', () => {
		it('converts http.url filter to (http.url OR url.full) expression', () => {
			const filtersWithHttpUrl: IBuilderQuery['filters'] = {
				items: [
					{
						id: 'http-url-filter',
						key: {
							key: 'http.url',
							dataType: 'string' as any,
							type: 'tag',
						},
						op: '=',
						value: '/api/users',
					},
					{
						id: 'service-filter',
						key: {
							key: 'service.name',
							dataType: 'string' as any,
							type: 'resource',
						},
						op: '=',
						value: 'user-service',
					},
				],
				op: 'AND',
			};

			const payload = getEndPointDetailsQueryPayload(
				mockDomainName,
				mockStartTime,
				mockEndTime,
				filtersWithHttpUrl,
			);

			const dropdownQuery = payload[2];
			const expression =
				dropdownQuery.query.builder.queryData[0].filter?.expression;

			// CRITICAL: Exact filter expression with http.url converted to OR logic
			expect(expression).toBe(
				"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
			);
		});
	});
});
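// Illustrative sketch only — an assumed merge of V4 filter items into the V5
// expression string. The ordering (plain clauses first, http.url expansion last,
// appended to the base expression with a plain space) is inferred from the exact
// strings asserted above and may not match the real helper.
function mergeV4FiltersIntoExpressionSketch(
	baseExpression: string,
	filters: IBuilderQuery['filters'],
): string {
	const items = filters?.items ?? [];
	const plain = items
		.filter((item) => item.key?.key !== 'http.url')
		.map((item) => `${item.key?.key} ${item.op} '${item.value}'`);
	const urlClauses = items
		.filter((item) => item.key?.key === 'http.url')
		.map((item) => `(http.url = '${item.value}' OR url.full = '${item.value}')`);
	const clauses = [...plain, ...urlClauses];
	return clauses.length
		? `${baseExpression} ${clauses.join(' AND ')}`
		: baseExpression;
}
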
@@ -0,0 +1,173 @@
import {
	getLatencyOverTimeWidgetData,
	getRateOverTimeWidgetData,
} from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('MetricOverTime - V5 Migration Validation', () => {
	const mockDomainName = 'api.example.com';
	// eslint-disable-next-line sonarjs/no-duplicate-string
	const mockEndpointName = '/api/users';
	const emptyFilters: IBuilderQuery['filters'] = {
		items: [],
		op: 'AND',
	};

	describe('1. Rate Over Time - V5 Payload Structure', () => {
		it('generates V5 filter expression format (not V3 filters.items)', () => {
			const widget = getRateOverTimeWidgetData(
				mockDomainName,
				mockEndpointName,
				emptyFilters,
			);

			const queryData = widget.query.builder.queryData[0];

			// CRITICAL: Must use V5 format (filter.expression), not V3 format (filters.items)
			expect(queryData.filter).toBeDefined();
			expect(queryData?.filter?.expression).toBeDefined();
			expect(typeof queryData?.filter?.expression).toBe('string');

			// OLD V3 format should NOT exist
			expect(queryData).not.toHaveProperty('filters.items');
		});

		it('uses new domain filter format: (net.peer.name OR server.address)', () => {
			const widget = getRateOverTimeWidgetData(
				mockDomainName,
				mockEndpointName,
				emptyFilters,
			);

			const queryData = widget.query.builder.queryData[0];

			// Verify EXACT new filter format with OR operator
			expect(queryData?.filter?.expression).toContain(
				`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
			);

			// Endpoint name is used in legend, not filter
			expect(queryData.legend).toContain('/api/users');
		});

		it('merges custom filters into filter expression', () => {
			const customFilters: IBuilderQuery['filters'] = {
				items: [
					{
						id: 'test-1',
						key: {
							// eslint-disable-next-line sonarjs/no-duplicate-string
							key: 'service.name',
							dataType: DataTypes.String,
							type: 'resource',
						},
						op: '=',
						// eslint-disable-next-line sonarjs/no-duplicate-string
						value: 'user-service',
					},
					{
						id: 'test-2',
						key: {
							key: 'deployment.environment',
							dataType: DataTypes.String,
							type: 'resource',
						},
						op: '=',
						value: 'production',
					},
				],
				op: 'AND',
			};

			const widget = getRateOverTimeWidgetData(
				mockDomainName,
				mockEndpointName,
				customFilters,
			);

			const queryData = widget.query.builder.queryData[0];

			// Verify domain filter is present
			expect(queryData?.filter?.expression).toContain(
				`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
			);

			// Verify custom filters are merged into the expression
			expect(queryData?.filter?.expression).toContain('service.name');
			expect(queryData?.filter?.expression).toContain('user-service');
			expect(queryData?.filter?.expression).toContain('deployment.environment');
			expect(queryData?.filter?.expression).toContain('production');
		});
	});

	describe('2. Latency Over Time - V5 Payload Structure', () => {
		it('generates V5 filter expression format (not V3 filters.items)', () => {
			const widget = getLatencyOverTimeWidgetData(
				mockDomainName,
				mockEndpointName,
				emptyFilters,
			);

			const queryData = widget.query.builder.queryData[0];

			// CRITICAL: Must use V5 format (filter.expression), not V3 format (filters.items)
			expect(queryData.filter).toBeDefined();
			expect(queryData?.filter?.expression).toBeDefined();
			expect(typeof queryData?.filter?.expression).toBe('string');

			// OLD V3 format should NOT exist
			expect(queryData).not.toHaveProperty('filters.items');
		});

		it('uses new domain filter format: (net.peer.name OR server.address)', () => {
			const widget = getLatencyOverTimeWidgetData(
				mockDomainName,
				mockEndpointName,
				emptyFilters,
			);

			const queryData = widget.query.builder.queryData[0];

			// Verify EXACT new filter format with OR operator
			expect(queryData.filter).toBeDefined();
			expect(queryData?.filter?.expression).toContain(
				`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
			);

			// Endpoint name is used in legend, not filter
			expect(queryData.legend).toContain('/api/users');
		});

		it('merges custom filters into filter expression', () => {
			const customFilters: IBuilderQuery['filters'] = {
				items: [
					{
						id: 'test-1',
						key: {
							key: 'service.name',
							dataType: DataTypes.String,
							type: 'resource',
						},
						op: '=',
						value: 'user-service',
					},
				],
				op: 'AND',
			};

			const widget = getLatencyOverTimeWidgetData(
				mockDomainName,
				mockEndpointName,
				customFilters,
			);

			const queryData = widget.query.builder.queryData[0];

			// Verify domain filter is present
			expect(queryData?.filter?.expression).toContain(
				`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
			);
		});
	});
});
@@ -0,0 +1,237 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
 * V5 Migration Tests for Status Code Bar Chart Queries
 *
 * These tests validate the migration to V5 format for the bar chart payloads
 * in getEndPointDetailsQueryPayload (5th and 6th payloads):
 * - Number of Calls Chart (count aggregation)
 * - Latency Chart (p99 aggregation)
 *
 * V5 Changes:
 * - Filter format change: filters.items[] → filter.expression
 * - Domain filter: (net.peer.name OR server.address)
 * - Kind filter: kind_string = 'Client'
 * - stepInterval: 60 → null
 * - Grouped by response_status_code
 */
import { TraceAggregation } from 'api/v5/v5';
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('StatusCodeBarCharts - V5 Migration Validation', () => {
	const mockDomainName = '0.0.0.0';
	const mockStartTime = 1762573673000;
	const mockEndTime = 1762832873000;
	const emptyFilters: IBuilderQuery['filters'] = {
		items: [],
		op: 'AND',
	};

	describe('1. Number of Calls Chart - V5 Payload Structure', () => {
		it('generates correct V5 payload for count aggregation grouped by status code', () => {
			const payload = getEndPointDetailsQueryPayload(
				mockDomainName,
				mockStartTime,
				mockEndTime,
				emptyFilters,
			);

			// 5th payload (index 4) is the number of calls bar chart
			const callsChartQuery = payload[4];
			const queryA = callsChartQuery.query.builder.queryData[0];

			// V5 format: filter.expression (not filters.items)
			expect(queryA.filter).toBeDefined();
			expect(queryA.filter?.expression).toBeDefined();
			expect(typeof queryA.filter?.expression).toBe('string');
			expect(queryA).not.toHaveProperty('filters.items');

			// Base filter 1: Domain (net.peer.name OR server.address)
			expect(queryA.filter?.expression).toContain(
				`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
			);

			// Base filter 2: Kind
			expect(queryA.filter?.expression).toContain("kind_string = 'Client'");

			// Aggregation: count
			expect(queryA.queryName).toBe('A');
			expect(queryA.aggregateOperator).toBe('count');
			expect(queryA.disabled).toBe(false);

			// Grouped by response_status_code
			expect(queryA.groupBy).toContainEqual(
				expect.objectContaining({
					key: 'response_status_code',
					dataType: 'string',
					type: 'span',
				}),
			);

			// V5 critical: stepInterval should be null
			expect(queryA.stepInterval).toBeNull();

			// Time aggregation
			expect(queryA.timeAggregation).toBe('rate');
		});
	});

	describe('2. Latency Chart - V5 Payload Structure', () => {
		it('generates correct V5 payload for p99 aggregation grouped by status code', () => {
			const payload = getEndPointDetailsQueryPayload(
				mockDomainName,
				mockStartTime,
				mockEndTime,
				emptyFilters,
			);

			// 6th payload (index 5) is the latency bar chart
			const latencyChartQuery = payload[5];
			const queryA = latencyChartQuery.query.builder.queryData[0];

			// V5 format: filter.expression (not filters.items)
			expect(queryA.filter).toBeDefined();
			expect(queryA.filter?.expression).toBeDefined();
			expect(typeof queryA.filter?.expression).toBe('string');
			expect(queryA).not.toHaveProperty('filters.items');

			// Base filter 1: Domain (net.peer.name OR server.address)
			expect(queryA.filter?.expression).toContain(
				`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
			);

			// Base filter 2: Kind
			expect(queryA.filter?.expression).toContain("kind_string = 'Client'");

			// Aggregation: p99 on duration_nano
			expect(queryA.queryName).toBe('A');
			expect(queryA.aggregateOperator).toBe('p99');
			expect(queryA.aggregations?.[0]).toBeDefined();
			expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
				'p99(duration_nano)',
			);
			expect(queryA.disabled).toBe(false);

			// Grouped by response_status_code
			expect(queryA.groupBy).toContainEqual(
				expect.objectContaining({
					key: 'response_status_code',
					dataType: 'string',
					type: 'span',
				}),
			);

			// V5 critical: stepInterval should be null
			expect(queryA.stepInterval).toBeNull();

			// Time aggregation
			expect(queryA.timeAggregation).toBe('p99');
		});
	});

	describe('3. Custom Filters Integration', () => {
		it('merges custom filters into filter expression for both charts', () => {
			const customFilters: IBuilderQuery['filters'] = {
				items: [
					{
						id: 'test-1',
						key: {
							key: 'service.name',
							dataType: 'string' as any,
							type: 'resource',
						},
						op: '=',
						value: 'user-service',
					},
					{
						id: 'test-2',
						key: {
							key: 'deployment.environment',
							dataType: 'string' as any,
							type: 'resource',
						},
						op: '=',
						value: 'production',
					},
				],
				op: 'AND',
			};

			const payload = getEndPointDetailsQueryPayload(
				mockDomainName,
				mockStartTime,
				mockEndTime,
				customFilters,
			);

			const callsChartQuery = payload[4];
			const latencyChartQuery = payload[5];

			const callsExpression =
				callsChartQuery.query.builder.queryData[0].filter?.expression;
			const latencyExpression =
				latencyChartQuery.query.builder.queryData[0].filter?.expression;

			// Both charts should have the same filter expression
			expect(callsExpression).toBe(latencyExpression);

			// Verify base filters
			expect(callsExpression).toContain('net.peer.name');
			expect(callsExpression).toContain("kind_string = 'Client'");

			// Verify custom filters are merged
			expect(callsExpression).toContain('service.name');
			expect(callsExpression).toContain('user-service');
			expect(callsExpression).toContain('deployment.environment');
			expect(callsExpression).toContain('production');
		});
	});

	describe('4. HTTP URL Filter Handling', () => {
		it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
			const filtersWithHttpUrl: IBuilderQuery['filters'] = {
				items: [
					{
						id: 'http-url-filter',
						key: {
							key: 'http.url',
							dataType: 'string' as any,
							type: 'tag',
						},
						op: '=',
						value: '/api/metrics',
					},
				],
				op: 'AND',
			};

			const payload = getEndPointDetailsQueryPayload(
				mockDomainName,
				mockStartTime,
				mockEndTime,
				filtersWithHttpUrl,
			);

			const callsChartQuery = payload[4];
			const latencyChartQuery = payload[5];

			const callsExpression =
				callsChartQuery.query.builder.queryData[0].filter?.expression;
			const latencyExpression =
				latencyChartQuery.query.builder.queryData[0].filter?.expression;

			// CRITICAL: http.url converted to OR logic
			expect(callsExpression).toContain(
				"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
			);
			expect(latencyExpression).toContain(
				"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
			);

			// Base filters still present
			expect(callsExpression).toContain('net.peer.name');
			expect(callsExpression).toContain("kind_string = 'Client'");
		});
	});
});
@@ -0,0 +1,226 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
 * V5 Migration Tests for Status Code Table Query
 *
 * These tests validate the migration from V4 to V5 format for the second payload
 * in getEndPointDetailsQueryPayload (status code table data):
 * - Filter format change: filters.items[] → filter.expression
 * - URL handling: Special logic for (http.url OR url.full)
 * - Domain filter: (net.peer.name OR server.address)
 * - Kind filter: kind_string = 'Client'
 * - Existence filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
|
||||
* - All grouped by response_status_code
|
||||
*/
|
||||
import { TraceAggregation } from 'api/v5/v5';
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('StatusCodeTable - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const mockStartTime = 1000;
const mockEndTime = 2000;
const emptyFilters: IBuilderQuery['filters'] = {
items: [],
op: 'AND',
};

describe('1. V5 Format Migration with Base Filters', () => {
it('migrates to V5 format with correct filter expression structure and base filters', () => {
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
emptyFilters,
);

// Second payload is the status code table query
const statusCodeQuery = payload[1];
const queryA = statusCodeQuery.query.builder.queryData[0];

// CRITICAL V5 MIGRATION: filter.expression (not filters.items)
expect(queryA.filter).toBeDefined();
expect(queryA.filter?.expression).toBeDefined();
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');

// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);

// Base filter 2: Kind
expect(queryA.filter?.expression).toContain("kind_string = 'Client'");

// Base filter 3: response_status_code EXISTS
expect(queryA.filter?.expression).toContain('response_status_code EXISTS');
});
});

describe('2. Three Queries Structure and Consistency', () => {
it('generates three queries (count, p99, rate) all grouped by response_status_code with identical filters', () => {
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
emptyFilters,
);

const statusCodeQuery = payload[1];
const [queryA, queryB, queryC] = statusCodeQuery.query.builder.queryData;

// Query A: Count
expect(queryA.queryName).toBe('A');
expect(queryA.aggregateOperator).toBe('count');
expect(queryA.aggregations?.[0]).toBeDefined();
expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'count(span_id)',
);
expect(queryA.disabled).toBe(false);

// Query B: P99 Latency
expect(queryB.queryName).toBe('B');
expect(queryB.aggregateOperator).toBe('p99');
expect((queryB.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'p99(duration_nano)',
);
expect(queryB.disabled).toBe(false);

// Query C: Rate
expect(queryC.queryName).toBe('C');
expect(queryC.aggregateOperator).toBe('rate');
expect(queryC.disabled).toBe(false);

// All group by response_status_code
[queryA, queryB, queryC].forEach((query) => {
expect(query.groupBy).toContainEqual(
expect.objectContaining({
key: 'response_status_code',
dataType: 'string',
type: 'span',
}),
);
});

// CRITICAL: All have identical filter expressions
expect(queryA.filter?.expression).toBe(queryB.filter?.expression);
expect(queryB.filter?.expression).toBe(queryC.filter?.expression);
});
});

describe('3. Custom Filters Integration', () => {
it('merges custom filters into filter expression with AND logic', () => {
const customFilters: IBuilderQuery['filters'] = {
items: [
{
id: 'test-1',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
{
id: 'test-2',
key: {
key: 'deployment.environment',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'production',
},
],
op: 'AND',
};

const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
customFilters,
);

const statusCodeQuery = payload[1];
const expression =
statusCodeQuery.query.builder.queryData[0].filter?.expression;

// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');

// Custom filters merged
expect(expression).toContain('service.name');
expect(expression).toContain('user-service');
expect(expression).toContain('deployment.environment');
expect(expression).toContain('production');

// All three queries have the same merged expression
const queries = statusCodeQuery.query.builder.queryData;
expect(queries[0].filter?.expression).toBe(queries[1].filter?.expression);
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
});
});

describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};

const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);

const statusCodeQuery = payload[1];
const expression =
statusCodeQuery.query.builder.queryData[0].filter?.expression;

// CRITICAL: http.url converted to OR logic
expect(expression).toContain(
"(http.url = '/api/users' OR url.full = '/api/users')",
);

// Other filters still present
expect(expression).toContain('service.name');
expect(expression).toContain('user-service');

// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');

// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
});
});
});
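Taken together, these assertions pin down the shape of the V5 filter expression. The standalone sketch below is illustrative only: the helper name and composition are assumptions for the reader, not the actual getEndPointDetailsQueryPayload implementation from this changeset.

// Illustrative sketch — not part of this changeset.
// Shows how the base filters asserted above could be ANDed into one V5 expression string.
function buildStatusCodeFilterExpression(
  domain: string,
  customExpression?: string,
): string {
  const parts = [
    `(net.peer.name = '${domain}' OR server.address = '${domain}')`,
    "kind_string = 'Client'",
    'response_status_code EXISTS',
  ];
  if (customExpression) {
    parts.push(customExpression);
  }
  return parts.join(' AND ');
}
// buildStatusCodeFilterExpression('api.example.com', "service.name = 'user-service'")
// => "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND response_status_code EXISTS AND service.name = 'user-service'"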
@@ -1,17 +1,11 @@
import { fireEvent, render, screen, within } from '@testing-library/react';
import { BuilderQuery } from 'api/v5/v5';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
formatTopErrorsDataForTable,
getEndPointDetailsQueryPayload,
getTopErrorsColumnsConfig,
getTopErrorsCoRelationQueryFilters,
getTopErrorsQueryPayload,
} from 'container/ApiMonitoring/utils';
import { useQueries } from 'react-query';
import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';

import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
import { getTopErrorsQueryPayload } from '../utils';

// Mock the EndPointsDropDown component to avoid issues
jest.mock(
@@ -35,26 +29,14 @@ jest.mock(
}),
);

// Mock dependencies
jest.mock('react-query', () => ({
...jest.requireActual('react-query'),
useQueries: jest.fn(),
}));

jest.mock('components/CeleryTask/useNavigateToExplorer', () => ({
useNavigateToExplorer: jest.fn(),
}));

jest.mock('container/ApiMonitoring/utils', () => ({
END_POINT_DETAILS_QUERY_KEYS_ARRAY: ['key1', 'key2', 'key3', 'key4', 'key5'],
formatTopErrorsDataForTable: jest.fn(),
getEndPointDetailsQueryPayload: jest.fn(),
getTopErrorsColumnsConfig: jest.fn(),
getTopErrorsCoRelationQueryFilters: jest.fn(),
getTopErrorsQueryPayload: jest.fn(),
}));

describe('TopErrors', () => {
const TABLE_BODY_SELECTOR = '.ant-table-tbody';
const V5_QUERY_RANGE_API_PATH = '*/api/v5/query_range';

const mockProps = {
// eslint-disable-next-line sonarjs/no-duplicate-string
domainName: 'test-domain',
@@ -68,75 +50,72 @@ describe('TopErrors', () => {
},
};

// Setup basic mocks
// Helper function to wait for table data to load
const waitForTableDataToLoad = async (
container: HTMLElement,
): Promise<void> => {
await waitFor(() => {
const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
expect(tableBody).not.toBeNull();
if (tableBody) {
expect(
within(tableBody as HTMLElement).queryByText('/api/test'),
).toBeInTheDocument();
}
});
};

beforeEach(() => {
jest.clearAllMocks();

// Mock getTopErrorsColumnsConfig
(getTopErrorsColumnsConfig as jest.Mock).mockReturnValue([
{
title: 'Endpoint',
dataIndex: 'endpointName',
key: 'endpointName',
},
{
title: 'Status Code',
dataIndex: 'statusCode',
key: 'statusCode',
},
{
title: 'Status Message',
dataIndex: 'statusMessage',
key: 'statusMessage',
},
{
title: 'Count',
dataIndex: 'count',
key: 'count',
},
]);
// Mock useNavigateToExplorer
(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());

// Mock useQueries
(useQueries as jest.Mock).mockImplementation((queryConfigs) => {
// For topErrorsDataQueries
if (
queryConfigs.length === 1 &&
queryConfigs[0].queryKey &&
queryConfigs[0].queryKey[0] === REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN
) {
return [
{
// Mock V5 API endpoint for top errors
server.use(
rest.post(V5_QUERY_RANGE_API_PATH, (_req, res, ctx) =>
res(
ctx.status(200),
ctx.json({
data: {
payload: {
data: {
result: [
{
metric: {
'http.url': '/api/test',
status_code: '500',
// eslint-disable-next-line sonarjs/no-duplicate-string
status_message: 'Internal Server Error',
data: {
results: [
{
columns: [
{
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
values: [[1000000100, '10']],
queryName: 'A',
legend: 'Test Legend',
},
],
},
{
name: 'response_status_code',
fieldDataType: 'string',
fieldContext: 'span',
},
{
name: 'status_message',
fieldDataType: 'string',
fieldContext: 'span',
},
{ name: 'count()', fieldDataType: 'int64', fieldContext: '' },
],
// eslint-disable-next-line sonarjs/no-duplicate-string
data: [['/api/test', '500', 'Internal Server Error', 10]],
},
],
},
},
isLoading: false,
isRefetching: false,
isError: false,
refetch: jest.fn(),
},
];
}
}),
),
),
);

// For endPointDropDownDataQueries
return [
{
data: {
// Mock V4 API endpoint for dropdown data
server.use(
rest.post('*/api/v1/query_range', (_req, res, ctx) =>
res(
ctx.status(200),
ctx.json({
payload: {
data: {
result: [
@@ -153,62 +132,13 @@ describe('TopErrors', () => {
],
},
},
},
isLoading: false,
isRefetching: false,
isError: false,
},
];
});

// Mock formatTopErrorsDataForTable
(formatTopErrorsDataForTable as jest.Mock).mockReturnValue([
{
key: '1',
endpointName: '/api/test',
statusCode: '500',
statusMessage: 'Internal Server Error',
count: 10,
},
]);

// Mock getTopErrorsQueryPayload
(getTopErrorsQueryPayload as jest.Mock).mockReturnValue([
{
queryName: 'TopErrorsQuery',
start: mockProps.timeRange.startTime,
end: mockProps.timeRange.endTime,
step: 60,
},
]);

// Mock getEndPointDetailsQueryPayload
(getEndPointDetailsQueryPayload as jest.Mock).mockReturnValue([
{},
{},
{
queryName: 'EndpointDropdownQuery',
start: mockProps.timeRange.startTime,
end: mockProps.timeRange.endTime,
step: 60,
},
]);

// Mock useNavigateToExplorer
(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());

// Mock getTopErrorsCoRelationQueryFilters
(getTopErrorsCoRelationQueryFilters as jest.Mock).mockReturnValue({
items: [
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
],
op: 'AND',
});
}),
),
),
);
});

it('renders component correctly', () => {
it('renders component correctly', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
const { container } = render(<TopErrors {...mockProps} />);

@@ -216,10 +146,11 @@ describe('TopErrors', () => {
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
expect(screen.getByText('Status Message Exists')).toBeInTheDocument();

// Find the table row and verify content
const tableBody = container.querySelector('.ant-table-tbody');
expect(tableBody).not.toBeNull();
// Wait for data to load
await waitForTableDataToLoad(container);

// Find the table row and verify content
const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
if (tableBody) {
const row = within(tableBody as HTMLElement).getByRole('row');
expect(within(row).getByText('/api/test')).toBeInTheDocument();
@@ -228,35 +159,40 @@ describe('TopErrors', () => {
}
});

it('renders error state when isError is true', () => {
// Mock useQueries to return isError: true
(useQueries as jest.Mock).mockImplementationOnce(() => [
{
isError: true,
refetch: jest.fn(),
},
]);
it('renders error state when API fails', async () => {
// Mock API to return error
server.use(
rest.post(V5_QUERY_RANGE_API_PATH, (_req, res, ctx) =>
res(ctx.status(500), ctx.json({ error: 'Internal Server Error' })),
),
);

// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);

// Error state should be shown with the actual text displayed in the UI
expect(
screen.getByText('Uh-oh :/ We ran into an error.'),
).toBeInTheDocument();
// Wait for error state
await waitFor(() => {
expect(
screen.getByText('Uh-oh :/ We ran into an error.'),
).toBeInTheDocument();
});

expect(screen.getByText('Please refresh this panel.')).toBeInTheDocument();
expect(screen.getByText('Refresh this panel')).toBeInTheDocument();
});

it('handles row click correctly', () => {
it('handles row click correctly', async () => {
const navigateMock = jest.fn();
(useNavigateToExplorer as jest.Mock).mockReturnValue(navigateMock);

// eslint-disable-next-line react/jsx-props-no-spreading
const { container } = render(<TopErrors {...mockProps} />);

// Wait for data to load
await waitForTableDataToLoad(container);

// Find and click on the table cell containing the endpoint
const tableBody = container.querySelector('.ant-table-tbody');
const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
expect(tableBody).not.toBeNull();

if (tableBody) {
@@ -267,11 +203,28 @@ describe('TopErrors', () => {

// Check if navigateToExplorer was called with correct params
expect(navigateMock).toHaveBeenCalledWith({
filters: [
{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
],
filters: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: 'http.url' }),
op: '=',
value: '/api/test',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'has_error' }),
op: '=',
value: 'true',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'net.peer.name' }),
op: '=',
value: 'test-domain',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'response_status_code' }),
op: '=',
value: '500',
}),
]),
dataSource: DataSource.TRACES,
startTime: mockProps.timeRange.startTime,
endTime: mockProps.timeRange.endTime,
@@ -279,24 +232,34 @@ describe('TopErrors', () => {
});
});

it('updates endpoint filter when dropdown value changes', () => {
it('updates endpoint filter when dropdown value changes', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);

// Wait for initial load
await waitFor(() => {
expect(screen.getByRole('combobox')).toBeInTheDocument();
});

// Find the dropdown
const dropdown = screen.getByRole('combobox');

// Mock the change
fireEvent.change(dropdown, { target: { value: '/api/new-endpoint' } });

// Check if getTopErrorsQueryPayload was called with updated parameters
expect(getTopErrorsQueryPayload).toHaveBeenCalled();
// Component should re-render with new filter
expect(dropdown).toBeInTheDocument();
});

it('handles status message toggle correctly', () => {
it('handles status message toggle correctly', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);

// Wait for initial load
await waitFor(() => {
expect(screen.getByRole('switch')).toBeInTheDocument();
});

// Find the toggle switch
const toggle = screen.getByRole('switch');
expect(toggle).toBeInTheDocument();
@@ -307,69 +270,71 @@ describe('TopErrors', () => {
// Click the toggle to turn it off
fireEvent.click(toggle);

// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=false
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
mockProps.domainName,
mockProps.timeRange.startTime,
mockProps.timeRange.endTime,
expect.any(Object),
false,
);

// Title should change
expect(screen.getByText('All Errors')).toBeInTheDocument();
await waitFor(() => {
expect(screen.getByText('All Errors')).toBeInTheDocument();
});

// Click the toggle to turn it back on
fireEvent.click(toggle);

// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=true
expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
mockProps.domainName,
mockProps.timeRange.startTime,
mockProps.timeRange.endTime,
expect.any(Object),
true,
);

// Title should change back
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
await waitFor(() => {
expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
});
});

it('includes toggle state in query key for cache busting', () => {
it('includes toggle state in query key for cache busting', async () => {
// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);

const toggle = screen.getByRole('switch');
// Wait for initial load
await waitFor(() => {
expect(screen.getByRole('switch')).toBeInTheDocument();
});

// Initial query should include showStatusCodeErrors=true
expect(useQueries).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({
queryKey: expect.arrayContaining([
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
expect.any(Object),
expect.any(String),
true,
]),
}),
]),
);
const toggle = screen.getByRole('switch');

// Click toggle
fireEvent.click(toggle);

// Query should be called with showStatusCodeErrors=false in key
expect(useQueries).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({
queryKey: expect.arrayContaining([
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
expect.any(Object),
expect.any(String),
false,
]),
}),
]),
// Wait for title to change, indicating query was refetched with new key
await waitFor(() => {
expect(screen.getByText('All Errors')).toBeInTheDocument();
});

// The fact that data refetches when toggle changes proves the query key includes the toggle state
expect(toggle).toBeInTheDocument();
});

it('sends query_range v5 API call with required filters including has_error', async () => {
// let capturedRequest: any;

const topErrorsPayload = getTopErrorsQueryPayload(
'test-domain',
mockProps.timeRange.startTime,
mockProps.timeRange.endTime,
{ items: [], op: 'AND' },
false,
);

// eslint-disable-next-line react/jsx-props-no-spreading
render(<TopErrors {...mockProps} />);

// Wait for the API call to be made
await waitFor(() => {
expect(topErrorsPayload).toBeDefined();
});

// Extract the filter expression from the captured request
// getTopErrorsQueryPayload returns a builder_query with TraceBuilderQuery spec
const builderQuery = topErrorsPayload.compositeQuery.queries[0]
.spec as BuilderQuery;
const filterExpression = builderQuery.filter?.expression;

// Verify all required filters are present
expect(filterExpression).toContain(
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
);
});
});

File diff suppressed because it is too large
@@ -112,6 +112,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
setShowPaymentFailedWarning,
] = useState<boolean>(false);

const errorBoundaryRef = useRef<Sentry.ErrorBoundary>(null);

const [showSlowApiWarning, setShowSlowApiWarning] = useState(false);
const [slowApiWarningShown, setSlowApiWarningShown] = useState(false);

@@ -378,6 +380,13 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
getChangelogByVersionResponse.isSuccess,
]);

// reset error boundary on route change
useEffect(() => {
if (errorBoundaryRef.current) {
errorBoundaryRef.current.resetErrorBoundary();
}
}, [pathname]);

const isToDisplayLayout = isLoggedIn;

const routeKey = useMemo(() => getRouteKey(pathname), [pathname]);
@@ -836,7 +845,10 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
})}
data-overlayscrollbars-initialize
>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<Sentry.ErrorBoundary
fallback={<ErrorBoundaryFallback />}
ref={errorBoundaryRef}
>
<LayoutContent data-overlayscrollbars-initialize>
<OverlayScrollbar>
<ChildrenContainer>

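The three hunks above split one pattern across the file: hold a ref to Sentry's ErrorBoundary and clear its error state whenever the route changes. The sketch below assembles that pattern into a single, self-contained component for readability; it is an illustration under those assumptions, not a copy of AppLayout.

// Illustrative sketch — not a verbatim copy of this changeset.
import * as Sentry from '@sentry/react';
import { ReactNode, useEffect, useRef } from 'react';
import { useLocation } from 'react-router-dom';

function RouteAwareBoundary({ children }: { children: ReactNode }): JSX.Element {
  const errorBoundaryRef = useRef<Sentry.ErrorBoundary>(null);
  const { pathname } = useLocation();

  useEffect(() => {
    // same call the diff relies on: drop the captured error so the next page renders normally
    errorBoundaryRef.current?.resetErrorBoundary();
  }, [pathname]);

  return (
    <Sentry.ErrorBoundary ref={errorBoundaryRef} fallback={<p>Something went wrong</p>}>
      {children}
    </Sentry.ErrorBoundary>
  );
}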
@@ -11,12 +11,14 @@ import { v4 } from 'uuid';

import { useCreateAlertState } from '../context';
import {
INITIAL_EVALUATION_WINDOW_STATE,
INITIAL_INFO_THRESHOLD,
INITIAL_RANDOM_THRESHOLD,
INITIAL_WARNING_THRESHOLD,
THRESHOLD_MATCH_TYPE_OPTIONS,
THRESHOLD_OPERATOR_OPTIONS,
} from '../context/constants';
import { AlertThresholdMatchType } from '../context/types';
import EvaluationSettings from '../EvaluationSettings/EvaluationSettings';
import ThresholdItem from './ThresholdItem';
import { AnomalyAndThresholdProps, UpdateThreshold } from './types';
@@ -38,12 +40,12 @@ function AlertThreshold({
alertState,
thresholdState,
setThresholdState,
setEvaluationWindow,
notificationSettings,
setNotificationSettings,
} = useCreateAlertState();

const { currentQuery } = useQueryBuilder();

const queryNames = getQueryNames(currentQuery);

useEffect(() => {
@@ -160,6 +162,54 @@ function AlertThreshold({
}),
);

const handleSetEvaluationDetailsForMeter = (): void => {
setEvaluationWindow({
type: 'SET_INITIAL_STATE_FOR_METER',
});

setThresholdState({
type: 'SET_MATCH_TYPE',
payload: AlertThresholdMatchType.IN_TOTAL,
});
};

const handleSelectedQueryChange = (value: string): void => {
// loop through currentQuery and find the query that matches the selected query
const query = currentQuery?.builder?.queryData.find(
(query) => query.queryName === value,
);

const currentSelectedQuery = currentQuery?.builder?.queryData.find(
(query) => query.queryName === thresholdState.selectedQuery,
);

const newSelectedQuerySource = query?.source || '';
const currentSelectedQuerySource = currentSelectedQuery?.source || '';

if (newSelectedQuerySource === currentSelectedQuerySource) {
setThresholdState({
type: 'SET_SELECTED_QUERY',
payload: value,
});

return;
}

if (newSelectedQuerySource === 'meter') {
handleSetEvaluationDetailsForMeter();
} else {
setEvaluationWindow({
type: 'SET_INITIAL_STATE',
payload: INITIAL_EVALUATION_WINDOW_STATE,
});
}

setThresholdState({
type: 'SET_SELECTED_QUERY',
payload: value,
});
};

return (
<div
className={classNames(
@@ -175,12 +225,7 @@ function AlertThreshold({
</Typography.Text>
<Select
value={thresholdState.selectedQuery}
onChange={(value): void => {
setThresholdState({
type: 'SET_SELECTED_QUERY',
payload: value,
});
}}
onChange={handleSelectedQueryChange}
style={{ width: 80 }}
options={queryNames}
data-testid="alert-threshold-query-select"

|
||||
font-size: 13px;
|
||||
|
||||
&::placeholder {
|
||||
color: #888;
|
||||
color: var(--bg-vanilla-400);
|
||||
}
|
||||
|
||||
&:focus,
|
||||
|
||||
@@ -10,6 +10,7 @@ import { getEvaluationWindowTypeText, getTimeframeText } from './utils';

function EvaluationSettings(): JSX.Element {
const { evaluationWindow, setEvaluationWindow } = useCreateAlertState();

const [
isEvaluationWindowPopoverOpen,
setIsEvaluationWindowPopoverOpen,

@@ -24,7 +24,11 @@ import {
INITIAL_EVALUATION_WINDOW_STATE,
INITIAL_NOTIFICATION_SETTINGS_STATE,
} from './constants';
import { ICreateAlertContextProps, ICreateAlertProviderProps } from './types';
import {
AlertThresholdMatchType,
ICreateAlertContextProps,
ICreateAlertProviderProps,
} from './types';
import {
advancedOptionsReducer,
alertCreationReducer,
@@ -67,6 +71,7 @@ export function CreateAlertProvider(
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const location = useLocation();
const queryParams = new URLSearchParams(location.search);
const thresholdsFromURL = queryParams.get(QueryParams.thresholds);

const [alertType, setAlertType] = useState<AlertTypes>(() => {
if (isEditMode) {
@@ -122,7 +127,28 @@ export function CreateAlertProvider(
setThresholdState({
type: 'RESET',
});
}, [alertType]);

if (thresholdsFromURL) {
try {
const thresholds = JSON.parse(thresholdsFromURL);
setThresholdState({
type: 'SET_THRESHOLDS',
payload: thresholds,
});
} catch (error) {
console.error('Error parsing thresholds from URL:', error);
}

setEvaluationWindow({
type: 'SET_INITIAL_STATE_FOR_METER',
});

setThresholdState({
type: 'SET_MATCH_TYPE',
payload: AlertThresholdMatchType.IN_TOTAL,
});
}
}, [alertType, thresholdsFromURL]);

useEffect(() => {
if (isEditMode && initialAlertState) {

@@ -237,6 +237,7 @@ export type EvaluationWindowAction =
}
| { type: 'SET_EVALUATION_CADENCE_MODE'; payload: EvaluationCadenceMode }
| { type: 'SET_INITIAL_STATE'; payload: EvaluationWindowState }
| { type: 'SET_INITIAL_STATE_FOR_METER' }
| { type: 'RESET' };

export type EvaluationCadenceMode = 'default' | 'custom' | 'rrule';

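To make the new URL round-trip concrete, here is an illustrative example of the kind of value the thresholds query parameter could carry; the threshold object fields below are assumptions for the example, not the real payload shape from this changeset.

// Illustrative sketch — field names are hypothetical.
const exampleThresholds = [
  { label: 'WARNING', value: 80 },
  { label: 'CRITICAL', value: 95 },
];
const search = new URLSearchParams({
  thresholds: JSON.stringify(exampleThresholds),
});
// A link built with `?${search.toString()}` is read back by CreateAlertProvider via
// queryParams.get(QueryParams.thresholds) and JSON.parse, then dispatched as SET_THRESHOLDS.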
@@ -1,3 +1,5 @@
import { UTC_TIMEZONE } from 'components/CustomTimePicker/timezoneUtils';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { QueryParams } from 'constants/query';
import {
alertDefaults,
@@ -11,6 +13,7 @@ import { AlertDef } from 'types/api/alerts/def';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import { CumulativeWindowTimeframes } from '../EvaluationSettings/types';
import {
INITIAL_ADVANCED_OPTIONS_STATE,
INITIAL_ALERT_STATE,
@@ -210,6 +213,18 @@ export const evaluationWindowReducer = (
return INITIAL_EVALUATION_WINDOW_STATE;
case 'SET_INITIAL_STATE':
return action.payload;
case 'SET_INITIAL_STATE_FOR_METER':
return {
...state,
windowType: 'cumulative',
timeframe: CumulativeWindowTimeframes.CURRENT_DAY,
startingAt: {
time: '00:00:00',
number: '0',
timezone: UTC_TIMEZONE.value,
unit: UniversalYAxisUnit.MINUTES,
},
};
default:
return state;
}

@@ -17,6 +17,7 @@ function ExplorerOptionWrapper({
isOneChartPerQuery,
splitedQueries,
signalSource,
handleChangeSelectedView,
}: ExplorerOptionsWrapperProps): JSX.Element {
const [isExplorerOptionHidden, setIsExplorerOptionHidden] = useState(false);

@@ -38,6 +39,7 @@ function ExplorerOptionWrapper({
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
isOneChartPerQuery={isOneChartPerQuery}
splitedQueries={splitedQueries}
handleChangeSelectedView={handleChangeSelectedView}
/>
);
}

@@ -72,10 +72,11 @@ import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { ViewProps } from 'types/api/saveViews/types';
import { DataSource, StringOperators } from 'types/common/queryBuilder';
import { USER_ROLES } from 'types/roles';
import { panelTypeToExplorerView } from 'utils/explorerUtils';

import { PreservedViewsTypes } from './constants';
import ExplorerOptionsHideArea from './ExplorerOptionsHideArea';
import { PreservedViewsInLocalStorage } from './types';
import { ChangeViewFunctionType, PreservedViewsInLocalStorage } from './types';
import {
DATASOURCE_VS_ROUTES,
generateRGBAFromHex,
@@ -98,6 +99,7 @@ function ExplorerOptions({
setIsExplorerOptionHidden,
isOneChartPerQuery = false,
splitedQueries = [],
handleChangeSelectedView,
}: ExplorerOptionsProps): JSX.Element {
const [isExport, setIsExport] = useState<boolean>(false);
const [isSaveModalOpen, setIsSaveModalOpen] = useState(false);
@@ -412,13 +414,22 @@ function ExplorerOptions({
if (!currentViewDetails) return;
const { query, name, id, panelType: currentPanelType } = currentViewDetails;

handleExplorerTabChange(currentPanelType, {
query,
name,
id,
});
if (handleChangeSelectedView) {
handleChangeSelectedView(panelTypeToExplorerView[currentPanelType], {
query,
name,
id,
});
} else {
// to remove this after traces cleanup
handleExplorerTabChange(currentPanelType, {
query,
name,
id,
});
}
},
[viewsData, handleExplorerTabChange],
[viewsData, handleExplorerTabChange, handleChangeSelectedView],
);

const updatePreservedViewInLocalStorage = (option: {
@@ -524,6 +535,10 @@ function ExplorerOptions({
return;
}

if (handleChangeSelectedView) {
handleChangeSelectedView(panelTypeToExplorerView[PANEL_TYPES.LIST]);
}

history.replace(DATASOURCE_VS_ROUTES[sourcepage]);
};

@@ -1020,6 +1035,7 @@ export interface ExplorerOptionsProps {
setIsExplorerOptionHidden?: Dispatch<SetStateAction<boolean>>;
isOneChartPerQuery?: boolean;
splitedQueries?: Query[];
handleChangeSelectedView?: ChangeViewFunctionType;
}

ExplorerOptions.defaultProps = {
@@ -1029,6 +1045,7 @@ ExplorerOptions.defaultProps = {
isOneChartPerQuery: false,
splitedQueries: [],
signalSource: '',
handleChangeSelectedView: undefined,
};

export default ExplorerOptions;

@@ -2,6 +2,8 @@ import { NotificationInstance } from 'antd/es/notification/interface';
import { AxiosResponse } from 'axios';
import { SaveViewWithNameProps } from 'components/ExplorerCard/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { ICurrentQueryData } from 'hooks/useHandleExplorerTabChange';
import { ExplorerViews } from 'pages/LogsExplorer/utils';
import { Dispatch, SetStateAction } from 'react';
import { UseMutateAsyncFunction } from 'react-query';
import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery';
@@ -38,3 +40,8 @@ export type PreservedViewType =
export type PreservedViewsInLocalStorage = Partial<
Record<PreservedViewType, { key: string; value: string }>
>;

export type ChangeViewFunctionType = (
view: ExplorerViews,
querySearchParameters?: ICurrentQueryData,
) => void;

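A brief, hypothetical illustration of a callback that satisfies the new ChangeViewFunctionType contract; only the type itself comes from this changeset, the setter and helper named below are placeholders.

// Illustrative sketch — handler internals are hypothetical.
const handleChangeSelectedView: ChangeViewFunctionType = (view, querySearchParameters) => {
  setSelectedExplorerView(view); // hypothetical state setter for the active explorer view
  if (querySearchParameters) {
    applySavedViewQuery(querySearchParameters); // hypothetical helper restoring the saved view's query
  }
};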
@@ -36,6 +36,7 @@ function QuerySection({
// init namespace for translations
const { t } = useTranslation('alerts');
const [currentTab, setCurrentTab] = useState(queryCategory);
const [signalSource, setSignalSource] = useState<string>('metrics');

const handleQueryCategoryChange = (queryType: string): void => {
setQueryCategory(queryType as EQueryType);
@@ -48,12 +49,17 @@

const isDarkMode = useIsDarkMode();

const handleSignalSourceChange = (value: string): void => {
setSignalSource(value);
};

const renderMetricUI = (): JSX.Element => (
<QueryBuilderV2
panelType={panelType}
config={{
queryVariant: 'static',
initialDataSource: ALERTS_DATA_SOURCE_MAP[alertType],
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions={
@@ -62,6 +68,8 @@
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>
);

@@ -1,9 +1,11 @@
import { CaretDownFilled, CaretRightFilled } from '@ant-design/icons';
import { Col, Typography } from 'antd';
import { StyledCol, StyledRow } from 'components/Styled';
import { IIntervalUnit } from 'container/TraceDetail/utils';
import {
IIntervalUnit,
SPAN_DETAILS_LEFT_COL_WIDTH,
} from 'container/TraceDetail/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { SPAN_DETAILS_LEFT_COL_WIDTH } from 'pages/TraceDetail/constants';
import {
Dispatch,
MouseEventHandler,

@@ -137,8 +137,7 @@ function GeneralSettings({
if (logsCurrentTTLValues) {
setLogsTotalRetentionPeriod(logsCurrentTTLValues.default_ttl_days * 24);
setLogsS3RetentionPeriod(
logsCurrentTTLValues.cold_storage_ttl_days &&
logsCurrentTTLValues.cold_storage_ttl_days > 0
logsCurrentTTLValues.cold_storage_ttl_days
? logsCurrentTTLValues.cold_storage_ttl_days * 24
: null,
);

@@ -94,6 +94,9 @@ const mockDisksWithoutS3: IDiskType[] = [
];

describe('GeneralSettings - S3 Logs Retention', () => {
const BUTTON_SELECTOR = 'button[type="button"]';
const PRIMARY_BUTTON_CLASS = 'ant-btn-primary';

beforeEach(() => {
jest.clearAllMocks();
(setRetentionApiV2 as jest.Mock).mockResolvedValue({
@@ -155,10 +158,10 @@ describe('GeneralSettings - S3 Logs Retention', () => {
await user.type(s3Input, '5');

// Find the save button in the Logs card
const buttons = logsCard?.querySelectorAll('button[type="button"]');
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
// The primary button should be the save button
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes('ant-btn-primary'),
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;

expect(saveButton).toBeInTheDocument();
@@ -262,9 +265,9 @@ describe('GeneralSettings - S3 Logs Retention', () => {
await user.type(totalInput, '60');

// Find the save button
const buttons = logsCard?.querySelectorAll('button[type="button"]');
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes('ant-btn-primary'),
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;

expect(saveButton).toBeInTheDocument();
@@ -329,4 +332,59 @@ describe('GeneralSettings - S3 Logs Retention', () => {
expect(dropdowns?.[1]).toHaveTextContent('Days');
});
});

describe('Test 4: Save Button State with S3 Disabled', () => {
it('should disable save button when cold_storage_ttl_days is -1 and no changes made', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });

render(
<GeneralSettings
metricsTtlValuesPayload={mockMetricsRetention}
tracesTtlValuesPayload={mockTracesRetention}
logsTtlValuesPayload={mockLogsRetentionWithoutS3}
getAvailableDiskPayload={mockDisksWithS3}
metricsTtlValuesRefetch={jest.fn()}
tracesTtlValuesRefetch={jest.fn()}
logsTtlValuesRefetch={jest.fn()}
/>,
);

// Find the Logs card
const logsCard = screen.getByText('Logs').closest('.ant-card');
expect(logsCard).toBeInTheDocument();

// Find the save button
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;

expect(saveButton).toBeInTheDocument();

// Verify save button is disabled on initial load (no changes, S3 disabled with -1)
expect(saveButton).toBeDisabled();

// Find the total retention input
const inputs = logsCard?.querySelectorAll('input[type="text"]');
const totalInput = inputs?.[0] as HTMLInputElement;

// Change total retention value to trigger button enable
await user.clear(totalInput);
await user.type(totalInput, '60');

// Button should now be enabled after change
await waitFor(() => {
expect(saveButton).not.toBeDisabled();
});

// Revert to original value (30 days displays as 1 Month)
await user.clear(totalInput);
await user.type(totalInput, '1');

// Button should be disabled again (back to original state)
await waitFor(() => {
expect(saveButton).toBeDisabled();
});
});
});
});

@@ -46,8 +46,7 @@ export const convertHoursValueToRelevantUnit = (
availableUnits?: ITimeUnit[],
): ITimeUnitConversion => {
const unitsToConsider = availableUnits?.length ? availableUnits : TimeUnits;

if (value) {
if (value >= 0) {
for (let idx = unitsToConsider.length - 1; idx >= 0; idx -= 1) {
const timeUnit = unitsToConsider[idx];
const convertedValue = timeUnit.multiplier * value;
@@ -62,7 +61,7 @@
}

// Fallback to the first available unit
return { value, timeUnitValue: unitsToConsider[0].value };
return { value: -1, timeUnitValue: unitsToConsider[0].value };
};

export const convertHoursValueToRelevantUnitString = (

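One concrete consequence of this hunk, shown as a hedged example (only the fallback path visible in the diff is illustrated; the conversion loop itself is unchanged and not repeated here):

// Illustrative only — with the new guard and fallback, a disabled cold-storage TTL of -1
// no longer leaks the raw input through the unit conversion:
// convertHoursValueToRelevantUnit(-1) // => { value: -1, timeUnitValue: unitsToConsider[0].value }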
@@ -324,6 +324,7 @@ function FullView({
panelType={selectedPanelType}
version={selectedDashboard?.data?.version || 'v3'}
isListViewPanel={selectedPanelType === PANEL_TYPES.LIST}
signalSourceChangeEnabled
// filterConfigs={filterConfigs}
// queryComponents={queryComponents}
/>

@@ -17,12 +17,6 @@ export const Card = styled(CardComponent)<CardProps>`
overflow: hidden;
border-radius: 3px;
border: 1px solid var(--bg-slate-500);
background: linear-gradient(
0deg,
rgba(171, 189, 255, 0) 0%,
rgba(171, 189, 255, 0) 100%
),
#0b0c0e;

${({ isDarkMode }): StyledCSS =>
!isDarkMode &&

@@ -49,17 +49,29 @@ function GridTableComponent({
panelType,
queryRangeRequest,
decimalPrecision,
hiddenColumns = [],
...props
}: GridTableComponentProps): JSX.Element {
const { t } = useTranslation(['valueGraph']);

// create columns and dataSource in the ui friendly structure
// use the query from the widget here to extract the legend information
const { columns, dataSource: originalDataSource } = useMemo(
const { columns: allColumns, dataSource: originalDataSource } = useMemo(
() => createColumnsAndDataSource((data as unknown) as TableData, query),
[query, data],
);

// Filter out hidden columns from being displayed
const columns = useMemo(
() =>
allColumns.filter(
(column) =>
!('dataIndex' in column) ||
!hiddenColumns.includes(column.dataIndex as string),
),
[allColumns, hiddenColumns],
);

const createDataInCorrectFormat = useCallback(
(dataSource: RowData[]): RowData[] =>
dataSource.map((d) => {
@@ -88,17 +100,13 @@ function GridTableComponent({
const newValue = { ...val };
Object.keys(val).forEach((k) => {
const unit = getColumnUnit(k, columnUnits);
// Apply formatting if:
// 1. Column has a unit defined, OR
// 2. decimalPrecision is specified (format all values)
const shouldFormat = unit || decimalPrecision !== undefined;

if (shouldFormat) {
if (unit) {
// the check below takes care of not adding units for rows that have n/a or null values
if (val[k] !== 'n/a' && val[k] !== null) {
newValue[k] = getYAxisFormattedValue(
String(val[k]),
unit || 'none',
unit,
decimalPrecision,
);
} else if (val[k] === null) {

@@ -30,6 +30,7 @@ export type GridTableComponentProps = {
contextLinks?: ContextLinksData;
panelType?: PANEL_TYPES;
queryRangeRequest?: QueryRangeRequestV5;
hiddenColumns?: string[];
} & Pick<LogsExplorerTableProps, 'data'> &
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;

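A minimal sketch of how a caller could use the new hiddenColumns prop; the widget variables and the column key below are made up for illustration, and the prop matches entries against each rendered column's dataIndex.

// Illustrative sketch — hypothetical usage, not part of this changeset.
<GridTableComponent
  data={tableData}
  query={widget.query}
  hiddenColumns={['internal_id']} // any column whose dataIndex is listed here is not rendered
/>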
@@ -423,6 +423,7 @@
display: flex;
flex-direction: row;
gap: 14px;
align-items: flex-start;

.section-icon {
display: flex;
@@ -461,7 +462,6 @@
flex-direction: column;
gap: 14px;

width: 150px;
justify-content: flex-end;

.ant-btn {

@@ -418,6 +418,11 @@
font-size: 12px;
font-weight: 600;
}

.set-alert-btn {
cursor: pointer;
margin-left: 24px;
}
}
}

Some files were not shown because too many files have changed in this diff