Compare commits
2 Commits
v0.104.0-c...tvats-hand

| Author | SHA1 | Date |
|---|---|---|
| | 509a1cfb85 | |
| | fd118d386a | |

@@ -42,7 +42,7 @@ services:
      timeout: 5s
      retries: 3
  schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.129.8
    container_name: schema-migrator-sync
    command:
      - sync
@@ -55,7 +55,7 @@ services:
        condition: service_healthy
      restart: on-failure
  schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.129.8
    container_name: schema-migrator-async
    command:
      - async

.github/CODEOWNERS (vendored): 4 changed lines
@@ -6,10 +6,6 @@
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
-
-# Onboarding
-/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
-/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish

# Dashboard, Alert, Metrics, Service Map, Services
/frontend/src/container/ListOfDashboard/ @srikanthccv
/frontend/src/container/NewDashboard/ @srikanthccv

.github/workflows/build-enterprise.yaml (vendored): 1 changed line
@@ -69,7 +69,6 @@ jobs:
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
          echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
-         echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:

.github/workflows/build-staging.yaml (vendored): 1 changed line
@@ -68,7 +68,6 @@ jobs:
          echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
          echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
-         echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:

.github/workflows/gor-signoz.yaml (vendored): 1 changed line
@@ -35,7 +35,6 @@ jobs:
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
          echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
          echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
-         echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
      - name: build-frontend
        run: make js-build
      - name: upload-frontend-artifact

.github/workflows/integrationci.yaml (vendored): 1 changed line
@@ -18,7 +18,6 @@ jobs:
          - passwordauthn
          - callbackauthn
          - cloudintegrations
          - dashboard
          - querier
          - ttl
        sqlstore-provider:

Makefile: 12 changed lines
@@ -84,9 +84,10 @@ go-run-enterprise: ## Runs the enterprise go backend server
    SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
    SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
    SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
    SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
    go run -race \
-   $(GO_BUILD_CONTEXT_ENTERPRISE)/*.go server
+   $(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
+   --config ./conf/prometheus.yml \
+   --cluster cluster

.PHONY: go-test
go-test: ## Runs go unit tests
@@ -101,9 +102,10 @@ go-run-community: ## Runs the community go backend server
    SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
    SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
    SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
    SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
    go run -race \
-   $(GO_BUILD_CONTEXT_COMMUNITY)/*.go server
+   $(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
+   --config ./conf/prometheus.yml \
+   --cluster cluster

.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -206,4 +208,4 @@ py-lint: ## Run lint for integration tests

.PHONY: py-test
py-test: ## Runs integration tests
-   @cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
+   @cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/

@@ -5,12 +5,9 @@ import (
    "log/slog"

    "github.com/SigNoz/signoz/cmd"
-   "github.com/SigNoz/signoz/ee/authz/openfgaauthz"
-   "github.com/SigNoz/signoz/ee/authz/openfgaschema"
    "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
    "github.com/SigNoz/signoz/pkg/analytics"
    "github.com/SigNoz/signoz/pkg/authn"
-   "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -79,9 +76,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
    func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
        return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
    },
-   func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
-       return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
-   },
    )
    if err != nil {
        logger.ErrorContext(ctx, "failed to create signoz", "error", err)

@@ -8,8 +8,6 @@ import (
    "github.com/SigNoz/signoz/cmd"
    "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
    "github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
-   "github.com/SigNoz/signoz/ee/authz/openfgaauthz"
-   "github.com/SigNoz/signoz/ee/authz/openfgaschema"
    enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
    "github.com/SigNoz/signoz/ee/licensing/httplicensing"
    enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -19,7 +17,6 @@ import (
    "github.com/SigNoz/signoz/ee/zeus/httpzeus"
    "github.com/SigNoz/signoz/pkg/analytics"
    "github.com/SigNoz/signoz/pkg/authn"
-   "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/modules/organization"
@@ -108,9 +105,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e

        return authNs, nil
    },
-   func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
-       return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
-   },
    )
    if err != nil {
        logger.ErrorContext(ctx, "failed to create signoz", "error", err)

@@ -47,10 +47,10 @@ cache:
  provider: memory
  # memory: Uses in-memory caching.
  memory:
    # Max items for the in-memory cache (10x the entries)
    num_counters: 100000
    # Total cost in bytes allocated bounded cache
    max_cost: 67108864
    # Time-to-live for cache entries in memory. Specify the duration in ns
    ttl: 60000000000
    # The interval at which the cache will be cleaned up
    cleanup_interval: 1m
  # redis: Uses Redis as the caching backend.
  redis:
    # The hostname or IP address of the Redis server.
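
(Note on the memory cache values above, worked out for reference: ttl 60000000000 ns is 60 s, max_cost 67108864 bytes is 64 MiB, and num_counters 100000 corresponds to roughly 10,000 cached entries at the 10x ratio the comment describes.)
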
@@ -176,7 +176,7 @@ services:
    # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
-    image: signoz/signoz:v0.103.1
+    image: signoz/signoz:v0.100.1
    command:
      - --config=/root/config/prometheus.yml
    ports:
@@ -209,7 +209,7 @@ services:
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.12
+    image: signoz/signoz-otel-collector:v0.129.8
    command:
      - --config=/etc/otel-collector-config.yaml
      - --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
      - signoz
  schema-migrator:
    !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.129.8
    deploy:
      restart_policy:
        condition: on-failure

@@ -117,7 +117,7 @@ services:
    # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
-    image: signoz/signoz:v0.103.1
+    image: signoz/signoz:v0.100.1
    command:
      - --config=/root/config/prometheus.yml
    ports:
@@ -150,7 +150,7 @@ services:
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.12
+    image: signoz/signoz-otel-collector:v0.129.8
    command:
      - --config=/etc/otel-collector-config.yaml
      - --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
      - signoz
  schema-migrator:
    !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.129.8
    deploy:
      restart_policy:
        condition: on-failure

@@ -179,7 +179,7 @@ services:
    # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.103.1}
+    image: signoz/signoz:${VERSION:-v0.100.1}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
  # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
  otel-collector:
    !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
    container_name: signoz-otel-collector
    command:
      - --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
        condition: service_healthy
  schema-migrator-sync:
    !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
    container_name: schema-migrator-sync
    command:
      - sync
@@ -250,7 +250,7 @@ services:
        condition: service_healthy
  schema-migrator-async:
    !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
    container_name: schema-migrator-async
    command:
      - async

@@ -111,7 +111,7 @@ services:
    # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
  signoz:
    !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.103.1}
+    image: signoz/signoz:${VERSION:-v0.100.1}
    container_name: signoz
    command:
      - --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
      retries: 3
  otel-collector:
    !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
    container_name: signoz-otel-collector
    command:
      - --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
        condition: service_healthy
  schema-migrator-sync:
    !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
    container_name: schema-migrator-sync
    command:
      - sync
@@ -178,7 +178,7 @@ services:
      restart: on-failure
  schema-migrator-async:
    !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
    container_name: schema-migrator-async
    command:
      - async

@@ -103,19 +103,9 @@ Remember to replace the region and ingestion key with proper values as obtained

Both SigNoz and the OTel demo app [frontend-proxy service, to be accurate] share common port allocation at 8080. To prevent port allocation conflicts, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run the docker compose command.

Also, both SigNoz and the OTel Demo App have the same `PROMETHEUS_PORT` configured; by default both of them try to start at `9090`, which may cause either of them to fail depending upon which one acquires it first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.

```sh
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
ENVOY_PORT=8081 docker compose up -d
```

Alternatively, we can modify these values using the `.env` file too, which reduces the command to just:

```sh
docker compose up -d
```
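
For illustration only (this snippet is not part of the diff), the `.env` override the paragraph above refers to could look like the sketch below. The variable names `ENVOY_PORT` and `PROMETHEUS_PORT` are taken from the commands shown earlier; whether the demo's compose file reads both of them from `.env` is an assumption here.

```sh
# Assumed .env overrides for running the OTel demo alongside SigNoz
ENVOY_PORT=8081        # frontend-proxy moved off 8080 to avoid clashing with SigNoz
PROMETHEUS_PORT=9091   # moved off 9090 to avoid clashing with the other Prometheus
```

With these values in `.env`, a plain `docker compose up -d` should pick them up automatically.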

This spins up multiple microservices, with OpenTelemetry instrumentation enabled. You can verify this by:

```sh
docker compose ps -a
```

@@ -129,12 +129,6 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
    return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}
-
-func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
-   return &authtypes.AuthNProviderInfo{
-       RelayStatePath: nil,
-   }
-}

func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
    if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
        ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)

@@ -99,14 +99,6 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
    return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}
-
-func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
-   state := authtypes.NewState(&url.URL{Path: "login"}, authDomain.StorableAuthDomain().ID).URL.String()
-
-   return &authtypes.AuthNProviderInfo{
-       RelayStatePath: &state,
-   }
-}

func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
    certStore, err := a.getCertificateStore(authDomain)
    if err != nil {

@@ -48,26 +48,7 @@ func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey)
}

func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
-   subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
-   if err != nil {
-       return err
-   }
-
-   tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
-   if err != nil {
-       return err
-   }
-
-   err = provider.BatchCheck(ctx, tuples)
-   if err != nil {
-       return err
-   }
-
-   return nil
-}
-
-func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
-   subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
+   subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
    if err != nil {
        return err
    }

@@ -15,18 +15,18 @@ type anonymous

type role
  relations
-    define assignee: [user, anonymous]
+    define assignee: [user]

    define read: [user, role#assignee]
    define update: [user, role#assignee]
    define delete: [user, role#assignee]

-type metaresources
+type resources
  relations
    define create: [user, role#assignee]
    define list: [user, role#assignee]

-type metaresource
+type resource
  relations
    define read: [user, anonymous, role#assignee]
    define update: [user, role#assignee]
@@ -35,6 +35,6 @@ type metaresource
    define block: [user, role#assignee]


-type telemetryresource
+type telemetry
  relations
-    define read: [user, role#assignee]
+    define read: [user, anonymous, role#assignee]

@@ -20,10 +20,6 @@ import (
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    rules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/dashboardtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/SigNoz/signoz/pkg/version"
    "github.com/gorilla/mux"
)
@@ -103,39 +99,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
    router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)
-
-   // dashboards
-   router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.CreatePublic)).Methods(http.MethodPost)
-   router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.GetPublic)).Methods(http.MethodGet)
-   router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.UpdatePublic)).Methods(http.MethodPut)
-   router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.DeletePublic)).Methods(http.MethodDelete)
-
-   // public access for dashboards
-   router.HandleFunc("/api/v1/public/dashboards/{id}", am.CheckWithoutClaims(
-       ah.Signoz.Handlers.Dashboard.GetPublicData,
-       authtypes.RelationRead, authtypes.RelationRead,
-       dashboardtypes.TypeableMetaResourcePublicDashboard,
-       func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
-           id, err := valuer.NewUUID(mux.Vars(req)["id"])
-           if err != nil {
-               return nil, valuer.UUID{}, err
-           }
-
-           return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
-       })).Methods(http.MethodGet)
-
-   router.HandleFunc("/api/v1/public/dashboards/{id}/widgets/{index}/query_range", am.CheckWithoutClaims(
-       ah.Signoz.Handlers.Dashboard.GetPublicWidgetQueryRange,
-       authtypes.RelationRead, authtypes.RelationRead,
-       dashboardtypes.TypeableMetaResourcePublicDashboard,
-       func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
-           id, err := valuer.NewUUID(mux.Vars(req)["id"])
-           if err != nil {
-               return nil, valuer.UUID{}, err
-           }
-
-           return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
-       })).Methods(http.MethodGet)

    // v3
    router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
    router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)

@@ -9,7 +9,6 @@ import (
    _ "net/http/pprof" // http profiler
    "slices"

-   "github.com/SigNoz/signoz/pkg/cache/memorycache"
    "github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
    "go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
    "go.opentelemetry.io/otel/propagation"
@@ -75,26 +74,13 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
        return nil, err
    }
-
-   cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
-       Provider: "memory",
-       Memory: cache.Memory{
-           NumCounters: 10 * 10000,
-           MaxCost: 1 << 27, // 128 MB
-       },
-   })
-   if err != nil {
-       return nil, err
-   }

    reader := clickhouseReader.NewReader(
        signoz.SQLStore,
        signoz.TelemetryStore,
        signoz.Prometheus,
        signoz.TelemetryStore.Cluster(),
        config.Querier.FluxInterval,
-       cacheForTraceDetail,
        signoz.Cache,
        nil,
    )

    rm, err := makeRulesManager(
@@ -206,7 +192,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {

func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
    r := baseapp.NewRouter()
-   am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
+   am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())

    r.Use(otelmux.Middleware(
        "apiserver",

@@ -246,9 +246,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
            continue
        }
    }
-   results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
-       ActiveAlerts: r.ActiveAlertsLabelFP(),
-   })
+   results, err := r.Threshold.ShouldAlert(*series, r.Unit())
    if err != nil {
        return nil, err
    }
@@ -298,9 +296,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
            continue
        }
    }
-   results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
-       ActiveAlerts: r.ActiveAlertsLabelFP(),
-   })
+   results, err := r.Threshold.ShouldAlert(*series, r.Unit())
    if err != nil {
        return nil, err
    }
@@ -414,7 +410,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
        GeneratorURL: r.GeneratorURL(),
        Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
        Missing: smpl.IsMissing,
-       IsRecovering: smpl.IsRecovering,
    }
}
@@ -427,9 +422,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro

    alert.Value = a.Value
    alert.Annotations = a.Annotations
-   // Update the recovering and missing state of existing alert
-   alert.IsRecovering = a.IsRecovering
-   alert.Missing = a.Missing
    if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
        alert.Receivers = ruleReceiverMap[v]
    }
@@ -488,30 +480,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
            Value: a.Value,
        })
    }
-
-   // We need to change firing alert to recovering if the returned sample meets recovery threshold
-   changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
-   // We need to change recovering alerts to firing if the returned sample meets target threshold
-   changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
-   // in any of the above case we need to update the status of alert
-   if changeFiringToRecovering || changeRecoveringToFiring {
-       state := model.StateRecovering
-       if changeRecoveringToFiring {
-           state = model.StateFiring
-       }
-       a.State = state
-       r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
-       itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
-           RuleID: r.ID(),
-           RuleName: r.Name(),
-           State: state,
-           StateChanged: true,
-           UnixMilli: ts.UnixMilli(),
-           Labels: model.LabelsString(labelsJSON),
-           Fingerprint: a.QueryResultLables.Hash(),
-           Value: a.Value,
-       })
-   }
}

currentState := r.State()

@@ -30,8 +30,6 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
        return sqlschema.DataTypeBoolean
    case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
        return sqlschema.DataTypeText
-   case "BYTEA":
-       return sqlschema.DataTypeBytea
    }

    return formatter.Formatter.DataTypeOf(dataType)

@@ -1,5 +1,5 @@
module.exports = {
-   ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
+   ignorePatterns: ['src/parser/*.ts'],
    env: {
        browser: true,
        es2021: true,

@@ -3,6 +3,5 @@ BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
PYLON_APP_ID="pylon-app-id"
APPCUES_APP_ID="appcess-app-id"
-PYLON_IDENTITY_SECRET="pylon-identity-secret"

CI="1"

@@ -14,7 +14,7 @@
    "jest": "jest",
    "jest:coverage": "jest --coverage",
    "jest:watch": "jest --watch",
-   "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure) && node scripts/update-registry.js",
+   "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
    "husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
    "commitlint": "commitlint --edit $1",
    "test": "jest",
@@ -38,7 +38,7 @@
    "@mdx-js/loader": "2.3.0",
    "@mdx-js/react": "2.3.0",
    "@monaco-editor/react": "^4.3.1",
-   "@playwright/test": "1.55.1",
+   "@playwright/test": "1.54.1",
    "@radix-ui/react-tabs": "1.0.4",
    "@radix-ui/react-tooltip": "1.0.7",
    "@sentry/react": "8.41.0",
@@ -83,7 +83,6 @@
    "color": "^4.2.1",
    "color-alpha": "1.1.3",
    "cross-env": "^7.0.3",
-   "crypto-js": "4.2.0",
    "css-loader": "5.0.0",
    "css-minimizer-webpack-plugin": "5.0.1",
    "d3-hierarchy": "3.1.2",
@@ -113,7 +112,7 @@
    "overlayscrollbars": "^2.8.1",
    "overlayscrollbars-react": "^0.5.6",
    "papaparse": "5.4.1",
-   "posthog-js": "1.298.0",
+   "posthog-js": "1.215.5",
    "rc-tween-one": "3.0.6",
    "react": "18.2.0",
    "react-addons-update": "15.6.3",
@@ -150,6 +149,7 @@
    "tsconfig-paths-webpack-plugin": "^3.5.1",
    "typescript": "^4.0.5",
    "uplot": "1.6.31",
+   "userpilot": "1.3.9",
    "uuid": "^8.3.2",
    "web-vitals": "^0.2.4",
    "webpack": "5.94.0",
@@ -186,7 +186,6 @@
    "@types/color": "^3.0.3",
    "@types/compression-webpack-plugin": "^9.0.0",
    "@types/copy-webpack-plugin": "^8.0.1",
-   "@types/crypto-js": "4.2.2",
    "@types/dompurify": "^2.4.0",
    "@types/event-source-polyfill": "^1.0.0",
    "@types/fontfaceobserver": "2.1.0",
@@ -281,7 +280,6 @@
    "got": "11.8.5",
    "form-data": "4.0.4",
    "brace-expansion": "^2.0.2",
-   "on-headers": "^1.1.0",
-   "tmp": "0.2.4"
+   "on-headers": "^1.1.0"
  }
}

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" fill="currentColor" fill-rule="evenodd" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>AWS</title><path d="M6.763 11.212q.002.446.088.71c.064.176.144.368.256.576.04.063.056.127.056.183q.002.12-.152.24l-.503.335a.4.4 0 0 1-.208.072q-.12-.002-.239-.112a2.5 2.5 0 0 1-.287-.375 6 6 0 0 1-.248-.471q-.934 1.101-2.347 1.101c-.67 0-1.205-.191-1.596-.574-.39-.384-.59-.894-.59-1.533 0-.678.24-1.23.726-1.644.487-.415 1.133-.623 1.955-.623.272 0 .551.024.846.064.296.04.6.104.918.176v-.583q-.001-.908-.375-1.277c-.255-.248-.686-.367-1.3-.367-.28 0-.568.031-.863.103s-.583.16-.862.272a2 2 0 0 1-.28.104.5.5 0 0 1-.127.023q-.168.002-.168-.247v-.391c0-.128.016-.224.056-.28a.6.6 0 0 1 .224-.167 4.6 4.6 0 0 1 1.005-.36 4.8 4.8 0 0 1 1.246-.151c.95 0 1.644.216 2.091.647q.661.646.662 1.963v2.586zm-3.24 1.214c.263 0 .534-.048.822-.144a1.8 1.8 0 0 0 .758-.51 1.3 1.3 0 0 0 .272-.512c.047-.191.08-.423.08-.694v-.335a7 7 0 0 0-.735-.136 6 6 0 0 0-.75-.048c-.535 0-.926.104-1.19.32-.263.215-.39.518-.39.917 0 .375.095.655.295.846.191.2.47.296.838.296m6.41.862c-.144 0-.24-.024-.304-.08-.064-.048-.12-.16-.168-.311L7.586 6.726a1.4 1.4 0 0 1-.072-.32c0-.128.064-.2.191-.2h.783q.227-.001.31.08c.065.048.113.16.16.312l1.342 5.284 1.245-5.284q.058-.24.151-.312a.55.55 0 0 1 .32-.08h.638c.152 0 .256.025.32.08.063.048.12.16.151.312l1.261 5.348 1.381-5.348q.074-.24.16-.312a.52.52 0 0 1 .311-.08h.743c.127 0 .2.065.2.2 0 .04-.009.08-.017.128a1 1 0 0 1-.056.2l-1.923 6.17q-.072.24-.168.311a.5.5 0 0 1-.303.08h-.687c-.15 0-.255-.024-.32-.08-.063-.056-.119-.16-.15-.32L12.32 7.747l-1.23 5.14c-.04.16-.087.264-.15.32-.065.056-.177.08-.32.08zm10.256.215c-.415 0-.83-.048-1.229-.143-.399-.096-.71-.2-.918-.32-.128-.071-.215-.151-.247-.223a.6.6 0 0 1-.048-.224v-.407c0-.167.064-.247.183-.247q.072 0 .144.024c.048.016.12.048.2.08q.408.181.878.279c.32.064.63.096.95.096.502 0 .894-.088 1.165-.264a.86.86 0 0 0 .415-.758.78.78 0 0 0-.215-.559c-.144-.151-.416-.287-.807-.415l-1.157-.36c-.583-.183-1.014-.454-1.277-.813a1.9 1.9 0 0 1-.4-1.158q0-.502.216-.886c.144-.255.335-.479.575-.654.24-.184.51-.32.83-.415.32-.096.655-.136 1.006-.136.175 0 .36.008.535.032.183.024.35.056.518.088q.24.058.455.127.216.072.336.144a.7.7 0 0 1 .24.2.43.43 0 0 1 .071.263v.375q-.002.254-.184.256a.8.8 0 0 1-.303-.096 3.65 3.65 0 0 0-1.532-.311c-.455 0-.815.071-1.062.223s-.375.383-.375.71c0 .224.08.416.24.567.16.152.454.304.877.44l1.134.358c.574.184.99.44 1.237.767s.367.702.367 1.117c0 .343-.072.655-.207.926a2.2 2.2 0 0 1-.583.703c-.248.2-.543.343-.886.447-.36.111-.734.167-1.142.167"/><path fill="#f90" d="M.378 15.475c3.384 1.963 7.56 3.153 11.877 3.153 2.914 0 6.114-.607 9.06-1.852.44-.2.814.287.383.607-2.626 1.94-6.442 2.969-9.722 2.969-4.598 0-8.74-1.7-11.87-4.526-.247-.223-.024-.527.272-.351m23.531-.2c.287.36-.08 2.826-1.485 4.007-.215.184-.423.088-.327-.151l.175-.439c.343-.88.802-2.198.52-2.555-.336-.43-2.22-.207-3.074-.103-.255.032-.295-.192-.063-.36 1.5-1.053 3.967-.75 4.254-.399"/></svg>
Deleted image (was 3.0 KiB).
File diff suppressed because one or more lines are too long. Deleted image (was 20 KiB).
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>Azure</title><path fill="url(#a)" d="M7.242 1.613A1.11 1.11 0 0 1 8.295.857h6.977L8.03 22.316a1.11 1.11 0 0 1-1.052.755h-5.43a1.11 1.11 0 0 1-1.053-1.466z"/><path fill="#0078d4" d="M18.397 15.296H7.4a.51.51 0 0 0-.347.882l7.066 6.595c.206.192.477.298.758.298h6.226z"/><path fill="url(#b)" d="M15.272.857H7.497L0 23.071h7.775l1.596-4.73 5.068 4.73h6.665l-2.707-7.775h-7.998z"/><path fill="url(#c)" d="M17.193 1.613a1.11 1.11 0 0 0-1.052-.756h-7.81.035c.477 0 .9.304 1.052.756l6.748 19.992a1.11 1.11 0 0 1-1.052 1.466h-.12 7.895a1.11 1.11 0 0 0 1.052-1.466z"/><defs><linearGradient id="a" x1="8.247" x2="1.002" y1="1.626" y2="23.03" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0669bc"/></linearGradient><linearGradient id="b" x1="14.042" x2="12.324" y1="15.302" y2="15.888" gradientUnits="userSpaceOnUse"><stop stop-opacity=".3"/><stop offset=".071" stop-opacity=".2"/><stop offset=".321" stop-opacity=".1"/><stop offset=".623" stop-opacity=".05"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" x1="12.841" x2="20.793" y1="1.626" y2="22.814" gradientUnits="userSpaceOnUse"><stop stop-color="#3ccbf4"/><stop offset="1" stop-color="#2892df"/></linearGradient></defs></svg>
Deleted image (was 1.3 KiB).
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>CrewAI</title><path fill="#461816" d="M19.41 10.783a2.75 2.75 0 0 1 2.471 1.355c.483.806.622 1.772.385 2.68l-.136.522a10 10 0 0 1-3.156 5.058c-.605.517-1.283 1.062-2.083 1.524l-.028.017c-.402.232-.884.511-1.398.756-1.19.602-2.475.997-3.798 1.167-.854.111-1.716.155-2.577.132h-.018a8.6 8.6 0 0 1-5.046-1.87l-.012-.01-.012-.01A8.02 8.02 0 0 1 1.22 17.42a10.9 10.9 0 0 1-.102-3.779A15.6 15.6 0 0 1 2.88 8.4a21.8 21.8 0 0 1 2.432-3.678 15.4 15.4 0 0 1 3.56-3.182A10 10 0 0 1 12.44.104h.004l.003-.002c2.057-.384 3.743.374 5.024 1.26a8.3 8.3 0 0 1 2.395 2.513l.024.04.023.042a5.47 5.47 0 0 1 .508 4.012c-.239.97-.577 1.914-1.01 2.814z"/><path fill="#fff" d="M18.861 13.165a.748.748 0 0 1 1.256.031c.199.332.256.73.159 1.103l-.137.522a7.94 7.94 0 0 1-2.504 4.014c-.572.49-1.138.939-1.774 1.306-.427.247-.857.496-1.303.707a9.6 9.6 0 0 1-3.155.973 14.3 14.3 0 0 1-2.257.116 6.53 6.53 0 0 1-3.837-1.422 5.97 5.97 0 0 1-2.071-3.494 8.9 8.9 0 0 1-.085-3.08 13.6 13.6 0 0 1 1.54-4.568 19.7 19.7 0 0 1 2.212-3.348 13.4 13.4 0 0 1 3.088-2.76 7.9 7.9 0 0 1 2.832-1.14c1.307-.245 2.434.207 3.481.933a6.2 6.2 0 0 1 1.806 1.892c.423.767.536 1.668.314 2.515a12.4 12.4 0 0 1-.99 2.67l-.223.497q-.48 1.07-.97 2.137a.76.76 0 0 1-.97.467 3.39 3.39 0 0 1-2.283-2.49c-.095-.83.04-1.669.39-2.426.288-.746.61-1.477.933-2.208l.248-.563a.53.53 0 0 0-.204-.742 2.35 2.35 0 0 0-1.2.702 25 25 0 0 0-1.614 1.767 21.6 21.6 0 0 0-2.619 4.184 7.6 7.6 0 0 0-.816 2.753 7 7 0 0 0 .07 2.219 2.055 2.055 0 0 0 1.934 1.715c1.801.1 3.59-.363 5.116-1.328a19 19 0 0 0 1.675-1.294c.752-.71 1.376-1.519 1.958-2.36"/></svg>
Deleted image (was 1.7 KiB).
File diff suppressed because one or more lines are too long. Deleted image (was 19 KiB).
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>PydanticAI</title><path fill="#e72564" d="M13.223 22.86c-.605.83-1.844.83-2.448 0L5.74 15.944a1.514 1.514 0 0 1 .73-2.322l5.035-1.738c.32-.11.668-.11.988 0l5.035 1.738c.962.332 1.329 1.5.73 2.322zm-1.224-1.259 4.688-6.439-4.688-1.618-4.688 1.618L12 21.602z"/><path fill="#e723a0" d="M23.71 13.463c.604.832.221 2.01-.756 2.328l-8.133 2.652a1.514 1.514 0 0 1-1.983-1.412l-.097-5.326c-.006-.338.101-.67.305-.94l3.209-4.25a1.514 1.514 0 0 1 2.434.022l5.022 6.926zm-1.574.775L17.46 7.79l-2.988 3.958.09 4.959z"/><path fill="#e520e9" d="M18.016.591a1.514 1.514 0 0 1 1.98 1.44l.009 8.554a1.514 1.514 0 0 1-1.956 1.45l-5.095-1.554a1.5 1.5 0 0 1-.8-.58l-3.05-4.366a1.514 1.514 0 0 1 .774-2.308zm.25 1.738L10.69 4.783l2.841 4.065 4.744 1.446-.008-7.965z"/><path fill="#e520e9" d="M5.99.595a1.514 1.514 0 0 0-1.98 1.44L4 10.588a1.514 1.514 0 0 0 1.956 1.45l5.095-1.554c.323-.098.605-.303.799-.58l3.052-4.366a1.514 1.514 0 0 0-.775-2.308zm-.25 1.738 7.577 2.454-2.842 4.065-4.743 1.446.007-7.965z"/><path fill="#e723a0" d="M.29 13.461a1.514 1.514 0 0 0 .756 2.329l8.133 2.651a1.514 1.514 0 0 0 1.983-1.412l.097-5.325a1.5 1.5 0 0 0-.305-.94L7.745 6.513a1.514 1.514 0 0 0-2.434.023L.289 13.461zm1.574.776L6.54 7.788l2.988 3.959-.09 4.958z"/><path fill="#ff96d1" d="m16.942 17.751 1.316-1.806q.178-.248.245-.523l-2.63.858-1.627 2.235a1.5 1.5 0 0 0 .575-.072zm-4.196-5.78.033 1.842 1.742.602-.034-1.843-1.741-.6zm7.257-3.622-1.314-1.812a1.5 1.5 0 0 0-.419-.393l.003 2.767 1.624 2.24q.107-.261.108-.566zm-5.038 2.746-1.762-.537 1.11-1.471 1.762.537zm-2.961-1.41 1.056-1.51-1.056-1.51-1.056 1.51zM9.368 3.509c.145-.122.316-.219.51-.282l2.12-.686 2.13.69c.191.062.36.157.503.276l-2.634.853zm1.433 7.053L9.691 9.09l-1.762.537 1.11 1.47 1.762-.537zm-6.696.584L5.733 8.9l.003-2.763c-.16.1-.305.232-.425.398L4.003 8.339l-.002 2.25q.002.299.104.557m7.149.824-1.741.601-.034 1.843 1.742-.601zM9.75 18.513l-1.628-2.237-2.629-.857q.068.276.247.525l1.313 1.804 2.126.693c.192.062.385.085.571.072"/></svg>
Deleted image (was 2.1 KiB).
@@ -1,50 +0,0 @@
/* eslint-disable @typescript-eslint/no-var-requires, import/no-dynamic-require, simple-import-sort/imports, simple-import-sort/exports */
const fs = require('fs');
const path = require('path');

// 1. Define paths
const packageJsonPath = path.resolve(__dirname, '../package.json');
const registryPath = path.resolve(
    __dirname,
    '../src/auto-import-registry.d.ts',
);

// 2. Read package.json
const packageJson = require(packageJsonPath);

// 3. Combine dependencies and devDependencies
const allDeps = {
    ...packageJson.dependencies,
    ...packageJson.devDependencies,
};

// 4. Filter for @signozhq packages
const signozPackages = Object.keys(allDeps).filter((dep) =>
    dep.startsWith('@signozhq/'),
);

// 5. Generate file content
const fileContent = `// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------

${signozPackages.map((pkg) => `import '${pkg}';`).join('\n')}
`;

// 6. Write the file
try {
    fs.writeFileSync(registryPath, fileContent);
    console.log(
        `✅ Auto-import registry updated with ${signozPackages.length} @signozhq packages.`,
    );
} catch (err) {
    console.error('❌ Failed to update auto-import registry:', err);
}

@@ -7,12 +7,11 @@ import AppLoading from 'components/AppLoading/AppLoading';
import KBarCommandPalette from 'components/KBarCommandPalette/KBarCommandPalette';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
+import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
-import Hex from 'crypto-js/enc-hex';
-import HmacSHA256 from 'crypto-js/hmac-sha256';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -34,6 +33,7 @@ import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { LicenseStatus } from 'types/api/licensesV3/getActive';
+import { Userpilot } from 'userpilot';
import { extractDomain } from 'utils/app';

import { Home } from './pageComponents';
@@ -84,9 +84,9 @@ function App(): JSX.Element {
    email,
    name: displayName,
    company_name: orgName,
-   deployment_name: hostNameParts[0],
+   tenant_id: hostNameParts[0],
    data_region: hostNameParts[1],
-   deployment_url: hostname,
+   tenant_url: hostname,
    company_domain: domain,
    source: 'signoz-ui',
    role,
@@ -94,9 +94,9 @@ function App(): JSX.Element {

  const groupTraits = {
    name: orgName,
-   deployment_name: hostNameParts[0],
+   tenant_id: hostNameParts[0],
    data_region: hostNameParts[1],
-   deployment_url: hostname,
+   tenant_url: hostname,
    company_domain: domain,
    source: 'signoz-ui',
  };
@@ -111,23 +111,37 @@ function App(): JSX.Element {
  if (window && window.Appcues) {
    window.Appcues.identify(id, {
      name: displayName,
-     deployment_name: hostNameParts[0],
+     tenant_id: hostNameParts[0],
      data_region: hostNameParts[1],
-     deployment_url: hostname,
+     tenant_url: hostname,
      company_domain: domain,
      companyName: orgName,
      email,
      paidUser: !!trialInfo?.trialConvertedToSubscription,
    });
  }

+ Userpilot.identify(email, {
+   email,
+   name: displayName,
+   orgName,
+   tenant_id: hostNameParts[0],
+   data_region: hostNameParts[1],
+   tenant_url: hostname,
+   company_domain: domain,
+   source: 'signoz-ui',
+   isPaidUser: !!trialInfo?.trialConvertedToSubscription,
+ });
+
  posthog?.identify(id, {
    email,
    name: displayName,
    orgName,
-   deployment_name: hostNameParts[0],
+   tenant_id: hostNameParts[0],
    data_region: hostNameParts[1],
-   deployment_url: hostname,
+   tenant_url: hostname,
    company_domain: domain,
    source: 'signoz-ui',
    isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -135,9 +149,9 @@ function App(): JSX.Element {

  posthog?.group('company', orgId, {
    name: orgName,
-   deployment_name: hostNameParts[0],
+   tenant_id: hostNameParts[0],
    data_region: hostNameParts[1],
-   deployment_url: hostname,
+   tenant_url: hostname,
    company_domain: domain,
    source: 'signoz-ui',
    isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -256,20 +270,11 @@ function App(): JSX.Element {
    !showAddCreditCardModal &&
    (isCloudUser || isEnterpriseSelfHostedUser)
  ) {
-   const email = user.email || '';
-   const secret = process.env.PYLON_IDENTITY_SECRET || '';
-   let emailHash = '';
-
-   if (email && secret) {
-     emailHash = HmacSHA256(email, Hex.parse(secret)).toString(Hex);
-   }
-
    window.pylon = {
      chat_settings: {
        app_id: process.env.PYLON_APP_ID,
        email: user.email,
-       name: user.displayName || user.email,
-       email_hash: emailHash,
+       name: user.displayName,
      },
    };
  }
@@ -303,6 +308,10 @@ function App(): JSX.Element {
    });
  }

+ if (process.env.USERPILOT_KEY) {
+   Userpilot.initialize(process.env.USERPILOT_KEY);
+ }
+
  if (!isSentryInitialized) {
    Sentry.init({
      dsn: process.env.SENTRY_DSN,
@@ -363,6 +372,7 @@ function App(): JSX.Element {
      <Router history={history}>
        <CompatRouter>
          <KBarCommandPaletteProvider>
+           <UserpilotRouteTracker />
            <KBarCommandPalette />
            <NotificationProvider>
              <ErrorModalProvider>

@@ -1,8 +1,6 @@
-import { LogEventAxiosInstance as axios } from 'api';
-import getLocalStorageApi from 'api/browser/localstorage/get';
+import { ApiBaseInstance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
-import { LOCALSTORAGE } from 'constants/localStorage';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { EventSuccessPayloadProps } from 'types/api/events/types';

@@ -13,14 +11,9 @@ const logEvent = async (
    rateLimited?: boolean,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
    try {
-       // add deployment_url and user_email to attributes
+       // add tenant_url to attributes
        const { hostname } = window.location;
-       const userEmail = getLocalStorageApi(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
-       const updatedAttributes = {
-           ...attributes,
-           deployment_url: hostname,
-           user_email: userEmail,
-       };
+       const updatedAttributes = { ...attributes, tenant_url: hostname };
        const response = await axios.post('/event', {
            eventName,
            attributes: updatedAttributes,

@@ -1,11 +1,13 @@
/* eslint-disable sonarjs/no-duplicate-string */
-import axios from 'api';
+import { ApiBaseInstance } from 'api';

import { getFieldKeys } from '../getFieldKeys';

// Mock the API instance
jest.mock('api', () => ({
-   get: jest.fn(),
+   ApiBaseInstance: {
+       get: jest.fn(),
+   },
}));

describe('getFieldKeys API', () => {
@@ -29,33 +31,33 @@ describe('getFieldKeys API', () => {

    it('should call API with correct parameters when no args provided', async () => {
        // Mock successful API response
-       (axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

        // Call function with no parameters
        await getFieldKeys();

        // Verify API was called correctly with empty params object
-       expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
            params: {},
        });
    });

    it('should call API with signal parameter when provided', async () => {
        // Mock successful API response
-       (axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

        // Call function with signal parameter
        await getFieldKeys('traces');

        // Verify API was called with signal parameter
-       expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
            params: { signal: 'traces' },
        });
    });

    it('should call API with name parameter when provided', async () => {
        // Mock successful API response
-       (axios.get as jest.Mock).mockResolvedValueOnce({
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
            status: 200,
            data: {
                status: 'success',
@@ -70,14 +72,14 @@ describe('getFieldKeys API', () => {
        await getFieldKeys(undefined, 'service');

        // Verify API was called with name parameter
-       expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
            params: { name: 'service' },
        });
    });

    it('should call API with both signal and name when provided', async () => {
        // Mock successful API response
-       (axios.get as jest.Mock).mockResolvedValueOnce({
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
            status: 200,
            data: {
                status: 'success',
@@ -92,14 +94,14 @@ describe('getFieldKeys API', () => {
        await getFieldKeys('logs', 'service');

        // Verify API was called with both parameters
-       expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
            params: { signal: 'logs', name: 'service' },
        });
    });

    it('should return properly formatted response', async () => {
        // Mock API to return our response
-       (axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

        // Call the function
        const result = await getFieldKeys('traces');

@@ -1,11 +1,13 @@
/* eslint-disable sonarjs/no-duplicate-string */
-import axios from 'api';
+import { ApiBaseInstance } from 'api';

import { getFieldValues } from '../getFieldValues';

// Mock the API instance
jest.mock('api', () => ({
-   get: jest.fn(),
+   ApiBaseInstance: {
+       get: jest.fn(),
+   },
}));

describe('getFieldValues API', () => {
@@ -15,7 +17,7 @@ describe('getFieldValues API', () => {

    it('should call the API with correct parameters (no options)', async () => {
        // Mock API response
-       (axios.get as jest.Mock).mockResolvedValueOnce({
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
            status: 200,
            data: {
                status: 'success',
@@ -32,14 +34,14 @@ describe('getFieldValues API', () => {
        await getFieldValues();

        // Verify API was called correctly with empty params
-       expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
            params: {},
        });
    });

    it('should call the API with signal parameter', async () => {
        // Mock API response
-       (axios.get as jest.Mock).mockResolvedValueOnce({
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
            status: 200,
            data: {
                status: 'success',
@@ -56,14 +58,14 @@ describe('getFieldValues API', () => {
        await getFieldValues('traces');

        // Verify API was called with signal parameter
-       expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
            params: { signal: 'traces' },
        });
    });

    it('should call the API with name parameter', async () => {
        // Mock API response
-       (axios.get as jest.Mock).mockResolvedValueOnce({
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
            status: 200,
            data: {
                status: 'success',
@@ -80,14 +82,14 @@ describe('getFieldValues API', () => {
        await getFieldValues(undefined, 'service.name');

        // Verify API was called with name parameter
-       expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
            params: { name: 'service.name' },
        });
    });

    it('should call the API with value parameter', async () => {
        // Mock API response
-       (axios.get as jest.Mock).mockResolvedValueOnce({
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
            status: 200,
            data: {
                status: 'success',
@@ -104,14 +106,14 @@ describe('getFieldValues API', () => {
        await getFieldValues(undefined, 'service.name', 'front');

        // Verify API was called with value parameter
-       expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
            params: { name: 'service.name', searchText: 'front' },
        });
    });

    it('should call the API with time range parameters', async () => {
        // Mock API response
-       (axios.get as jest.Mock).mockResolvedValueOnce({
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
            status: 200,
            data: {
                status: 'success',
@@ -136,7 +138,7 @@ describe('getFieldValues API', () => {
        );

        // Verify API was called with time range parameters (converted to milliseconds)
-       expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+       expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
            params: {
                signal: 'logs',
                name: 'service.name',
@@ -163,7 +165,7 @@ describe('getFieldValues API', () => {
            },
        };

-       (axios.get as jest.Mock).mockResolvedValueOnce(mockResponse);
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);

        // Call the function
        const result = await getFieldValues('traces', 'mixed.values');
@@ -194,7 +196,7 @@ describe('getFieldValues API', () => {
        };

        // Mock API to return our response
-       (axios.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
+       (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);

        // Call the function
        const result = await getFieldValues('traces', 'service.name');

@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -24,7 +24,7 @@ export const getFieldKeys = async (
    }

    try {
-       const response = await axios.get('/fields/keys', { params });
+       const response = await ApiBaseInstance.get('/fields/keys', { params });

        return {
            httpStatusCode: response.status,

@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -47,7 +47,7 @@ export const getFieldValues = async (
    }

    try {
-       const response = await axios.get('/fields/values', { params });
+       const response = await ApiBaseInstance.get('/fields/values', { params });

        // Normalize values from different types (stringValues, boolValues, etc.)
        if (response.data?.data?.values) {

@@ -86,9 +86,8 @@ const interceptorRejected = async (

    if (
        response.status === 401 &&
-       // if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
+       // if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
        response.config.url !== '/sessions/rotate' &&
-       response.config.url !== '/sessions/email_password' &&
        !(
            response.config.url === '/sessions' && response.config.method === 'delete'
        )
@@ -200,15 +199,15 @@ ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
//

// axios Base
-export const LogEventAxiosInstance = axios.create({
+export const ApiBaseInstance = axios.create({
    baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});

-LogEventAxiosInstance.interceptors.response.use(
+ApiBaseInstance.interceptors.response.use(
    interceptorsResponse,
    interceptorRejectedBase,
);
-LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
+ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V1

@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
@@ -17,7 +17,7 @@ export const getHostAttributeKeys = async (
    try {
        const response: AxiosResponse<{
            data: IQueryAutocompleteResponse;
-       }> = await axios.get(
+       }> = await ApiBaseInstance.get(
            `/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
            {
                params: {

@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -20,7 +20,7 @@ const getOnboardingStatus = async (props: {
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
    const { endpointService, ...rest } = props;
    try {
-       const response = await axios.post(
+       const response = await ApiBaseInstance.post(
            `/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
            rest,
        );

@@ -1,20 +1,13 @@
|
||||
import { ApiV2Instance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp } from 'types/api';
|
||||
import axios from 'api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getService';
|
||||
|
||||
const getService = async (props: Props): Promise<PayloadProps> => {
|
||||
try {
|
||||
const response = await ApiV2Instance.post(`/services`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
const response = await axios.post(`/services`, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
return response.data;
|
||||
};
|
||||
|
||||
export default getService;
|
||||
|
||||
@@ -1,27 +1,22 @@
|
||||
import { ApiV2Instance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp } from 'types/api';
|
||||
import axios from 'api';
|
||||
import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';
|
||||
|
||||
const getTopOperations = async (props: Props): Promise<PayloadProps> => {
|
||||
try {
|
||||
const endpoint = props.isEntryPoint
|
||||
? '/service/entry_point_operations'
|
||||
: '/service/top_operations';
|
||||
const endpoint = props.isEntryPoint
|
||||
? '/service/entry_point_operations'
|
||||
: '/service/top_operations';
|
||||
|
||||
const response = await ApiV2Instance.post(endpoint, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
tags: props.selectedTags,
|
||||
limit: 5000,
|
||||
});
|
||||
const response = await axios.post(endpoint, {
|
||||
start: `${props.start}`,
|
||||
end: `${props.end}`,
|
||||
service: props.service,
|
||||
tags: props.selectedTags,
|
||||
});
|
||||
|
||||
if (props.isEntryPoint) {
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
return response.data;
|
||||
};
|
||||
|
||||
export default getTopOperations;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import axios from 'api';
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
@@ -9,7 +9,7 @@ const getCustomFilters = async (
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
const { signal } = props;
|
||||
try {
|
||||
const response = await axios.get(`/orgs/me/filters/${signal}`);
|
||||
const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import axios from 'api';
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { AxiosError } from 'axios';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
|
||||
@@ -6,7 +6,7 @@ import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFil
|
||||
const updateCustomFiltersAPI = async (
|
||||
props: UpdateCustomFiltersProps,
|
||||
): Promise<SuccessResponse<void> | AxiosError> =>
|
||||
axios.put(`/orgs/me/filters`, {
|
||||
ApiBaseInstance.put(`orgs/me/filters`, {
|
||||
...props.data,
|
||||
});
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import axios from 'api';
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
@@ -9,12 +9,15 @@ const listOverview = async (
|
||||
): Promise<SuccessResponseV2<PayloadProps>> => {
|
||||
const { start, end, show_ip: showIp, filter } = props;
|
||||
try {
|
||||
const response = await axios.post(`/third-party-apis/overview/list`, {
|
||||
start,
|
||||
end,
|
||||
show_ip: showIp,
|
||||
filter,
|
||||
});
|
||||
const response = await ApiBaseInstance.post(
|
||||
`/third-party-apis/overview/list`,
|
||||
{
|
||||
start,
|
||||
end,
|
||||
show_ip: showIp,
|
||||
filter,
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import axios from 'api';
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
@@ -11,7 +11,7 @@ const getSpanPercentiles = async (
|
||||
props: GetSpanPercentilesProps,
|
||||
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
|
||||
try {
|
||||
const response = await axios.post('/span_percentile', {
|
||||
const response = await ApiBaseInstance.post('/span_percentile', {
|
||||
...props,
|
||||
});
|
||||
|
||||
|
||||
@@ -1,30 +1,30 @@
|
||||
interface ConfigureIconProps {
|
||||
width?: number;
|
||||
height?: number;
|
||||
color?: string;
|
||||
fill?: string;
|
||||
}
|
||||
|
||||
function ConfigureIcon({
|
||||
width,
|
||||
height,
|
||||
color,
|
||||
fill,
|
||||
}: ConfigureIconProps): JSX.Element {
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width={width}
|
||||
height={height}
|
||||
fill="none"
|
||||
fill={fill}
|
||||
>
|
||||
<path
|
||||
stroke={color}
|
||||
stroke="#C0C1C3"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
strokeWidth="1.333"
|
||||
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
|
||||
/>
|
||||
<path
|
||||
stroke={color}
|
||||
stroke="#C0C1C3"
|
||||
strokeLinecap="round"
|
||||
strokeWidth="1.333"
|
||||
d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3m5.417 7.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"
|
||||
@@ -36,6 +36,6 @@ function ConfigureIcon({
|
||||
ConfigureIcon.defaultProps = {
|
||||
width: 16,
|
||||
height: 16,
|
||||
color: 'currentColor',
|
||||
fill: 'none',
|
||||
};
|
||||
export default ConfigureIcon;
|
||||
|
||||
23
frontend/src/auto-import-registry.d.ts
vendored
23
frontend/src/auto-import-registry.d.ts
vendored
@@ -1,23 +0,0 @@
|
||||
// -------------------------------------------------------------------------
|
||||
// AUTO-GENERATED FILE
|
||||
// -------------------------------------------------------------------------
|
||||
// This file is generated by scripts/update-registry.js automatically
|
||||
// whenever you run 'yarn install' or 'npm install'.
|
||||
//
|
||||
// It forces VS Code to index these specific packages to fix auto-import
|
||||
// performance issues in TypeScript 4.x.
|
||||
//
|
||||
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
import '@signozhq/badge';
|
||||
import '@signozhq/button';
|
||||
import '@signozhq/calendar';
|
||||
import '@signozhq/callout';
|
||||
import '@signozhq/design-tokens';
|
||||
import '@signozhq/input';
|
||||
import '@signozhq/popover';
|
||||
import '@signozhq/resizable';
|
||||
import '@signozhq/sonner';
|
||||
import '@signozhq/table';
|
||||
import '@signozhq/tooltip';
|
||||
@@ -1,6 +1,5 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { PrecisionOptionsEnum } from '../types';
|
||||
import { getYAxisFormattedValue } from '../yAxisConfig';
|
||||
import { getYAxisFormattedValue, PrecisionOptionsEnum } from '../yAxisConfig';
|
||||
|
||||
const testFullPrecisionGetYAxisFormattedValue = (
|
||||
value: string,
|
||||
@@ -233,7 +232,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', ()
|
||||
).toBe('1%');
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'percent'),
|
||||
).toBe('1.005555555595959%');
|
||||
).toBe('1.005555555595958%');
|
||||
});
|
||||
|
||||
test('ratio', () => {
|
||||
@@ -360,7 +359,7 @@ describe('getYAxisFormattedValue - precision option tests', () => {
|
||||
's',
|
||||
PrecisionOptionsEnum.FULL,
|
||||
),
|
||||
).toBe('26.254299141484417 µs');
|
||||
).toBe('26254299141484417000000 µs');
|
||||
|
||||
expect(
|
||||
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
|
||||
|
||||
@@ -78,18 +78,3 @@ export interface ITimeRange {
|
||||
minTime: number | null;
|
||||
maxTime: number | null;
|
||||
}
|
||||
|
||||
export const DEFAULT_SIGNIFICANT_DIGITS = 15;
|
||||
|
||||
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
|
||||
export const MAX_DECIMALS = 15;
|
||||
|
||||
export enum PrecisionOptionsEnum {
|
||||
ZERO = 0,
|
||||
ONE = 1,
|
||||
TWO = 2,
|
||||
THREE = 3,
|
||||
FOUR = 4,
|
||||
FULL = 'full',
|
||||
}
|
||||
export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
|
||||
|
||||
@@ -16,12 +16,8 @@ import {
|
||||
} from './Plugin/IntersectionCursor';
|
||||
import {
|
||||
CustomChartOptions,
|
||||
DEFAULT_SIGNIFICANT_DIGITS,
|
||||
GraphOnClickHandler,
|
||||
IAxisTimeConfig,
|
||||
MAX_DECIMALS,
|
||||
PrecisionOption,
|
||||
PrecisionOptionsEnum,
|
||||
StaticLineProps,
|
||||
} from './types';
|
||||
import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';
|
||||
@@ -153,7 +149,6 @@ export const getGraphOptions = (
|
||||
scales: {
|
||||
x: {
|
||||
stacked: isStacked,
|
||||
offset: false,
|
||||
grid: {
|
||||
display: true,
|
||||
color: getGridColor(),
|
||||
@@ -246,68 +241,3 @@ declare module 'chart.js' {
|
||||
custom: TooltipPositionerFunction<ChartType>;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a number for display, preserving leading zeros after the decimal point
|
||||
* and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
|
||||
* It avoids scientific notation and removes unnecessary trailing zeros.
|
||||
*
|
||||
* @example
|
||||
* formatDecimalWithLeadingZeros(1.2345); // "1.2345"
|
||||
* formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
|
||||
* formatDecimalWithLeadingZeros(5.0); // "5"
|
||||
*
|
||||
* @param value The number to format.
|
||||
* @returns The formatted string.
|
||||
*/
|
||||
export const formatDecimalWithLeadingZeros = (
|
||||
value: number,
|
||||
precision: PrecisionOption,
|
||||
): string => {
|
||||
if (value === 0) {
|
||||
return '0';
|
||||
}
|
||||
|
||||
// Use toLocaleString to get a full decimal representation without scientific notation.
|
||||
const numStr = value.toLocaleString('en-US', {
|
||||
useGrouping: false,
|
||||
maximumFractionDigits: 20,
|
||||
});
|
||||
|
||||
const [integerPart, decimalPart = ''] = numStr.split('.');
|
||||
|
||||
// If there's no decimal part, the integer part is the result.
|
||||
if (!decimalPart) {
|
||||
return integerPart;
|
||||
}
|
||||
|
||||
// Find the index of the first non-zero digit in the decimal part.
|
||||
const firstNonZeroIndex = decimalPart.search(/[^0]/);
|
||||
|
||||
// If the decimal part consists only of zeros, return just the integer part.
|
||||
if (firstNonZeroIndex === -1) {
|
||||
return integerPart;
|
||||
}
|
||||
|
||||
// Determine the number of decimals to keep: leading zeros + up to N significant digits.
|
||||
const significantDigits =
|
||||
precision === PrecisionOptionsEnum.FULL
|
||||
? DEFAULT_SIGNIFICANT_DIGITS
|
||||
: precision;
|
||||
const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
|
||||
|
||||
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
|
||||
const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
|
||||
const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
|
||||
|
||||
// If precision is 0, we drop the decimal part entirely.
|
||||
if (precision === 0) {
|
||||
return integerPart;
|
||||
}
|
||||
|
||||
// Remove any trailing zeros from the result to keep it clean.
|
||||
const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
|
||||
|
||||
// Return the integer part, or the integer and decimal parts combined.
|
||||
return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
|
||||
};
|
||||
|
||||
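A quick worked example of the precision handling in formatDecimalWithLeadingZeros above; the expected strings follow from the logic shown in this hunk and are illustrative, not copied from the PR's test fixtures:

// Illustrative only: for 0.000123456789 the first non-zero decimal digit is at index 3.
formatDecimalWithLeadingZeros(0.000123456789, 2); // keeps 3 + 2 = 5 decimals -> '0.00012'
formatDecimalWithLeadingZeros(0.000123456789, PrecisionOptionsEnum.FULL); // capped at MAX_DECIMALS -> '0.000123456789'
formatDecimalWithLeadingZeros(42.5, 0); // precision 0 drops the decimal part -> '42'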
@@ -1,17 +1,86 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { formattedValueToString, getValueFormat } from '@grafana/data';
import * as Sentry from '@sentry/react';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { isUniversalUnit } from 'components/YAxisUnitSelector/utils';
import { isNaN } from 'lodash-es';

import { formatUniversalUnit } from '../YAxisUnitSelector/formatter';
import {
DEFAULT_SIGNIFICANT_DIGITS,
PrecisionOption,
PrecisionOptionsEnum,
} from './types';
import { formatDecimalWithLeadingZeros } from './utils';
const DEFAULT_SIGNIFICANT_DIGITS = 15;
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const MAX_DECIMALS = 15;

export enum PrecisionOptionsEnum {
ZERO = 0,
ONE = 1,
TWO = 2,
THREE = 3,
FOUR = 4,
FULL = 'full',
}
export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;

/**
* Formats a number for display, preserving leading zeros after the decimal point
* and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
* It avoids scientific notation and removes unnecessary trailing zeros.
*
* @example
* formatDecimalWithLeadingZeros(1.2345); // "1.2345"
* formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
* formatDecimalWithLeadingZeros(5.0); // "5"
*
* @param value The number to format.
* @returns The formatted string.
*/
const formatDecimalWithLeadingZeros = (
value: number,
precision: PrecisionOption,
): string => {
if (value === 0) {
return '0';
}

// Use toLocaleString to get a full decimal representation without scientific notation.
const numStr = value.toLocaleString('en-US', {
useGrouping: false,
maximumFractionDigits: 20,
});

const [integerPart, decimalPart = ''] = numStr.split('.');

// If there's no decimal part, the integer part is the result.
if (!decimalPart) {
return integerPart;
}

// Find the index of the first non-zero digit in the decimal part.
const firstNonZeroIndex = decimalPart.search(/[^0]/);

// If the decimal part consists only of zeros, return just the integer part.
if (firstNonZeroIndex === -1) {
return integerPart;
}

// Determine the number of decimals to keep: leading zeros + up to N significant digits.
const significantDigits =
precision === PrecisionOptionsEnum.FULL
? DEFAULT_SIGNIFICANT_DIGITS
: precision;
const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);

// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);

// If precision is 0, we drop the decimal part entirely.
if (precision === 0) {
return integerPart;
}

// Remove any trailing zeros from the result to keep it clean.
const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');

// Return the integer part, or the integer and decimal parts combined.
return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
};

/**
* Formats a Y-axis value based on a given format string.
@@ -32,10 +101,19 @@ export const getYAxisFormattedValue = (
if (numValue === Infinity) return '∞';
if (numValue === -Infinity) return '-∞';

const decimalPlaces = value.split('.')[1]?.length || undefined;

// Use custom formatter for the 'none' format honoring precision
if (format === 'none') {
return formatDecimalWithLeadingZeros(numValue, precision);
}

// For all other standard formats, delegate to grafana/data's built-in formatter.
const computeDecimals = (): number | undefined => {
if (precision === PrecisionOptionsEnum.FULL) {
return DEFAULT_SIGNIFICANT_DIGITS;
return decimalPlaces && decimalPlaces >= DEFAULT_SIGNIFICANT_DIGITS
? decimalPlaces
: DEFAULT_SIGNIFICANT_DIGITS;
}
return precision;
};
@@ -52,22 +130,6 @@ export const getYAxisFormattedValue = (
};

try {
// Use custom formatter for the 'none' format honoring precision
if (format === 'none') {
return formatDecimalWithLeadingZeros(numValue, precision);
}

// Separate logic for universal units
if (format && isUniversalUnit(format)) {
const decimals = computeDecimals();
return formatUniversalUnit(
numValue,
format as UniversalYAxisUnit,
precision,
decimals,
);
}

const formatter = getValueFormat(format);
const formattedValue = formatter(numValue, computeDecimals(), undefined);
if (formattedValue.text && formattedValue.text.includes('.')) {
@@ -76,7 +138,6 @@ export const getYAxisFormattedValue = (
precision,
);
}

return formattedValueToString(formattedValue);
} catch (error) {
Sentry.captureEvent({

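A short sketch of what the reworked computeDecimals above implies for non-'none' formats under PrecisionOptionsEnum.FULL; the exact rendered string still comes from @grafana/data's getValueFormat, so these are only the decimals passed to it:

// With FULL precision, decimals now follows the input's own decimal places when it is larger:
// value '1.00555555559595876' -> 17 decimal places -> decimals = 17
// value '4353.81'             -> 2 decimal places  -> decimals = 15 (DEFAULT_SIGNIFICANT_DIGITS)
// With a numeric precision (0-4), decimals is simply that precision.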
@@ -10,6 +10,10 @@ import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import {
LOG_FIELD_BODY_KEY,
LOG_FIELD_TIMESTAMP_KEY,
} from 'lib/logs/flatLogData';
import { useCallback, useEffect, useMemo } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso } from 'react-virtuoso';
@@ -85,11 +89,15 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
dataType: 'string',
type: '',
name: 'body',
displayName: 'Body',
key: LOG_FIELD_BODY_KEY,
},
{
dataType: 'string',
type: '',
name: 'timestamp',
displayName: 'Timestamp',
key: LOG_FIELD_TIMESTAMP_KEY,
},
]}
/>

@@ -37,6 +37,7 @@

border-radius: 2px 0px 0px 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);

border-right: none;
border-left: none;
@@ -44,12 +45,6 @@
border-bottom-right-radius: 0px;
border-top-left-radius: 0px;
border-bottom-left-radius: 0px;
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}

.close-btn {

@@ -6,7 +6,6 @@ import { useCopyToClipboard } from 'react-use';
function CopyClipboardHOC({
entityKey,
textToCopy,
tooltipText = 'Copy to clipboard',
children,
}: CopyClipboardHOCProps): JSX.Element {
const [value, setCopy] = useCopyToClipboard();
@@ -32,7 +31,7 @@ function CopyClipboardHOC({
<span onClick={onClick} role="presentation" tabIndex={-1}>
<Popover
placement="top"
content={<span style={{ fontSize: '0.9rem' }}>{tooltipText}</span>}
content={<span style={{ fontSize: '0.9rem' }}>Copy to clipboard</span>}
>
{children}
</Popover>
@@ -43,11 +42,7 @@ function CopyClipboardHOC({
interface CopyClipboardHOCProps {
entityKey: string | undefined;
textToCopy: string;
tooltipText?: string;
children: ReactNode;
}

export default CopyClipboardHOC;
CopyClipboardHOC.defaultProps = {
tooltipText: 'Copy to clipboard',
};

@@ -13,6 +13,10 @@ import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useIsDarkMode } from 'hooks/useDarkMode';
// utils
import { FlatLogData } from 'lib/logs/flatLogData';
import {
LOG_FIELD_BODY_KEY,
LOG_FIELD_TIMESTAMP_KEY,
} from 'lib/logs/flatLogData';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useMemo, useState } from 'react';
// interfaces
@@ -42,7 +46,9 @@ interface LogFieldProps {
}

type LogSelectedFieldProps = Omit<LogFieldProps, 'linesPerRow'> &
Pick<AddToQueryHOCProps, 'onAddToQuery'>;
Pick<AddToQueryHOCProps, 'onAddToQuery'> & {
fieldKeyDisplay: string;
};

function LogGeneralField({
fieldKey,
@@ -74,6 +80,7 @@ function LogGeneralField({
function LogSelectedField({
fieldKey = '',
fieldValue = '',
fieldKeyDisplay = '',
onAddToQuery,
fontSize,
}: LogSelectedFieldProps): JSX.Element {
@@ -90,7 +97,7 @@ function LogSelectedField({
style={{ color: blue[4] }}
className={cx('selected-log-field-key', fontSize)}
>
{fieldKey}
{fieldKeyDisplay}
</span>
</Typography.Text>
</AddToQueryHOC>
@@ -162,7 +169,7 @@ function ListLogView({
);

const updatedSelecedFields = useMemo(
() => selectedFields.filter((e) => e.name !== 'id'),
() => selectedFields.filter((e) => e.key !== 'id'),
[selectedFields],
);

@@ -170,16 +177,16 @@ function ListLogView({

const timestampValue = useMemo(
() =>
typeof flattenLogData.timestamp === 'string'
typeof flattenLogData[LOG_FIELD_TIMESTAMP_KEY] === 'string'
? formatTimezoneAdjustedTimestamp(
flattenLogData.timestamp,
flattenLogData[LOG_FIELD_TIMESTAMP_KEY],
DATE_TIME_FORMATS.ISO_DATETIME_MS,
)
: formatTimezoneAdjustedTimestamp(
flattenLogData.timestamp / 1e6,
flattenLogData[LOG_FIELD_TIMESTAMP_KEY] / 1e6,
DATE_TIME_FORMATS.ISO_DATETIME_MS,
),
[flattenLogData.timestamp, formatTimezoneAdjustedTimestamp],
[flattenLogData, formatTimezoneAdjustedTimestamp],
);

const logType = getLogIndicatorType(logData);
@@ -215,10 +222,12 @@ function ListLogView({
/>
<div>
<LogContainer fontSize={fontSize}>
{updatedSelecedFields.some((field) => field.name === 'body') && (
{updatedSelecedFields.some(
(field) => field.key === LOG_FIELD_BODY_KEY,
) && (
<LogGeneralField
fieldKey="Log"
fieldValue={flattenLogData.body}
fieldValue={flattenLogData[LOG_FIELD_BODY_KEY]}
linesPerRow={linesPerRow}
fontSize={fontSize}
/>
@@ -230,7 +239,9 @@ function ListLogView({
fontSize={fontSize}
/>
)}
{updatedSelecedFields.some((field) => field.name === 'timestamp') && (
{updatedSelecedFields.some(
(field) => field.key === LOG_FIELD_TIMESTAMP_KEY,
) && (
<LogGeneralField
fieldKey="Timestamp"
fieldValue={timestampValue}
@@ -239,13 +250,17 @@ function ListLogView({
)}

{updatedSelecedFields
.filter((field) => !['timestamp', 'body'].includes(field.name))
.filter(
(field) =>
![LOG_FIELD_TIMESTAMP_KEY, LOG_FIELD_BODY_KEY].includes(field.key),
)
.map((field) =>
isValidLogField(flattenLogData[field.name] as never) ? (
isValidLogField(flattenLogData[field.key] as never) ? (
<LogSelectedField
key={field.name}
fieldKey={field.name}
fieldValue={flattenLogData[field.name] as never}
key={field.key}
fieldKey={field.key}
fieldKeyDisplay={field.displayName}
fieldValue={flattenLogData[field.key] as never}
onAddToQuery={onAddToQuery}
fontSize={fontSize}
/>

@@ -73,16 +73,25 @@ function RawLogView({
);

const attributesValues = updatedSelecedFields
.filter((field) => !['timestamp', 'body'].includes(field.name))
.map((field) => flattenLogData[field.name])
.filter((attribute) => {
.filter(
(field) => !['log.timestamp:string', 'log.body:string'].includes(field.key),
)
.map((field) => {
const value = flattenLogData[field.key];
const label = field.displayName;

// loadash isEmpty doesnot work with numbers
if (isNumber(attribute)) {
return true;
if (isNumber(value)) {
return `${label}: ${value}`;
}

return !isUndefined(attribute) && !isEmpty(attribute);
});
if (!isUndefined(value) && !isEmpty(value)) {
return `${label}: ${value}`;
}

return null;
})
.filter((attribute) => attribute !== null);

let attributesText = attributesValues.join(' | ');

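With the RawLogView change above, each surviving attribute is rendered as a `${label}: ${value}` pair and the pairs are joined with ' | '. A sketch of the resulting string, using hypothetical field names purely for illustration:

// Hypothetical fields: service.name = 'frontend', http.status_code = 200
// attributesText === 'service.name: frontend | http.status_code: 200'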
@@ -6,7 +6,11 @@ import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { FlatLogData } from 'lib/logs/flatLogData';
import {
FlatLogData,
LOG_FIELD_BODY_KEY,
LOG_FIELD_TIMESTAMP_KEY,
} from 'lib/logs/flatLogData';
import { useTimezone } from 'providers/Timezone';
import { useMemo } from 'react';

@@ -51,28 +55,33 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {

const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
.filter((e) => !['id', 'body', 'timestamp'].includes(e.name))
.map(({ name }) => ({
title: name,
dataIndex: name,
accessorKey: name,
id: name.toLowerCase().replace(/\./g, '_'),
key: name,
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
props: {
style: isListViewPanel
? defaultListViewPanelStyle
: getDefaultCellStyle(isDarkMode),
},
children: (
<Typography.Paragraph
ellipsis={{ rows: linesPerRow }}
className={cx('paragraph', fontSize)}
>
{field}
</Typography.Paragraph>
),
}),
.filter(
(e) => !['id', LOG_FIELD_BODY_KEY, LOG_FIELD_TIMESTAMP_KEY].includes(e.key),
)
.map((field) => ({
title: field.displayName,
dataIndex: field.key,
accessorKey: field.key,
id: field.key.toLowerCase().replace(/\./g, '_').replace(/:/g, '_'),
key: field.key,
render: (fieldValue, record): ColumnTypeRender<Record<string, unknown>> => {
const value = record[field.key] || fieldValue;
return {
props: {
style: isListViewPanel
? defaultListViewPanelStyle
: getDefaultCellStyle(isDarkMode),
},
children: (
<Typography.Paragraph
ellipsis={{ rows: linesPerRow }}
className={cx('paragraph', fontSize)}
>
{value}
</Typography.Paragraph>
),
};
},
}));

if (isListViewPanel) {
@@ -100,26 +109,29 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
),
}),
},
...(fields.some((field) => field.name === 'timestamp')
...(fields.some((field) => field.key === LOG_FIELD_TIMESTAMP_KEY)
? [
{
title: 'timestamp',
dataIndex: 'timestamp',
dataIndex: LOG_FIELD_TIMESTAMP_KEY,
key: 'timestamp',
accessorKey: 'timestamp',
accessorKey: LOG_FIELD_TIMESTAMP_KEY,
id: 'timestamp',
// https://github.com/ant-design/ant-design/discussions/36886
render: (
field: string | number,
record: Record<string, unknown>,
): ColumnTypeRender<Record<string, unknown>> => {
const timestampValue =
(record[LOG_FIELD_TIMESTAMP_KEY] as string | number) || field;
const date =
typeof field === 'string'
typeof timestampValue === 'string'
? formatTimezoneAdjustedTimestamp(
field,
timestampValue,
DATE_TIME_FORMATS.ISO_DATETIME_MS,
)
: formatTimezoneAdjustedTimestamp(
field / 1e6,
timestampValue / 1e6,
DATE_TIME_FORMATS.ISO_DATETIME_MS,
);
return {
@@ -136,33 +148,37 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
]
: []),
...(appendTo === 'center' ? fieldColumns : []),
...(fields.some((field) => field.name === 'body')
...(fields.some((field) => field.key === LOG_FIELD_BODY_KEY)
? [
{
title: 'body',
dataIndex: 'body',
dataIndex: LOG_FIELD_BODY_KEY,
key: 'body',
accessorKey: 'body',
accessorKey: LOG_FIELD_BODY_KEY,
id: 'body',
render: (
field: string | number,
): ColumnTypeRender<Record<string, unknown>> => ({
props: {
style: bodyColumnStyle,
},
children: (
<TableBodyContent
dangerouslySetInnerHTML={{
__html: getSanitizedLogBody(field as string, {
shouldEscapeHtml: true,
}),
}}
fontSize={fontSize}
linesPerRow={linesPerRow}
isDarkMode={isDarkMode}
/>
),
}),
record: Record<string, unknown>,
): ColumnTypeRender<Record<string, unknown>> => {
const bodyValue = (record[LOG_FIELD_BODY_KEY] as string) || '';
return {
props: {
style: bodyColumnStyle,
},
children: (
<TableBodyContent
dangerouslySetInnerHTML={{
__html: getSanitizedLogBody(bodyValue, {
shouldEscapeHtml: true,
}),
}}
fontSize={fontSize}
linesPerRow={linesPerRow}
isDarkMode={isDarkMode}
/>
),
};
},
},
]
: []),

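The log-view hunks above key columns by field.key and label them with field.displayName, importing LOG_FIELD_BODY_KEY and LOG_FIELD_TIMESTAMP_KEY from lib/logs/flatLogData. Judging by the literal filter in the RawLogView hunk, these constants appear to be the flattened keys sketched below; this is an assumption inferred from the diff, not verified against flatLogData.ts:

// Assumed values, inferred from the RawLogView filter above.
const LOG_FIELD_BODY_KEY = 'log.body:string';
const LOG_FIELD_TIMESTAMP_KEY = 'log.timestamp:string';

// Columns are addressed by the flattened key, while the visible header uses displayName.
const bodyColumn = { key: LOG_FIELD_BODY_KEY, displayName: 'Body' };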
@@ -416,18 +416,21 @@ function OptionsMenu({
)}

<div className="column-format">
{addColumn?.value?.map(({ name }) => (
<div className="column-name" key={name}>
{addColumn?.value?.map((column) => (
<div className="column-name" key={column.key}>
<div className="name">
<Tooltip placement="left" title={name}>
{name}
<Tooltip
placement="left"
title={column.displayName || column.name}
>
{column.displayName || column.name}
</Tooltip>
</div>
{addColumn?.value?.length > 1 && (
<X
className="delete-btn"
size={14}
onClick={(): void => addColumn.onRemove(name)}
onClick={(): void => addColumn.onRemove(column.key)}
/>
)}
</div>
@@ -471,13 +474,11 @@ function LogsFormatOptionsMenu({
rootClassName="format-options-popover"
destroyTooltipOnHide
>
<Tooltip title="Options">
<Button
className="periscope-btn ghost"
icon={<Sliders size={14} />}
data-testid="periscope-btn-format-options"
/>
</Tooltip>
<Button
className="periscope-btn ghost"
icon={<Sliders size={14} />}
data-testid="periscope-btn-format-options"
/>
</Popover>
);
}

@@ -251,10 +251,6 @@
.ant-input-group-addon {
border-top-left-radius: 0px !important;
border-top-right-radius: 0px !important;
background: var(--bg-ink-300);
color: var(--bg-vanilla-400);
font-size: 12px;
font-weight: 300;
}

.ant-input {
@@ -300,10 +296,6 @@
}
}
.qb-trace-operator-button-container {
display: flex;
align-items: center;
gap: 8px;

&-text {
display: flex;
align-items: center;

@@ -179,7 +179,6 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
isListViewPanel={isListViewPanel}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
queriesCount={1}
/>
) : (
currentQuery.builder.queryData.map((query, index) => (
@@ -201,7 +200,6 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
signalSource={query.source as 'meter' | ''}
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
signalSourceChangeEnabled={signalSourceChangeEnabled}
queriesCount={currentQuery.builder.queryData.length}
/>
))
)}

@@ -98,13 +98,6 @@
border-radius: 2px;
border: 1.005px solid var(--Slate-400, #1d212d);
background: var(--Ink-300, #16181d);
color: var(--bg-vanilla-400);
font-family: 'Geist Mono';
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
}

.input-with-label {

@@ -6,15 +6,6 @@
gap: 8px;
width: 100%;

.ant-select-selection-search-input {
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}

.source-selector {
width: 120px;
}
@@ -31,11 +22,6 @@
font-weight: 400;
line-height: 20px; /* 142.857% */
min-height: 36px;

.ant-select-selection-placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}

.ant-select-dropdown {

@@ -236,10 +236,6 @@
background: var(--bg-ink-100) !important;
opacity: 0.5 !important;
}

.cm-activeLine > span {
font-size: 12px !important;
}
}
}

@@ -275,9 +271,6 @@

box-sizing: border-box;
position: relative;
.cm-placeholder {
font-size: 12px !important;
}
}
}

@@ -20,8 +20,6 @@
border-radius: 2px;
flex: 1;
min-width: 0;
font-size: 12px;
color: var(--bg-vanilla-400) !important;

&.error {
.cm-editor {
@@ -233,9 +231,6 @@
.query-aggregation-interval-input {
input {
max-width: 120px;
&::placeholder {
color: var(--bg-vanilla-400);
}
}
}
}

@@ -1,7 +0,0 @@
.add-trace-operator-button,
.add-new-query-button,
.add-formula-button {
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
}
@@ -1,75 +1,7 @@
import './QueryFooter.styles.scss';

/* eslint-disable react/require-default-props */
import { Button, Tooltip, Typography } from 'antd';
import WarningPopover from 'components/WarningPopover/WarningPopover';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { DraftingCompass, Plus, Sigma } from 'lucide-react';
import BetaTag from 'periscope/components/BetaTag/BetaTag';
import { useMemo } from 'react';

function TraceOperatorSection({
addTraceOperator,
}: {
addTraceOperator?: () => void;
}): JSX.Element {
const { currentQuery, panelType } = useQueryBuilder();

const showTraceOperatorWarning = useMemo(() => {
const isListViewPanel =
panelType === PANEL_TYPES.LIST || panelType === PANEL_TYPES.TRACE;
const hasMultipleQueries = currentQuery.builder.queryData.length > 1;
const hasTraceOperator =
currentQuery.builder.queryTraceOperator &&
currentQuery.builder.queryTraceOperator.length > 0;
return isListViewPanel && hasMultipleQueries && !hasTraceOperator;
}, [
currentQuery?.builder?.queryData,
currentQuery?.builder?.queryTraceOperator,
panelType,
]);

const traceOperatorWarning = useMemo(() => {
if (currentQuery.builder.queryData.length === 0) return '';
const firstQuery = currentQuery.builder.queryData[0];
return `Currently, you are only seeing results from query ${firstQuery.queryName}. Add a trace operator to combine results of multiple queries.`;
}, [currentQuery]);
return (
<div className="qb-trace-operator-button-container">
<Tooltip
title={
<div style={{ textAlign: 'center' }}>
Add Trace Matching
<Typography.Link
href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-trace-operators"
target="_blank"
style={{ textDecoration: 'underline' }}
>
{' '}
<br />
Learn more
</Typography.Link>
</div>
}
>
<Button
className="add-trace-operator-button periscope-btn"
icon={<DraftingCompass size={16} />}
onClick={(): void => addTraceOperator?.()}
>
<div className="qb-trace-operator-button-container-text">
Add Trace Matching
<BetaTag />
</div>
</Button>
</Tooltip>
{showTraceOperatorWarning && (
<WarningPopover message={traceOperatorWarning} />
)}
</div>
);
}

export default function QueryFooter({
addNewBuilderQuery,
@@ -90,7 +22,8 @@ export default function QueryFooter({
<div className="qb-add-new-query">
<Tooltip title={<div style={{ textAlign: 'center' }}>Add New Query</div>}>
<Button
className="add-new-query-button periscope-btn "
className="add-new-query-button periscope-btn secondary"
type="text"
icon={<Plus size={16} />}
onClick={addNewBuilderQuery}
/>
@@ -116,7 +49,7 @@ export default function QueryFooter({
}
>
<Button
className="add-formula-button periscope-btn "
className="add-formula-button periscope-btn secondary"
icon={<Sigma size={16} />}
onClick={addNewFormula}
>
@@ -126,7 +59,35 @@ export default function QueryFooter({
</div>
)}
{showAddTraceOperator && (
<TraceOperatorSection addTraceOperator={addTraceOperator} />
<div className="qb-trace-operator-button-container">
<Tooltip
title={
<div style={{ textAlign: 'center' }}>
Add Trace Matching
<Typography.Link
href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-trace-operators"
target="_blank"
style={{ textDecoration: 'underline' }}
>
{' '}
<br />
Learn more
</Typography.Link>
</div>
}
>
<Button
className="add-trace-operator-button periscope-btn secondary"
icon={<DraftingCompass size={16} />}
onClick={(): void => addTraceOperator?.()}
>
<div className="qb-trace-operator-button-container-text">
Add Trace Matching
<BetaTag />
</div>
</Button>
</Tooltip>
</div>
)}
</div>
</div>

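The QueryFooter hunks above swap between an extracted TraceOperatorSection helper, which also showed a WarningPopover when a list or trace panel had multiple queries but no trace operator, and the inline Tooltip/Button markup. In the extracted form the call site reduces to the single line already shown in the diff; repeated here only as a usage reminder:

// Call site when the helper variant is in use (copied from the hunk above).
{showAddTraceOperator && (
	<TraceOperatorSection addTraceOperator={addTraceOperator} />
)}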
@@ -12,7 +12,6 @@ import {
|
||||
startCompletion,
|
||||
} from '@codemirror/autocomplete';
|
||||
import { javascript } from '@codemirror/lang-javascript';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { copilot } from '@uiw/codemirror-theme-copilot';
|
||||
import { githubLight } from '@uiw/codemirror-theme-github';
|
||||
@@ -80,16 +79,6 @@ const stopEventsExtension = EditorView.domEventHandlers({
|
||||
},
|
||||
});
|
||||
|
||||
interface QuerySearchProps {
|
||||
placeholder?: string;
|
||||
onChange: (value: string) => void;
|
||||
queryData: IBuilderQuery;
|
||||
dataSource: DataSource;
|
||||
signalSource?: string;
|
||||
hardcodedAttributeKeys?: QueryKeyDataSuggestionsProps[];
|
||||
onRun?: (query: string) => void;
|
||||
}
|
||||
|
||||
function QuerySearch({
|
||||
placeholder,
|
||||
onChange,
|
||||
@@ -98,8 +87,17 @@ function QuerySearch({
|
||||
onRun,
|
||||
signalSource,
|
||||
hardcodedAttributeKeys,
|
||||
}: QuerySearchProps): JSX.Element {
|
||||
}: {
|
||||
placeholder?: string;
|
||||
onChange: (value: string) => void;
|
||||
queryData: IBuilderQuery;
|
||||
dataSource: DataSource;
|
||||
signalSource?: string;
|
||||
hardcodedAttributeKeys?: QueryKeyDataSuggestionsProps[];
|
||||
onRun?: (query: string) => void;
|
||||
}): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const [query, setQuery] = useState<string>(queryData.filter?.expression || '');
|
||||
const [valueSuggestions, setValueSuggestions] = useState<any[]>([]);
|
||||
const [activeKey, setActiveKey] = useState<string>('');
|
||||
const [isLoadingSuggestions, setIsLoadingSuggestions] = useState(false);
|
||||
@@ -109,12 +107,8 @@ function QuerySearch({
|
||||
message: '',
|
||||
errors: [],
|
||||
});
|
||||
const isProgrammaticChangeRef = useRef(false);
|
||||
const [isEditorReady, setIsEditorReady] = useState(false);
|
||||
const [isFocused, setIsFocused] = useState(false);
|
||||
const editorRef = useRef<EditorView | null>(null);
|
||||
|
||||
const handleQueryValidation = useCallback((newQuery: string): void => {
|
||||
const handleQueryValidation = (newQuery: string): void => {
|
||||
try {
|
||||
const validationResponse = validateQuery(newQuery);
|
||||
setValidation(validationResponse);
|
||||
@@ -125,67 +119,29 @@ function QuerySearch({
|
||||
errors: [error as IDetailedError],
|
||||
});
|
||||
}
|
||||
}, []);
|
||||
};
|
||||
|
||||
const getCurrentQuery = useCallback(
|
||||
(): string => editorRef.current?.state.doc.toString() || '',
|
||||
[],
|
||||
);
|
||||
// Track if the query was changed externally (from queryData) vs internally (user input)
|
||||
const [isExternalQueryChange, setIsExternalQueryChange] = useState(false);
|
||||
const [lastExternalQuery, setLastExternalQuery] = useState<string>('');
|
||||
|
||||
const updateEditorValue = useCallback(
|
||||
(value: string, options: { skipOnChange?: boolean } = {}): void => {
|
||||
const view = editorRef.current;
|
||||
if (!view) return;
|
||||
useEffect(() => {
|
||||
const newQuery = queryData.filter?.expression || '';
|
||||
// Only mark as external change if the query actually changed from external source
|
||||
if (newQuery !== lastExternalQuery) {
|
||||
setQuery(newQuery);
|
||||
setIsExternalQueryChange(true);
|
||||
setLastExternalQuery(newQuery);
|
||||
}
|
||||
}, [queryData.filter?.expression, lastExternalQuery]);
|
||||
|
||||
const currentValue = view.state.doc.toString();
|
||||
if (currentValue === value) return;
|
||||
|
||||
if (options.skipOnChange) {
|
||||
isProgrammaticChangeRef.current = true;
|
||||
}
|
||||
|
||||
view.dispatch({
|
||||
changes: {
|
||||
from: 0,
|
||||
to: currentValue.length,
|
||||
insert: value,
|
||||
},
|
||||
selection: {
|
||||
anchor: value.length,
|
||||
},
|
||||
});
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const handleEditorCreate = useCallback((view: EditorView): void => {
|
||||
editorRef.current = view;
|
||||
setIsEditorReady(true);
|
||||
}, []);
|
||||
|
||||
useEffect(
|
||||
() => {
|
||||
if (!isEditorReady) return;
|
||||
|
||||
const newQuery = queryData.filter?.expression || '';
|
||||
const currentQuery = getCurrentQuery();
|
||||
|
||||
/* eslint-disable-next-line sonarjs/no-collapsible-if */
|
||||
if (newQuery !== currentQuery && !isFocused) {
|
||||
// Prevent clearing a non-empty editor when queryData becomes empty temporarily
|
||||
// Only update if newQuery has a value, or if both are empty (initial state)
|
||||
if (newQuery || !currentQuery) {
|
||||
updateEditorValue(newQuery, { skipOnChange: true });
|
||||
|
||||
if (newQuery) {
|
||||
handleQueryValidation(newQuery);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[isEditorReady, queryData.filter?.expression, isFocused],
|
||||
);
|
||||
// Validate query when it changes externally (from queryData)
|
||||
useEffect(() => {
|
||||
if (isExternalQueryChange && query) {
|
||||
handleQueryValidation(query);
|
||||
setIsExternalQueryChange(false);
|
||||
}
|
||||
}, [isExternalQueryChange, query]);
|
||||
|
||||
const [keySuggestions, setKeySuggestions] = useState<
|
||||
QueryKeyDataSuggestionsProps[] | null
|
||||
@@ -194,6 +150,7 @@ function QuerySearch({
|
||||
const [showExamples] = useState(false);
|
||||
|
||||
const [cursorPos, setCursorPos] = useState({ line: 0, ch: 0 });
|
||||
const [isFocused, setIsFocused] = useState(false);
|
||||
|
||||
const [
|
||||
isFetchingCompleteValuesList,
|
||||
@@ -202,6 +159,8 @@ function QuerySearch({
|
||||
|
||||
const lastPosRef = useRef<{ line: number; ch: number }>({ line: 0, ch: 0 });
|
||||
|
||||
// Reference to the editor view for programmatic autocompletion
|
||||
const editorRef = useRef<EditorView | null>(null);
|
||||
const lastKeyRef = useRef<string>('');
|
||||
const lastFetchedKeyRef = useRef<string>('');
|
||||
const lastValueRef = useRef<string>('');
|
||||
@@ -547,7 +506,6 @@ function QuerySearch({
|
||||
|
||||
if (!editorRef.current) {
|
||||
editorRef.current = viewUpdate.view;
|
||||
setIsEditorReady(true);
|
||||
}
|
||||
|
||||
const selection = viewUpdate.view.state.selection.main;
|
||||
@@ -563,15 +521,7 @@ function QuerySearch({
|
||||
const lastPos = lastPosRef.current;
|
||||
|
||||
if (newPos.line !== lastPos.line || newPos.ch !== lastPos.ch) {
|
||||
setCursorPos((lastPos) => {
|
||||
if (newPos.ch !== lastPos.ch && newPos.ch === 0) {
|
||||
Sentry.captureEvent({
|
||||
message: `Cursor jumped to start of line from ${lastPos.ch} to ${newPos.ch}`,
|
||||
level: 'warning',
|
||||
});
|
||||
}
|
||||
return newPos;
|
||||
});
|
||||
setCursorPos(newPos);
|
||||
lastPosRef.current = newPos;
|
||||
|
||||
if (doc) {
|
||||
@@ -604,17 +554,16 @@ function QuerySearch({
|
||||
}, []);
|
||||
|
||||
const handleChange = (value: string): void => {
|
||||
if (isProgrammaticChangeRef.current) {
|
||||
isProgrammaticChangeRef.current = false;
|
||||
return;
|
||||
}
|
||||
|
||||
setQuery(value);
|
||||
onChange(value);
|
||||
// Mark as internal change to avoid triggering external validation
|
||||
setIsExternalQueryChange(false);
|
||||
// Update lastExternalQuery to prevent external validation trigger
|
||||
setLastExternalQuery(value);
|
||||
};
|
||||
|
||||
const handleBlur = (): void => {
|
||||
const currentQuery = getCurrentQuery();
|
||||
handleQueryValidation(currentQuery);
|
||||
handleQueryValidation(query);
|
||||
setIsFocused(false);
|
||||
};
|
||||
|
||||
@@ -633,11 +582,12 @@ function QuerySearch({
|
||||
|
||||
const handleExampleClick = (exampleQuery: string): void => {
|
||||
// If there's an existing query, append the example with AND
|
||||
const currentQuery = getCurrentQuery();
|
||||
const newQuery = currentQuery
|
||||
? `${currentQuery} AND ${exampleQuery}`
|
||||
: exampleQuery;
|
||||
updateEditorValue(newQuery);
|
||||
const newQuery = query ? `${query} AND ${exampleQuery}` : exampleQuery;
|
||||
setQuery(newQuery);
|
||||
// Mark as internal change to avoid triggering external validation
|
||||
setIsExternalQueryChange(false);
|
||||
// Update lastExternalQuery to prevent external validation trigger
|
||||
setLastExternalQuery(newQuery);
|
||||
};
|
||||
|
||||
// Helper function to render a badge for the current context mode
|
||||
@@ -672,10 +622,8 @@ function QuerySearch({
|
||||
const word = context.matchBefore(/[a-zA-Z0-9_.:/?&=#%\-\[\]]*/);
|
||||
if (word?.from === word?.to && !context.explicit) return null;
|
||||
|
||||
// Get current query from editor
|
||||
const currentQuery = editorRef.current?.state.doc.toString() || '';
|
||||
// Get the query context at the cursor position
|
||||
const queryContext = getQueryContextAtCursor(currentQuery, cursorPos.ch);
|
||||
const queryContext = getQueryContextAtCursor(query, cursorPos.ch);
|
||||
|
||||
// Define autocomplete options based on the context
|
||||
let options: {
|
||||
@@ -1171,8 +1119,7 @@ function QuerySearch({
|
||||
|
||||
if (queryContext.isInParenthesis) {
|
||||
// Different suggestions based on the context within parenthesis or bracket
|
||||
const currentQuery = editorRef.current?.state.doc.toString() || '';
|
||||
const curChar = currentQuery.charAt(cursorPos.ch - 1) || '';
|
||||
const curChar = query.charAt(cursorPos.ch - 1) || '';
|
||||
|
||||
if (curChar === '(' || curChar === '[') {
|
||||
// Right after opening parenthesis/bracket
|
||||
@@ -1321,7 +1268,7 @@ function QuerySearch({
|
||||
style={{
|
||||
position: 'absolute',
|
||||
top: 8,
|
||||
right: validation.isValid === false && getCurrentQuery() ? 40 : 8, // Move left when error shown
|
||||
right: validation.isValid === false && query ? 40 : 8, // Move left when error shown
|
||||
cursor: 'help',
|
||||
zIndex: 10,
|
||||
transition: 'right 0.2s ease',
|
||||
@@ -1342,10 +1289,10 @@ function QuerySearch({
|
||||
</Tooltip>
|
||||
|
||||
<CodeMirror
|
||||
value={query}
|
||||
theme={isDarkMode ? copilot : githubLight}
|
||||
onChange={handleChange}
|
||||
onUpdate={handleUpdate}
|
||||
onCreateEditor={handleEditorCreate}
|
||||
className={cx('query-where-clause-editor', {
|
||||
isValid: validation.isValid === true,
|
||||
hasErrors: validation.errors.length > 0,
|
||||
@@ -1383,7 +1330,7 @@ function QuerySearch({
|
||||
// Mod-Enter is usually Ctrl-Enter or Cmd-Enter based on OS
|
||||
run: (): boolean => {
|
||||
if (onRun && typeof onRun === 'function') {
|
||||
onRun(getCurrentQuery());
|
||||
onRun(query);
|
||||
} else {
|
||||
handleRunQuery();
|
||||
}
|
||||
@@ -1409,7 +1356,7 @@ function QuerySearch({
|
||||
onBlur={handleBlur}
|
||||
/>
|
||||
|
||||
{getCurrentQuery() && validation.isValid === false && !isFocused && (
|
||||
{query && validation.isValid === false && !isFocused && (
|
||||
<div
|
||||
className={cx('query-status-container', {
|
||||
hasErrors: validation.errors.length > 0,
|
||||
|
||||
@@ -9,13 +9,7 @@ import SpanScopeSelector from 'container/QueryBuilder/filters/QueryBuilderSearch
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { Copy, Ellipsis, Trash } from 'lucide-react';
|
||||
import {
|
||||
ForwardedRef,
|
||||
forwardRef,
|
||||
useCallback,
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { memo, useCallback, useMemo, useState } from 'react';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { HandleChangeQueryDataV5 } from 'types/common/operations.types';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
@@ -26,29 +20,26 @@ import QueryAddOns from './QueryAddOns/QueryAddOns';
|
||||
import QueryAggregation from './QueryAggregation/QueryAggregation';
|
||||
import QuerySearch from './QuerySearch/QuerySearch';
|
||||
|
||||
export const QueryV2 = forwardRef(function QueryV2(
|
||||
{
|
||||
index,
|
||||
queryVariant,
|
||||
query,
|
||||
filterConfigs,
|
||||
isListViewPanel = false,
|
||||
showTraceOperator = false,
|
||||
hasTraceOperator = false,
|
||||
version,
|
||||
showOnlyWhereClause = false,
|
||||
signalSource = '',
|
||||
isMultiQueryAllowed = false,
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
queriesCount = 1,
|
||||
}: QueryProps & {
|
||||
onSignalSourceChange: (value: string) => void;
|
||||
signalSourceChangeEnabled: boolean;
|
||||
queriesCount: number;
|
||||
},
|
||||
ref: ForwardedRef<HTMLDivElement>,
|
||||
): JSX.Element {
|
||||
export const QueryV2 = memo(function QueryV2({
|
||||
ref,
|
||||
index,
|
||||
queryVariant,
|
||||
query,
|
||||
filterConfigs,
|
||||
isListViewPanel = false,
|
||||
showTraceOperator = false,
|
||||
hasTraceOperator = false,
|
||||
version,
|
||||
showOnlyWhereClause = false,
|
||||
signalSource = '',
|
||||
isMultiQueryAllowed = false,
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
}: QueryProps & {
|
||||
ref: React.RefObject<HTMLDivElement>;
|
||||
onSignalSourceChange: (value: string) => void;
|
||||
signalSourceChangeEnabled: boolean;
|
||||
}): JSX.Element {
|
||||
const { cloneQuery, panelType } = useQueryBuilder();
|
||||
|
||||
const showFunctions = query?.functions?.length > 0;
|
||||
@@ -201,16 +192,12 @@ export const QueryV2 = forwardRef(function QueryV2(
|
||||
icon: <Copy size={14} />,
|
||||
onClick: handleCloneEntity,
|
||||
},
|
||||
...(queriesCount && queriesCount > 1
|
||||
? [
|
||||
{
|
||||
label: 'Delete',
|
||||
key: 'delete-query',
|
||||
icon: <Trash size={14} />,
|
||||
onClick: handleDeleteQuery,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{
|
||||
label: 'Delete',
|
||||
key: 'delete-query',
|
||||
icon: <Trash size={14} />,
|
||||
onClick: handleDeleteQuery,
|
||||
},
|
||||
],
|
||||
}}
|
||||
placement="bottomRight"
|
||||
@@ -302,5 +289,3 @@ export const QueryV2 = forwardRef(function QueryV2(
|
||||
</div>
|
||||
);
|
||||
});
|
||||
|
||||
QueryV2.displayName = 'QueryV2';
|
||||
|
||||
@@ -92,9 +92,6 @@
|
||||
|
||||
.qb-trace-operator-editor-container {
|
||||
flex: 1;
|
||||
.cm-activeLine > span {
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
&.arrow-left {
|
||||
@@ -116,8 +113,6 @@
|
||||
text-overflow: ellipsis;
|
||||
padding: 0px 8px;
|
||||
border-right: 1px solid var(--bg-slate-400);
|
||||
font-size: 12px;
|
||||
font-weight: 300;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -68,7 +68,7 @@ export default function TraceOperator({
|
||||
!isListViewPanel && 'qb-trace-operator-arrow',
|
||||
)}
|
||||
>
|
||||
<Typography.Text className="label">Trace Operator</Typography.Text>
|
||||
<Typography.Text className="label">TRACE OPERATOR</Typography.Text>
|
||||
<div className="qb-trace-operator-editor-container">
|
||||
<TraceOperatorEditor
|
||||
value={traceOperator?.expression || ''}
|
||||
|
||||
@@ -5,85 +5,13 @@ import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
import { initialQueriesMap } from 'constants/queryBuilder';
import * as UseQBModule from 'hooks/queryBuilder/useQueryBuilder';
import { fireEvent, render, userEvent, waitFor } from 'tests/test-utils';
import React from 'react';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import type { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
import { DataSource } from 'types/common/queryBuilder';

import QuerySearch from '../QuerySearch/QuerySearch';

const CM_EDITOR_SELECTOR = '.cm-editor .cm-content';

// Mock DOM APIs that CodeMirror needs
beforeAll(() => {
// Mock getClientRects and getBoundingClientRect for Range objects
const mockRect: DOMRect = {
width: 100,
height: 20,
top: 0,
left: 0,
right: 100,
bottom: 20,
x: 0,
y: 0,
toJSON: (): DOMRect => mockRect,
} as DOMRect;

// Create a minimal Range mock with only what CodeMirror actually uses
const createMockRange = (): Range => {
let startContainer: Node = document.createTextNode('');
let endContainer: Node = document.createTextNode('');
let startOffset = 0;
let endOffset = 0;

const mockRange = {
// CodeMirror uses these for text measurement
getClientRects: (): DOMRectList =>
(({
length: 1,
item: (index: number): DOMRect | null => (index === 0 ? mockRect : null),
0: mockRect,
*[Symbol.iterator](): Generator<DOMRect> {
yield mockRect;
},
} as unknown) as DOMRectList),
getBoundingClientRect: (): DOMRect => mockRect,
// CodeMirror calls these to set up text ranges
setStart: (node: Node, offset: number): void => {
startContainer = node;
startOffset = offset;
},
setEnd: (node: Node, offset: number): void => {
endContainer = node;
endOffset = offset;
},
// Minimal Range properties (TypeScript requires these)
get startContainer(): Node {
return startContainer;
},
get endContainer(): Node {
return endContainer;
},
get startOffset(): number {
return startOffset;
},
get endOffset(): number {
return endOffset;
},
get collapsed(): boolean {
return startContainer === endContainer && startOffset === endOffset;
},
commonAncestorContainer: document.body,
};
return (mockRange as unknown) as Range;
};

// Mock document.createRange to return a new Range instance each time
document.createRange = (): Range => createMockRange();

// Mock getBoundingClientRect for elements
Element.prototype.getBoundingClientRect = (): DOMRect => mockRect;
});

jest.mock('hooks/useDarkMode', () => ({
useIsDarkMode: (): boolean => false,
}));
@@ -103,6 +31,24 @@ jest.mock('hooks/queryBuilder/useQueryBuilder', () => {
};
});

jest.mock('@codemirror/autocomplete', () => ({
autocompletion: (): Record<string, unknown> => ({}),
closeCompletion: (): boolean => true,
completionKeymap: [] as unknown[],
startCompletion: (): boolean => true,
}));

jest.mock('@codemirror/lang-javascript', () => ({
javascript: (): Record<string, unknown> => ({}),
}));

jest.mock('@uiw/codemirror-theme-copilot', () => ({
copilot: {},
}));

jest.mock('@uiw/codemirror-theme-github', () => ({
githubLight: {},
}));
jest.mock('api/querySuggestions/getKeySuggestions', () => ({
getKeySuggestions: jest.fn().mockResolvedValue({
data: {
@@ -117,19 +63,153 @@ jest.mock('api/querySuggestions/getValueSuggestion', () => ({
}),
}));

// Note: We're NOT mocking CodeMirror here - using the real component
// This provides integration testing with the actual CodeMirror editor
// Mock CodeMirror to a simple textarea to make it testable and call onUpdate
jest.mock(
'@uiw/react-codemirror',
(): Record<string, unknown> => {
// Minimal EditorView shape used by the component
class EditorViewMock {}
(EditorViewMock as any).domEventHandlers = (): unknown => ({} as unknown);
(EditorViewMock as any).lineWrapping = {} as unknown;
(EditorViewMock as any).editable = { of: () => ({}) } as unknown;

const keymap = { of: (arr: unknown) => arr } as unknown;
const Prec = { highest: (ext: unknown) => ext } as unknown;

type CodeMirrorProps = {
value?: string;
onChange?: (v: string) => void;
onFocus?: () => void;
onBlur?: () => void;
placeholder?: string;
onCreateEditor?: (view: unknown) => unknown;
onUpdate?: (arg: {
view: {
state: {
selection: { main: { head: number } };
doc: {
toString: () => string;
lineAt: (
_pos: number,
) => { number: number; from: number; to: number; text: string };
};
};
};
}) => void;
'data-testid'?: string;
extensions?: unknown[];
};

function CodeMirrorMock({
value,
onChange,
onFocus,
onBlur,
placeholder,
onCreateEditor,
onUpdate,
'data-testid': dataTestId,
extensions,
}: CodeMirrorProps): JSX.Element {
const [localValue, setLocalValue] = React.useState<string>(value ?? '');

// Provide a fake editor instance
React.useEffect(() => {
if (onCreateEditor) {
onCreateEditor(new EditorViewMock() as any);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

// Call onUpdate whenever localValue changes to simulate cursor and doc
React.useEffect(() => {
if (onUpdate) {
const text = String(localValue ?? '');
const head = text.length;
onUpdate({
view: {
state: {
selection: { main: { head } },
doc: {
toString: (): string => text,
lineAt: () => ({
number: 1,
from: 0,
to: text.length,
text,
}),
},
},
},
});
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [localValue]);

const handleKeyDown = (
e: React.KeyboardEvent<HTMLTextAreaElement>,
): void => {
const isModEnter = e.key === 'Enter' && (e.metaKey || e.ctrlKey);
if (!isModEnter) return;
const exts: unknown[] = Array.isArray(extensions) ? extensions : [];
const flat: unknown[] = exts.flatMap((x: unknown) =>
Array.isArray(x) ? x : [x],
);
const keyBindings = flat.filter(
(x) =>
Boolean(x) &&
typeof x === 'object' &&
'key' in (x as Record<string, unknown>),
) as Array<{ key?: string; run?: () => boolean | void }>;
keyBindings
.filter((b) => b.key === 'Mod-Enter' && typeof b.run === 'function')
.forEach((b) => {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
b.run!();
});
};

return (
<textarea
data-testid={dataTestId || 'query-where-clause-editor'}
placeholder={placeholder}
value={localValue}
onChange={(e): void => {
setLocalValue(e.target.value);
if (onChange) {
onChange(e.target.value);
}
}}
onFocus={onFocus}
onBlur={onBlur}
onKeyDown={handleKeyDown}
style={{ width: '100%', minHeight: 80 }}
/>
);
}

return {
__esModule: true,
default: CodeMirrorMock,
EditorView: EditorViewMock,
keymap,
Prec,
};
},
);
const handleRunQueryMock = ((UseQBModule as unknown) as {
handleRunQuery: jest.MockedFunction<() => void>;
}).handleRunQuery;

const PLACEHOLDER_TEXT =
"Enter your filter query (e.g., http.status_code >= 500 AND service.name = 'frontend')";
const TESTID_EDITOR = 'query-where-clause-editor';
const SAMPLE_KEY_TYPING = 'http.';
const SAMPLE_VALUE_TYPING_INCOMPLETE = "service.name = '";
const SAMPLE_VALUE_TYPING_COMPLETE = "service.name = 'frontend'";
const SAMPLE_STATUS_QUERY = "http.status_code = '200'";
const SAMPLE_VALUE_TYPING_INCOMPLETE = " service.name = '";
const SAMPLE_VALUE_TYPING_COMPLETE = " service.name = 'frontend'";
const SAMPLE_STATUS_QUERY = " status_code = '200'";

describe('QuerySearch (Integration with Real CodeMirror)', () => {
describe('QuerySearch', () => {
it('renders with placeholder', () => {
render(
<QuerySearch
@@ -139,19 +219,21 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);

// CodeMirror renders a contenteditable div, so we check for the container
const editorContainer = document.querySelector('.query-where-clause-editor');
expect(editorContainer).toBeInTheDocument();
expect(screen.getByPlaceholderText(PLACEHOLDER_TEXT)).toBeInTheDocument();
});

it('fetches key suggestions when typing a key (debounced)', async () => {
// Use real timers for CodeMirror integration tests
jest.useFakeTimers();
const advance = (ms: number): void => {
jest.advanceTimersByTime(ms);
};
const user = userEvent.setup({
advanceTimers: advance,
pointerEventsCheck: 0,
});
const mockedGetKeys = getKeySuggestions as jest.MockedFunction<
typeof getKeySuggestions
>;
mockedGetKeys.mockClear();

const user = userEvent.setup({ pointerEventsCheck: 0 });

render(
<QuerySearch
@@ -161,33 +243,28 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);

// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});

// Find the CodeMirror editor contenteditable element
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;

// Focus and type into the editor
await user.click(editor);
const editor = screen.getByTestId(TESTID_EDITOR);
await user.type(editor, SAMPLE_KEY_TYPING);
advance(1000);

// Wait for debounced API call (300ms debounce + some buffer)
await waitFor(() => expect(mockedGetKeys).toHaveBeenCalled(), {
timeout: 2000,
timeout: 3000,
});
jest.useRealTimers();
});

it('fetches value suggestions when editing value context', async () => {
// Use real timers for CodeMirror integration tests
jest.useFakeTimers();
const advance = (ms: number): void => {
jest.advanceTimersByTime(ms);
};
const user = userEvent.setup({
advanceTimers: advance,
pointerEventsCheck: 0,
});
const mockedGetValues = getValueSuggestions as jest.MockedFunction<
typeof getValueSuggestions
>;
mockedGetValues.mockClear();

const user = userEvent.setup({ pointerEventsCheck: 0 });

render(
<QuerySearch
@@ -197,28 +274,21 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);

// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});

const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
await user.click(editor);
const editor = screen.getByTestId(TESTID_EDITOR);
await user.type(editor, SAMPLE_VALUE_TYPING_INCOMPLETE);
advance(1000);

// Wait for debounced API call (300ms debounce + some buffer)
await waitFor(() => expect(mockedGetValues).toHaveBeenCalled(), {
timeout: 2000,
timeout: 3000,
});
jest.useRealTimers();
});

it('fetches key suggestions on mount for LOGS', async () => {
// Use real timers for CodeMirror integration tests
jest.useFakeTimers();
const mockedGetKeysOnMount = getKeySuggestions as jest.MockedFunction<
typeof getKeySuggestions
>;
mockedGetKeysOnMount.mockClear();

render(
<QuerySearch
@@ -228,15 +298,17 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);

// Wait for debounced API call (300ms debounce + some buffer)
jest.advanceTimersByTime(1000);

await waitFor(() => expect(mockedGetKeysOnMount).toHaveBeenCalled(), {
timeout: 2000,
timeout: 3000,
});

const lastArgs = mockedGetKeysOnMount.mock.calls[
mockedGetKeysOnMount.mock.calls.length - 1
]?.[0] as { signal: unknown; searchText: string };
expect(lastArgs).toMatchObject({ signal: DataSource.LOGS, searchText: '' });
jest.useRealTimers();
});

it('calls provided onRun on Mod-Enter', async () => {
@@ -252,26 +324,12 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);

// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});

const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
const editor = screen.getByTestId(TESTID_EDITOR);
await user.click(editor);
await user.type(editor, SAMPLE_STATUS_QUERY);
await user.keyboard('{Meta>}{Enter}{/Meta}');

// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
fireEvent.keyDown(editor, {
key: 'Enter',
code: 'Enter',
[modKey]: true,
keyCode: 13,
});

await waitFor(() => expect(onRun).toHaveBeenCalled(), { timeout: 2000 });
await waitFor(() => expect(onRun).toHaveBeenCalled());
});

it('calls handleRunQuery when Mod-Enter without onRun', async () => {
@@ -290,62 +348,11 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
/>,
);

// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});

const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
const editor = screen.getByTestId(TESTID_EDITOR);
await user.click(editor);
await user.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
await user.keyboard('{Meta>}{Enter}{/Meta}');

// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
fireEvent.keyDown(editor, {
key: 'Enter',
code: 'Enter',
[modKey]: true,
keyCode: 13,
});

await waitFor(() => expect(mockedHandleRunQuery).toHaveBeenCalled(), {
timeout: 2000,
});
});

it('initializes CodeMirror with expression from queryData.filter.expression on mount', async () => {
const testExpression =
"http.status_code >= 500 AND service.name = 'frontend'";
const queryDataWithExpression = {
...initialQueriesMap.logs.builder.queryData[0],
filter: {
expression: testExpression,
},
};

render(
<QuerySearch
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
queryData={queryDataWithExpression}
dataSource={DataSource.LOGS}
/>,
);

// Wait for CodeMirror to initialize and the expression to be set
await waitFor(
() => {
// CodeMirror stores content in .cm-content, check the text content
const editorContent = document.querySelector(
CM_EDITOR_SELECTOR,
) as HTMLElement;
expect(editorContent).toBeInTheDocument();
// CodeMirror may render the text in multiple ways, check if it contains our expression
const textContent = editorContent.textContent || '';
expect(textContent).toContain('http.status_code');
expect(textContent).toContain('service.name');
},
{ timeout: 3000 },
);
await waitFor(() => expect(mockedHandleRunQuery).toHaveBeenCalled());
});
});

@@ -13,7 +13,6 @@ import {
convertAggregationToExpression,
convertFiltersToExpression,
convertFiltersToExpressionWithExistingQuery,
formatValueForExpression,
removeKeysFromExpression,
} from '../utils';

@@ -1194,220 +1193,3 @@ describe('removeKeysFromExpression', () => {
});
});
});

describe('formatValueForExpression', () => {
beforeEach(() => {
jest.clearAllMocks();
});

describe('Variable values', () => {
it('should return variable values as-is', () => {
expect(formatValueForExpression('$variable')).toBe('$variable');
expect(formatValueForExpression('$env')).toBe('$env');
expect(formatValueForExpression(' $variable ')).toBe(' $variable ');
});

it('should return variable arrays as-is', () => {
expect(formatValueForExpression(['$var1', '$var2'])).toBe('$var1,$var2');
});
});

describe('Numeric string values', () => {
it('should return numeric strings with quotes', () => {
expect(formatValueForExpression('123')).toBe("'123'");
expect(formatValueForExpression('0')).toBe("'0'");
expect(formatValueForExpression('100000')).toBe("'100000'");
expect(formatValueForExpression('-42')).toBe("'-42'");
expect(formatValueForExpression('3.14')).toBe("'3.14'");
expect(formatValueForExpression(' 456 ')).toBe("' 456 '");
});

it('should handle numeric strings with IN operator', () => {
expect(formatValueForExpression('123', 'IN')).toBe("['123']");
expect(formatValueForExpression(['123', '456'], 'IN')).toBe(
"['123', '456']",
);
});
});

describe('Quoted string values', () => {
it('should return already quoted strings as-is', () => {
expect(formatValueForExpression("'quoted'")).toBe("'quoted'");
expect(formatValueForExpression('"double-quoted"')).toBe('"double-quoted"');
expect(formatValueForExpression('`backticked`')).toBe('`backticked`');
expect(formatValueForExpression("'100000'")).toBe("'100000'");
});

it('should preserve quoted strings in arrays', () => {
expect(formatValueForExpression(["'value1'", "'value2'"])).toBe(
"['value1', 'value2']",
);
expect(formatValueForExpression(["'100000'", "'200000'"], 'IN')).toBe(
"['100000', '200000']",
);
});
});

describe('Regular string values', () => {
it('should wrap regular strings in single quotes', () => {
expect(formatValueForExpression('hello')).toBe("'hello'");
expect(formatValueForExpression('api-gateway')).toBe("'api-gateway'");
expect(formatValueForExpression('test value')).toBe("'test value'");
});

it('should escape single quotes in strings', () => {
expect(formatValueForExpression("user's data")).toBe("'user\\'s data'");
expect(formatValueForExpression("John's")).toBe("'John\\'s'");
expect(formatValueForExpression("it's a test")).toBe("'it\\'s a test'");
});

it('should handle empty strings', () => {
expect(formatValueForExpression('')).toBe("''");
});

it('should handle strings with special characters', () => {
expect(formatValueForExpression('/api/v1/users')).toBe("'/api/v1/users'");
expect(formatValueForExpression('user@example.com')).toBe(
"'user@example.com'",
);
expect(formatValueForExpression('Contains "quotes"')).toBe(
'\'Contains "quotes"\'',
);
});
});

describe('Number values', () => {
it('should convert numbers to strings without quotes', () => {
expect(formatValueForExpression(123)).toBe('123');
expect(formatValueForExpression(0)).toBe('0');
expect(formatValueForExpression(-42)).toBe('-42');
expect(formatValueForExpression(100000)).toBe('100000');
expect(formatValueForExpression(3.14)).toBe('3.14');
});

it('should handle numbers with IN operator', () => {
expect(formatValueForExpression(123, 'IN')).toBe('[123]');
expect(formatValueForExpression([100, 200] as any, 'IN')).toBe('[100, 200]');
});
});

describe('Boolean values', () => {
it('should convert booleans to strings without quotes', () => {
expect(formatValueForExpression(true)).toBe('true');
expect(formatValueForExpression(false)).toBe('false');
});

it('should handle booleans with IN operator', () => {
expect(formatValueForExpression(true, 'IN')).toBe('[true]');
expect(formatValueForExpression([true, false] as any, 'IN')).toBe(
'[true, false]',
);
});
});

describe('Array values', () => {
it('should format array of strings', () => {
expect(formatValueForExpression(['a', 'b', 'c'])).toBe("['a', 'b', 'c']");
expect(formatValueForExpression(['service1', 'service2'])).toBe(
"['service1', 'service2']",
);
});

it('should format array of numeric strings', () => {
expect(formatValueForExpression(['123', '456', '789'])).toBe(
"['123', '456', '789']",
);
});

it('should format array of numbers', () => {
expect(formatValueForExpression([1, 2, 3] as any)).toBe('[1, 2, 3]');
expect(formatValueForExpression([100, 200, 300] as any)).toBe(
'[100, 200, 300]',
);
});

it('should format mixed array types', () => {
expect(formatValueForExpression(['hello', 123, true] as any)).toBe(
"['hello', 123, true]",
);
});

it('should format array with quoted values', () => {
expect(formatValueForExpression(["'quoted'", 'regular'])).toBe(
"['quoted', 'regular']",
);
});

it('should format array with empty strings', () => {
expect(formatValueForExpression(['', 'value'])).toBe("['', 'value']");
});
});

describe('IN and NOT IN operators', () => {
it('should format single value as array for IN operator', () => {
expect(formatValueForExpression('value', 'IN')).toBe("['value']");
expect(formatValueForExpression(123, 'IN')).toBe('[123]');
expect(formatValueForExpression('123', 'IN')).toBe("['123']");
});

it('should format array for IN operator', () => {
expect(formatValueForExpression(['a', 'b'], 'IN')).toBe("['a', 'b']");
expect(formatValueForExpression(['123', '456'], 'IN')).toBe(
"['123', '456']",
);
});

it('should format single value as array for NOT IN operator', () => {
expect(formatValueForExpression('value', 'NOT IN')).toBe("['value']");
expect(formatValueForExpression('value', 'not in')).toBe("['value']");
});

it('should format array for NOT IN operator', () => {
expect(formatValueForExpression(['a', 'b'], 'NOT IN')).toBe("['a', 'b']");
});
});

describe('Edge cases', () => {
it('should handle strings that look like numbers but have quotes', () => {
expect(formatValueForExpression("'123'")).toBe("'123'");
expect(formatValueForExpression('"456"')).toBe('"456"');
expect(formatValueForExpression('`789`')).toBe('`789`');
});

it('should handle strings with leading/trailing whitespace', () => {
expect(formatValueForExpression(' hello ')).toBe("' hello '");
expect(formatValueForExpression(' 123 ')).toBe("' 123 '");
});

it('should handle very large numbers', () => {
expect(formatValueForExpression('999999999')).toBe("'999999999'");
expect(formatValueForExpression(999999999)).toBe('999999999');
});

it('should handle decimal numbers', () => {
expect(formatValueForExpression('123.456')).toBe("'123.456'");
expect(formatValueForExpression(123.456)).toBe('123.456');
});

it('should handle negative numbers', () => {
expect(formatValueForExpression('-100')).toBe("'-100'");
expect(formatValueForExpression(-100)).toBe('-100');
});

it('should handle strings that are not valid numbers', () => {
expect(formatValueForExpression('123abc')).toBe("'123abc'");
expect(formatValueForExpression('abc123')).toBe("'abc123'");
expect(formatValueForExpression('12.34.56')).toBe("'12.34.56'");
});

it('should handle empty array', () => {
expect(formatValueForExpression([])).toBe('[]');
expect(formatValueForExpression([], 'IN')).toBe('[]');
});

it('should handle array with single element', () => {
expect(formatValueForExpression(['single'])).toBe("['single']");
expect(formatValueForExpression([123] as any)).toBe('[123]');
});
});
});

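Taken together, the cases above pin down the observable contract of formatValueForExpression: dashboard variables pass through untouched, strings (including numeric strings) are single-quoted with internal quotes escaped, bare numbers and booleans stay unquoted, and IN / NOT IN always produce a bracketed list. A minimal sketch of a caller relying on that contract; the buildClause helper and its inputs below are illustrative only and not part of this change:

import { formatValueForExpression } from '../utils';

// Hypothetical helper: renders a single "key op value" clause using the
// formatting behaviour exercised by the tests above.
function buildClause(
key: string,
op: string,
value: string | number | boolean | (string | number | boolean)[],
): string {
return `${key} ${op} ${formatValueForExpression(value, op)}`;
}

buildClause('service.name', 'IN', ['frontend', 'backend']); // "service.name IN ['frontend', 'backend']"
buildClause('http.status_code', '=', '500'); // "http.status_code = '500'" (numeric strings stay quoted)
buildClause('duration_ms', '>', 250); // "duration_ms > 250" (real numbers stay bare)
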
@@ -24,7 +24,7 @@ import {
import { EQueryType } from 'types/common/dashboard';
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
import { extractQueryPairs } from 'utils/queryContextUtils';
import { isQuoted, unquote } from 'utils/stringUtils';
import { unquote } from 'utils/stringUtils';
import { isFunctionOperator, isNonValueOperator } from 'utils/tokenUtils';
import { v4 as uuid } from 'uuid';

@@ -38,57 +38,49 @@ const isArrayOperator = (operator: string): boolean => {
return arrayOperators.includes(operator);
};

const isVariable = (
value: (string | number | boolean)[] | string | number | boolean,
): boolean => {
const isVariable = (value: string | string[] | number | boolean): boolean => {
if (Array.isArray(value)) {
return value.some((v) => typeof v === 'string' && v.trim().startsWith('$'));
}
return typeof value === 'string' && value.trim().startsWith('$');
};

/**
* Formats a single value for use in expression strings.
* Strings are quoted and escaped, while numbers and booleans are converted to strings.
*/
const formatSingleValue = (v: string | number | boolean): string => {
if (typeof v === 'string') {
// Preserve already-quoted strings
if (isQuoted(v)) {
return v;
}
// Quote and escape single quotes in strings
return `'${v.replace(/'/g, "\\'")}'`;
}
// Convert numbers and booleans to strings without quotes
return String(v);
};

/**
* Format a value for the expression string
* @param value - The value to format
* @param operator - The operator being used (to determine if array is needed)
* @returns Formatted value string
*/
export const formatValueForExpression = (
value: (string | number | boolean)[] | string | number | boolean,
const formatValueForExpression = (
value: string[] | string | number | boolean,
operator?: string,
): string => {
if (isVariable(value)) {
return String(value);
}

// For IN operators, ensure value is always an array
if (isArrayOperator(operator || '')) {
const arrayValue = Array.isArray(value) ? value : [value];
return `[${arrayValue.map(formatSingleValue).join(', ')}]`;
return `[${arrayValue
.map((v) =>
typeof v === 'string' ? `'${v.replace(/'/g, "\\'")}'` : String(v),
)
.join(', ')}]`;
}

if (Array.isArray(value)) {
return `[${value.map(formatSingleValue).join(', ')}]`;
// Handle array values (e.g., for IN operations)
return `[${value
.map((v) =>
typeof v === 'string' ? `'${v.replace(/'/g, "\\'")}'` : String(v),
)
.join(', ')}]`;
}

if (typeof value === 'string') {
return formatSingleValue(value);
// Add single quotes around all string values and escape internal single quotes
return `'${value.replace(/'/g, "\\'")}'`;
}

return String(value);
@@ -144,43 +136,14 @@ export const convertFiltersToExpression = (
};
};

/**
* Converts a string value to its appropriate type (number, boolean, or string)
* for use in filter objects. This is the inverse of formatSingleValue.
*/
function formatSingleValueForFilter(
value: string | number | boolean,
): string | number | boolean {
if (typeof value === 'string') {
const trimmed = value.trim();

// Try to convert numeric strings to numbers
if (trimmed !== '' && !Number.isNaN(Number(trimmed))) {
return Number(trimmed);
}

// Convert boolean strings to booleans
if (trimmed === 'true' || trimmed === 'false') {
return trimmed === 'true';
}
}

// Return non-string values as-is, or string values that couldn't be converted
return value;
}

/**
* Formats values for filter objects, converting string representations
* to their proper types (numbers, booleans) when appropriate.
*/
const formatValuesForFilter = (
value: (string | number | boolean)[] | number | boolean | string,
): (string | number | boolean)[] | number | boolean | string => {
const formatValuesForFilter = (value: string | string[]): string | string[] => {
if (Array.isArray(value)) {
return value.map(formatSingleValueForFilter);
return value.map((v) => (typeof v === 'string' ? unquote(v) : String(v)));
}

return formatSingleValueForFilter(value);
if (typeof value === 'string') {
return unquote(value);
}
return String(value);
};

export const convertExpressionToFilters = (
@@ -261,7 +224,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
const visitedPairs: Set<string> = new Set(); // Set to track visited query pairs

// Map extracted query pairs to key-specific pair information for faster access
let queryPairsMap = getQueryPairsMap(existingQuery);
let queryPairsMap = getQueryPairsMap(existingQuery.trim());

filters?.items?.forEach((filter) => {
const { key, op, value } = filter;
@@ -346,7 +309,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
)}${OPERATORS.IN} ${formattedValue} ${modifiedQuery.slice(
notInPair.position.valueEnd + 1,
)}`;
queryPairsMap = getQueryPairsMap(modifiedQuery);
queryPairsMap = getQueryPairsMap(modifiedQuery.trim());
}
shouldAddToNonExisting = false; // Don't add this to non-existing filters
} else if (

@@ -178,7 +178,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
if (SELECTED_OPERATORS.includes(filterSync.op)) {
if (isArray(filterSync.value)) {
filterSync.value.forEach((val) => {
filterState[String(val)] = true;
filterState[val] = true;
});
} else if (typeof filterSync.value === 'string') {
filterState[filterSync.value] = true;
@@ -191,7 +191,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
filterState = setDefaultValues(attributeValues, true);
if (isArray(filterSync.value)) {
filterSync.value.forEach((val) => {
filterState[String(val)] = false;
filterState[val] = false;
});
} else if (typeof filterSync.value === 'string') {
filterState[filterSync.value] = false;

@@ -0,0 +1,223 @@
import { render } from '@testing-library/react';
import { act } from 'react-dom/test-utils';
import { MemoryRouter } from 'react-router-dom';
import { Userpilot } from 'userpilot';

import UserpilotRouteTracker from './UserpilotRouteTracker';

// Mock constants
const INITIAL_PATH = '/initial';
const TIMER_DELAY = 100;

// Mock the userpilot module
jest.mock('userpilot', () => ({
Userpilot: {
reload: jest.fn(),
},
}));

// Mock location state
let mockLocation = {
pathname: INITIAL_PATH,
search: '',
hash: '',
state: null,
};

// Mock react-router-dom
jest.mock('react-router-dom', () => {
const originalModule = jest.requireActual('react-router-dom');

return {
...originalModule,
useLocation: jest.fn(() => mockLocation),
};
});

describe('UserpilotRouteTracker', () => {
beforeEach(() => {
jest.clearAllMocks();
// Reset timers
jest.useFakeTimers();
// Reset error mock implementation
(Userpilot.reload as jest.Mock).mockImplementation(() => {});
// Reset location to initial state
mockLocation = {
pathname: INITIAL_PATH,
search: '',
hash: '',
state: null,
};
});

afterEach(() => {
jest.useRealTimers();
});

it('calls Userpilot.reload on initial render', () => {
render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);

// Fast-forward timer to trigger the setTimeout in reloadUserpilot
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});

expect(Userpilot.reload).toHaveBeenCalledTimes(1);
});

it('calls Userpilot.reload when pathname changes', () => {
const { rerender } = render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);

// Fast-forward initial render timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
jest.clearAllMocks();

// Create a new location object with different pathname
const newLocation = {
...mockLocation,
pathname: '/new-path',
};

// Update the mock location with new path and trigger re-render
act(() => {
mockLocation = newLocation;
// Force a component update with the new location
rerender(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
});

// Fast-forward timer to allow the setTimeout to execute
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});

expect(Userpilot.reload).toHaveBeenCalledTimes(1);
});

it('calls Userpilot.reload when search parameters change', () => {
const { rerender } = render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);

// Fast-forward initial render timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
jest.clearAllMocks();

// Create a new location object with different search params
const newLocation = {
...mockLocation,
search: '?param=value',
};

// Update the mock location with new search and trigger re-render
// eslint-disable-next-line sonarjs/no-identical-functions
act(() => {
mockLocation = newLocation;
// Force a component update with the new location
rerender(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
});

// Fast-forward timer to allow the setTimeout to execute
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});

expect(Userpilot.reload).toHaveBeenCalledTimes(1);
});

it('handles errors in Userpilot.reload gracefully', () => {
// Mock console.error to prevent test output noise and capture calls
const consoleErrorSpy = jest
.spyOn(console, 'error')
.mockImplementation(() => {});

// Instead of using the component, we test the error handling behavior directly
const errorMsg = 'Error message';

// Set up a function that has the same error handling behavior as in component
const testErrorHandler = (): void => {
try {
if (typeof Userpilot !== 'undefined' && Userpilot.reload) {
Userpilot.reload();
}
} catch (error) {
console.error('[Userpilot] Error reloading on route change:', error);
}
};

// Make Userpilot.reload throw an error
(Userpilot.reload as jest.Mock).mockImplementation(() => {
throw new Error(errorMsg);
});

// Execute the function that should handle errors
testErrorHandler();

// Verify error was logged
expect(consoleErrorSpy).toHaveBeenCalledWith(
'[Userpilot] Error reloading on route change:',
expect.any(Error),
);

// Restore console mock
consoleErrorSpy.mockRestore();
});

it('does not call Userpilot.reload when same route is rendered again', () => {
const { rerender } = render(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);

// Fast-forward initial render timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});
jest.clearAllMocks();

act(() => {
mockLocation = {
pathname: mockLocation.pathname,
search: mockLocation.search,
hash: mockLocation.hash,
state: mockLocation.state,
};
// Force a component update with the same location
rerender(
<MemoryRouter>
<UserpilotRouteTracker />
</MemoryRouter>,
);
});

// Fast-forward timer
act(() => {
jest.advanceTimersByTime(TIMER_DELAY);
});

// Should not call reload since path and search are the same
expect(Userpilot.reload).not.toHaveBeenCalled();
});
});
@@ -0,0 +1,60 @@
import { useCallback, useEffect, useRef } from 'react';
import { useLocation } from 'react-router-dom';
import { Userpilot } from 'userpilot';

/**
* UserpilotRouteTracker - A component that tracks route changes and calls Userpilot.reload
* on actual page changes (pathname changes or significant query parameter changes).
*
* This component renders nothing and is designed to be placed once high in the component tree.
*/
function UserpilotRouteTracker(): null {
const location = useLocation();
const prevPathRef = useRef<string>(location.pathname);
const prevSearchRef = useRef<string>(location.search);
const isFirstRenderRef = useRef<boolean>(true);

// Function to reload Userpilot safely - using useCallback to avoid dependency issues
const reloadUserpilot = useCallback((): void => {
try {
if (typeof Userpilot !== 'undefined' && Userpilot.reload) {
setTimeout(() => {
Userpilot.reload();
}, 100);
}
} catch (error) {
console.error('[Userpilot] Error reloading on route change:', error);
}
}, []);

// Handle first render
useEffect(() => {
if (isFirstRenderRef.current) {
isFirstRenderRef.current = false;
reloadUserpilot();
}
}, [reloadUserpilot]);

// Handle route/query changes
useEffect(() => {
// Skip first render as it's handled by the effect above
if (isFirstRenderRef.current) {
return;
}

// Check if the path has changed or if significant query params have changed
const pathChanged = location.pathname !== prevPathRef.current;
const searchChanged = location.search !== prevSearchRef.current;

if (pathChanged || searchChanged) {
// Update refs
prevPathRef.current = location.pathname;
prevSearchRef.current = location.search;
reloadUserpilot();
}
}, [location.pathname, location.search, reloadUserpilot]);

return null;
}

export default UserpilotRouteTracker;
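The tracker above renders nothing and is meant to be mounted once, high in the tree and inside the router so that useLocation is available. A rough sketch of such a mount point; the App and AppRoutes names below are assumptions for illustration and the actual wiring is not part of this diff:

import { BrowserRouter } from 'react-router-dom';

import AppRoutes from './AppRoutes'; // hypothetical top-level routes component
import UserpilotRouteTracker from './UserpilotRouteTracker';

function App(): JSX.Element {
return (
<BrowserRouter>
{/* Renders null; calls Userpilot.reload when pathname or search changes */}
<UserpilotRouteTracker />
<AppRoutes />
</BrowserRouter>
);
}

export default App;
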
@@ -7,7 +7,7 @@ import ErrorIcon from 'assets/Error';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { BookOpenText, ChevronsDown, TriangleAlert } from 'lucide-react';
import KeyValueLabel from 'periscope/components/KeyValueLabel';
import { ReactNode, useMemo } from 'react';
import { ReactNode } from 'react';
import { Warning } from 'types/api';

interface WarningContentProps {
@@ -106,51 +106,19 @@ export function WarningContent({ warning }: WarningContentProps): JSX.Element {
);
}

function PopoverMessage({
message,
}: {
message: string | ReactNode;
}): JSX.Element {
return (
<section className="warning-content">
<section className="warning-content__summary-section">
<header className="warning-content__summary">
<div className="warning-content__summary-left">
<div className="warning-content__summary-text">
<p className="warning-content__warning-message">{message}</p>
</div>
</div>
</header>
</section>
</section>
);
}

interface WarningPopoverProps extends PopoverProps {
children?: ReactNode;
warningData?: Warning;
message?: string | ReactNode;
warningData: Warning;
}

function WarningPopover({
children,
warningData,
message = '',
...popoverProps
}: WarningPopoverProps): JSX.Element {
const content = useMemo(() => {
if (message) {
return <PopoverMessage message={message} />;
}
if (warningData) {
return <WarningContent warning={warningData} />;
}
return null;
}, [message, warningData]);

return (
<Popover
content={content}
content={<WarningContent warning={warningData} />}
overlayStyle={{ padding: 0, maxWidth: '600px' }}
overlayInnerStyle={{ padding: 0 }}
autoAdjustOverflow
@@ -169,8 +137,6 @@ function WarningPopover({

WarningPopover.defaultProps = {
children: undefined,
warningData: null,
message: null,
};

export default WarningPopover;

@@ -3,9 +3,9 @@ import './styles.scss';
import { Select } from 'antd';
import { DefaultOptionType } from 'antd/es/select';

import { UniversalYAxisUnitMappings } from './constants';
import { UniversalYAxisUnitMappings, Y_AXIS_CATEGORIES } from './constants';
import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
import { getYAxisCategories, mapMetricUnitToUniversalUnit } from './utils';
import { mapMetricUnitToUniversalUnit } from './utils';

function YAxisUnitSelector({
value,
@@ -13,7 +13,6 @@ function YAxisUnitSelector({
placeholder = 'Please select a unit',
loading = false,
'data-testid': dataTestId,
source,
}: YAxisUnitSelectorProps): JSX.Element {
const universalUnit = mapMetricUnitToUniversalUnit(value);

@@ -38,8 +37,6 @@ function YAxisUnitSelector({
return aliases.some((alias) => alias.toLowerCase().includes(search));
};

const categories = getYAxisCategories(source);

return (
<div className="y-axis-unit-selector-component">
<Select
@@ -51,7 +48,7 @@ function YAxisUnitSelector({
loading={loading}
data-testid={dataTestId}
>
{categories.map((category) => (
{Y_AXIS_CATEGORIES.map((category) => (
<Select.OptGroup key={category.name} label={category.name}>
{category.units.map((unit) => (
<Select.Option key={unit.id} value={unit.id}>

@@ -1,6 +1,5 @@
import { fireEvent, render, screen } from '@testing-library/react';

import { YAxisSource } from '../types';
import YAxisUnitSelector from '../YAxisUnitSelector';

describe('YAxisUnitSelector', () => {
@@ -11,13 +10,7 @@ describe('YAxisUnitSelector', () => {
});

it('renders with default placeholder', () => {
render(
<YAxisUnitSelector
value=""
onChange={mockOnChange}
source={YAxisSource.ALERTS}
/>,
);
render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
expect(screen.getByText('Please select a unit')).toBeInTheDocument();
});

@@ -27,20 +20,13 @@ describe('YAxisUnitSelector', () => {
value=""
onChange={mockOnChange}
placeholder="Custom placeholder"
source={YAxisSource.ALERTS}
/>,
);
expect(screen.queryByText('Custom placeholder')).toBeInTheDocument();
});

it('calls onChange when a value is selected', () => {
render(
<YAxisUnitSelector
value=""
onChange={mockOnChange}
source={YAxisSource.ALERTS}
/>,
);
render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
const select = screen.getByRole('combobox');

fireEvent.mouseDown(select);
@@ -55,30 +41,18 @@ describe('YAxisUnitSelector', () => {
});

it('filters options based on search input', () => {
render(
<YAxisUnitSelector
value=""
onChange={mockOnChange}
source={YAxisSource.ALERTS}
/>,
);
render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
const select = screen.getByRole('combobox');

fireEvent.mouseDown(select);
const input = screen.getByRole('combobox');
fireEvent.change(input, { target: { value: 'bytes/sec' } });
fireEvent.change(input, { target: { value: 'byte' } });

expect(screen.getByText('Bytes/sec')).toBeInTheDocument();
});

it('shows all categories and their units', () => {
render(
<YAxisUnitSelector
value=""
onChange={mockOnChange}
source={YAxisSource.ALERTS}
/>,
);
render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
const select = screen.getByRole('combobox');

fireEvent.mouseDown(select);

@@ -1,951 +0,0 @@
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';

import {
AdditionalLabelsMappingForGrafanaUnits,
UniversalUnitToGrafanaUnit,
} from '../constants';
import { formatUniversalUnit } from '../formatter';

describe('formatUniversalUnit', () => {
describe('Time', () => {
test.each([
// Days
[31, UniversalYAxisUnit.DAYS, '4.43 weeks'],
[7, UniversalYAxisUnit.DAYS, '1 week'],
[6, UniversalYAxisUnit.DAYS, '6 days'],
[1, UniversalYAxisUnit.DAYS, '1 day'],
// Hours
[25, UniversalYAxisUnit.HOURS, '1.04 days'],
[23, UniversalYAxisUnit.HOURS, '23 hour'],
[1, UniversalYAxisUnit.HOURS, '1 hour'],
// Minutes
[61, UniversalYAxisUnit.MINUTES, '1.02 hours'],
[60, UniversalYAxisUnit.MINUTES, '1 hour'],
[45, UniversalYAxisUnit.MINUTES, '45 min'],
[1, UniversalYAxisUnit.MINUTES, '1 min'],
// Seconds
[100000, UniversalYAxisUnit.SECONDS, '1.16 days'],
[10065, UniversalYAxisUnit.SECONDS, '2.8 hours'],
[61, UniversalYAxisUnit.SECONDS, '1.02 mins'],
[60, UniversalYAxisUnit.SECONDS, '1 min'],
[12, UniversalYAxisUnit.SECONDS, '12 s'],
[1, UniversalYAxisUnit.SECONDS, '1 s'],
// Milliseconds
[1006, UniversalYAxisUnit.MILLISECONDS, '1.01 s'],
[10000000, UniversalYAxisUnit.MILLISECONDS, '2.78 hours'],
[100006, UniversalYAxisUnit.MICROSECONDS, '100 ms'],
[1, UniversalYAxisUnit.MICROSECONDS, '1 µs'],
[12, UniversalYAxisUnit.MICROSECONDS, '12 µs'],
// Nanoseconds
[10000000000, UniversalYAxisUnit.NANOSECONDS, '10 s'],
[10000006, UniversalYAxisUnit.NANOSECONDS, '10 ms'],
[1006, UniversalYAxisUnit.NANOSECONDS, '1.01 µs'],
[1, UniversalYAxisUnit.NANOSECONDS, '1 ns'],
[12, UniversalYAxisUnit.NANOSECONDS, '12 ns'],
])('formats time value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});

describe('Data', () => {
test.each([
// Bytes
[864, UniversalYAxisUnit.BYTES, '864 B'],
[1000, UniversalYAxisUnit.BYTES, '1 kB'],
[1020, UniversalYAxisUnit.BYTES, '1.02 kB'],
// Kilobytes
[512, UniversalYAxisUnit.KILOBYTES, '512 kB'],
[1000, UniversalYAxisUnit.KILOBYTES, '1 MB'],
[1023, UniversalYAxisUnit.KILOBYTES, '1.02 MB'],
// Megabytes
[777, UniversalYAxisUnit.MEGABYTES, '777 MB'],
[1000, UniversalYAxisUnit.MEGABYTES, '1 GB'],
[1023, UniversalYAxisUnit.MEGABYTES, '1.02 GB'],
// Gigabytes
[432, UniversalYAxisUnit.GIGABYTES, '432 GB'],
[1000, UniversalYAxisUnit.GIGABYTES, '1 TB'],
[1023, UniversalYAxisUnit.GIGABYTES, '1.02 TB'],
// Terabytes
[678, UniversalYAxisUnit.TERABYTES, '678 TB'],
[1000, UniversalYAxisUnit.TERABYTES, '1 PB'],
[1023, UniversalYAxisUnit.TERABYTES, '1.02 PB'],
// Petabytes
[845, UniversalYAxisUnit.PETABYTES, '845 PB'],
[1000, UniversalYAxisUnit.PETABYTES, '1 EB'],
[1023, UniversalYAxisUnit.PETABYTES, '1.02 EB'],
// Exabytes
[921, UniversalYAxisUnit.EXABYTES, '921 EB'],
[1000, UniversalYAxisUnit.EXABYTES, '1 ZB'],
[1023, UniversalYAxisUnit.EXABYTES, '1.02 ZB'],
// Zettabytes
[921, UniversalYAxisUnit.ZETTABYTES, '921 ZB'],
[1000, UniversalYAxisUnit.ZETTABYTES, '1 YB'],
[1023, UniversalYAxisUnit.ZETTABYTES, '1.02 YB'],
// Yottabytes
[921, UniversalYAxisUnit.YOTTABYTES, '921 YB'],
[1000, UniversalYAxisUnit.YOTTABYTES, '1000 YB'],
[1023, UniversalYAxisUnit.YOTTABYTES, '1023 YB'],
])('formats data value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});

describe('Data rate', () => {
test.each([
// Bytes/second
[864, UniversalYAxisUnit.BYTES_SECOND, '864 B/s'],
[1000, UniversalYAxisUnit.BYTES_SECOND, '1 kB/s'],
[1020, UniversalYAxisUnit.BYTES_SECOND, '1.02 kB/s'],
// Kilobytes/second
[512, UniversalYAxisUnit.KILOBYTES_SECOND, '512 kB/s'],
[1000, UniversalYAxisUnit.KILOBYTES_SECOND, '1 MB/s'],
[1023, UniversalYAxisUnit.KILOBYTES_SECOND, '1.02 MB/s'],
// Megabytes/second
[777, UniversalYAxisUnit.MEGABYTES_SECOND, '777 MB/s'],
[1000, UniversalYAxisUnit.MEGABYTES_SECOND, '1 GB/s'],
[1023, UniversalYAxisUnit.MEGABYTES_SECOND, '1.02 GB/s'],
// Gigabytes/second
[432, UniversalYAxisUnit.GIGABYTES_SECOND, '432 GB/s'],
[1000, UniversalYAxisUnit.GIGABYTES_SECOND, '1 TB/s'],
[1023, UniversalYAxisUnit.GIGABYTES_SECOND, '1.02 TB/s'],
// Terabytes/second
[678, UniversalYAxisUnit.TERABYTES_SECOND, '678 TB/s'],
[1000, UniversalYAxisUnit.TERABYTES_SECOND, '1 PB/s'],
[1023, UniversalYAxisUnit.TERABYTES_SECOND, '1.02 PB/s'],
// Petabytes/second
[845, UniversalYAxisUnit.PETABYTES_SECOND, '845 PB/s'],
[1000, UniversalYAxisUnit.PETABYTES_SECOND, '1 EB/s'],
[1023, UniversalYAxisUnit.PETABYTES_SECOND, '1.02 EB/s'],
// Exabytes/second
[921, UniversalYAxisUnit.EXABYTES_SECOND, '921 EB/s'],
[1000, UniversalYAxisUnit.EXABYTES_SECOND, '1 ZB/s'],
[1023, UniversalYAxisUnit.EXABYTES_SECOND, '1.02 ZB/s'],
// Zettabytes/second
[921, UniversalYAxisUnit.ZETTABYTES_SECOND, '921 ZB/s'],
[1000, UniversalYAxisUnit.ZETTABYTES_SECOND, '1 YB/s'],
[1023, UniversalYAxisUnit.ZETTABYTES_SECOND, '1.02 YB/s'],
// Yottabytes/second
[921, UniversalYAxisUnit.YOTTABYTES_SECOND, '921 YB/s'],
[1000, UniversalYAxisUnit.YOTTABYTES_SECOND, '1000 YB/s'],
[1023, UniversalYAxisUnit.YOTTABYTES_SECOND, '1023 YB/s'],
])('formats data value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});

describe('Bit', () => {
test.each([
// Bits
[1, UniversalYAxisUnit.BITS, '1 b'],
[250, UniversalYAxisUnit.BITS, '250 b'],
[1000, UniversalYAxisUnit.BITS, '1 kb'],
[1023, UniversalYAxisUnit.BITS, '1.02 kb'],
// Kilobits
[0.5, UniversalYAxisUnit.KILOBITS, '500 b'],
[375, UniversalYAxisUnit.KILOBITS, '375 kb'],
[1000, UniversalYAxisUnit.KILOBITS, '1 Mb'],
[1023, UniversalYAxisUnit.KILOBITS, '1.02 Mb'],
// Megabits
[0.5, UniversalYAxisUnit.MEGABITS, '500 kb'],
[640, UniversalYAxisUnit.MEGABITS, '640 Mb'],
[1000, UniversalYAxisUnit.MEGABITS, '1 Gb'],
[1023, UniversalYAxisUnit.MEGABITS, '1.02 Gb'],
// Gigabits
[0.5, UniversalYAxisUnit.GIGABITS, '500 Mb'],
[875, UniversalYAxisUnit.GIGABITS, '875 Gb'],
[1000, UniversalYAxisUnit.GIGABITS, '1 Tb'],
[1023, UniversalYAxisUnit.GIGABITS, '1.02 Tb'],
// Terabits
[0.5, UniversalYAxisUnit.TERABITS, '500 Gb'],
[430, UniversalYAxisUnit.TERABITS, '430 Tb'],
[1000, UniversalYAxisUnit.TERABITS, '1 Pb'],
[1023, UniversalYAxisUnit.TERABITS, '1.02 Pb'],
// Petabits
[0.5, UniversalYAxisUnit.PETABITS, '500 Tb'],
[590, UniversalYAxisUnit.PETABITS, '590 Pb'],
[1000, UniversalYAxisUnit.PETABITS, '1 Eb'],
[1023, UniversalYAxisUnit.PETABITS, '1.02 Eb'],
// Exabits
[0.5, UniversalYAxisUnit.EXABITS, '500 Pb'],
[715, UniversalYAxisUnit.EXABITS, '715 Eb'],
[1000, UniversalYAxisUnit.EXABITS, '1 Zb'],
[1023, UniversalYAxisUnit.EXABITS, '1.02 Zb'],
// Zettabits
[0.5, UniversalYAxisUnit.ZETTABITS, '500 Eb'],
[840, UniversalYAxisUnit.ZETTABITS, '840 Zb'],
[1000, UniversalYAxisUnit.ZETTABITS, '1 Yb'],
[1023, UniversalYAxisUnit.ZETTABITS, '1.02 Yb'],
// Yottabits
[0.5, UniversalYAxisUnit.YOTTABITS, '500 Zb'],
[965, UniversalYAxisUnit.YOTTABITS, '965 Yb'],
[1000, UniversalYAxisUnit.YOTTABITS, '1000 Yb'],
[1023, UniversalYAxisUnit.YOTTABITS, '1023 Yb'],
])('formats bit value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});

describe('Bit rate', () => {
test.each([
// Bits/second
[512, UniversalYAxisUnit.BITS_SECOND, '512 b/s'],
[1000, UniversalYAxisUnit.BITS_SECOND, '1 kb/s'],
[1023, UniversalYAxisUnit.BITS_SECOND, '1.02 kb/s'],
// Kilobits/second
[0.5, UniversalYAxisUnit.KILOBITS_SECOND, '500 b/s'],
[512, UniversalYAxisUnit.KILOBITS_SECOND, '512 kb/s'],
[1000, UniversalYAxisUnit.KILOBITS_SECOND, '1 Mb/s'],
[1023, UniversalYAxisUnit.KILOBITS_SECOND, '1.02 Mb/s'],
// Megabits/second
[0.5, UniversalYAxisUnit.MEGABITS_SECOND, '500 kb/s'],
[512, UniversalYAxisUnit.MEGABITS_SECOND, '512 Mb/s'],
[1000, UniversalYAxisUnit.MEGABITS_SECOND, '1 Gb/s'],
[1023, UniversalYAxisUnit.MEGABITS_SECOND, '1.02 Gb/s'],
// Gigabits/second
[0.5, UniversalYAxisUnit.GIGABITS_SECOND, '500 Mb/s'],
[512, UniversalYAxisUnit.GIGABITS_SECOND, '512 Gb/s'],
[1000, UniversalYAxisUnit.GIGABITS_SECOND, '1 Tb/s'],
[1023, UniversalYAxisUnit.GIGABITS_SECOND, '1.02 Tb/s'],
// Terabits/second
[0.5, UniversalYAxisUnit.TERABITS_SECOND, '500 Gb/s'],
[512, UniversalYAxisUnit.TERABITS_SECOND, '512 Tb/s'],
[1000, UniversalYAxisUnit.TERABITS_SECOND, '1 Pb/s'],
[1023, UniversalYAxisUnit.TERABITS_SECOND, '1.02 Pb/s'],
// Petabits/second
[0.5, UniversalYAxisUnit.PETABITS_SECOND, '500 Tb/s'],
[512, UniversalYAxisUnit.PETABITS_SECOND, '512 Pb/s'],
[1000, UniversalYAxisUnit.PETABITS_SECOND, '1 Eb/s'],
[1023, UniversalYAxisUnit.PETABITS_SECOND, '1.02 Eb/s'],
// Exabits/second
[512, UniversalYAxisUnit.EXABITS_SECOND, '512 Eb/s'],
[1000, UniversalYAxisUnit.EXABITS_SECOND, '1 Zb/s'],
[1023, UniversalYAxisUnit.EXABITS_SECOND, '1.02 Zb/s'],
// Zettabits/second
[0.5, UniversalYAxisUnit.ZETTABITS_SECOND, '500 Eb/s'],
[512, UniversalYAxisUnit.ZETTABITS_SECOND, '512 Zb/s'],
[1000, UniversalYAxisUnit.ZETTABITS_SECOND, '1 Yb/s'],
[1023, UniversalYAxisUnit.ZETTABITS_SECOND, '1.02 Yb/s'],
// Yottabits/second
[0.5, UniversalYAxisUnit.YOTTABITS_SECOND, '500 Zb/s'],
[512, UniversalYAxisUnit.YOTTABITS_SECOND, '512 Yb/s'],
[1000, UniversalYAxisUnit.YOTTABITS_SECOND, '1000 Yb/s'],
[1023, UniversalYAxisUnit.YOTTABITS_SECOND, '1023 Yb/s'],
])('formats bit rate value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});

describe('Count', () => {
test.each([
[100, UniversalYAxisUnit.COUNT, '100'],
[875, UniversalYAxisUnit.COUNT, '875'],
[1000, UniversalYAxisUnit.COUNT, '1 K'],
[2500, UniversalYAxisUnit.COUNT, '2.5 K'],
[10000, UniversalYAxisUnit.COUNT, '10 K'],
[25000, UniversalYAxisUnit.COUNT, '25 K'],
[100000, UniversalYAxisUnit.COUNT, '100 K'],
[1000000, UniversalYAxisUnit.COUNT, '1 Mil'],
[10000000, UniversalYAxisUnit.COUNT, '10 Mil'],
[100000000, UniversalYAxisUnit.COUNT, '100 Mil'],
[1000000000, UniversalYAxisUnit.COUNT, '1 Bil'],
[10000000000, UniversalYAxisUnit.COUNT, '10 Bil'],
[100000000000, UniversalYAxisUnit.COUNT, '100 Bil'],
[1000000000000, UniversalYAxisUnit.COUNT, '1 Tri'],
[10000000000000, UniversalYAxisUnit.COUNT, '10 Tri'],
])('formats count value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});

test.each([
[100, UniversalYAxisUnit.COUNT_SECOND, '100 c/s'],
[875, UniversalYAxisUnit.COUNT_SECOND, '875 c/s'],
[1000, UniversalYAxisUnit.COUNT_SECOND, '1K c/s'],
[2500, UniversalYAxisUnit.COUNT_SECOND, '2.5K c/s'],
[10000, UniversalYAxisUnit.COUNT_SECOND, '10K c/s'],
[25000, UniversalYAxisUnit.COUNT_SECOND, '25K c/s'],
])('formats count per time value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});

test.each([
[100, UniversalYAxisUnit.COUNT_MINUTE, '100 c/m'],
[875, UniversalYAxisUnit.COUNT_MINUTE, '875 c/m'],
[1000, UniversalYAxisUnit.COUNT_MINUTE, '1K c/m'],
[2500, UniversalYAxisUnit.COUNT_MINUTE, '2.5K c/m'],
[10000, UniversalYAxisUnit.COUNT_MINUTE, '10K c/m'],
[25000, UniversalYAxisUnit.COUNT_MINUTE, '25K c/m'],
])('formats count per time value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});

describe('Operations units', () => {
test.each([
[780, UniversalYAxisUnit.OPS_SECOND, '780 ops/s'],
||||
[1000, UniversalYAxisUnit.OPS_SECOND, '1K ops/s'],
|
||||
[520, UniversalYAxisUnit.OPS_MINUTE, '520 ops/m'],
|
||||
[1000, UniversalYAxisUnit.OPS_MINUTE, '1K ops/m'],
|
||||
[2500, UniversalYAxisUnit.OPS_MINUTE, '2.5K ops/m'],
|
||||
[10000, UniversalYAxisUnit.OPS_MINUTE, '10K ops/m'],
|
||||
[25000, UniversalYAxisUnit.OPS_MINUTE, '25K ops/m'],
|
||||
])(
|
||||
'formats operations per time value %s %s as %s',
|
||||
(value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
describe('Request units', () => {
|
||||
test.each([
|
||||
[615, UniversalYAxisUnit.REQUESTS_SECOND, '615 req/s'],
|
||||
[1000, UniversalYAxisUnit.REQUESTS_SECOND, '1K req/s'],
|
||||
[480, UniversalYAxisUnit.REQUESTS_MINUTE, '480 req/m'],
|
||||
[1000, UniversalYAxisUnit.REQUESTS_MINUTE, '1K req/m'],
|
||||
[2500, UniversalYAxisUnit.REQUESTS_MINUTE, '2.5K req/m'],
|
||||
[10000, UniversalYAxisUnit.REQUESTS_MINUTE, '10K req/m'],
|
||||
[25000, UniversalYAxisUnit.REQUESTS_MINUTE, '25K req/m'],
|
||||
])('formats requests per time value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Read/Write units', () => {
|
||||
test.each([
|
||||
[505, UniversalYAxisUnit.READS_SECOND, '505 rd/s'],
|
||||
[1000, UniversalYAxisUnit.READS_SECOND, '1K rd/s'],
|
||||
[610, UniversalYAxisUnit.WRITES_SECOND, '610 wr/s'],
|
||||
[1000, UniversalYAxisUnit.WRITES_SECOND, '1K wr/s'],
|
||||
[715, UniversalYAxisUnit.READS_MINUTE, '715 rd/m'],
|
||||
[1000, UniversalYAxisUnit.READS_MINUTE, '1K rd/m'],
|
||||
[2500, UniversalYAxisUnit.READS_MINUTE, '2.5K rd/m'],
|
||||
[10000, UniversalYAxisUnit.READS_MINUTE, '10K rd/m'],
|
||||
[25000, UniversalYAxisUnit.READS_MINUTE, '25K rd/m'],
|
||||
[830, UniversalYAxisUnit.WRITES_MINUTE, '830 wr/m'],
|
||||
[1000, UniversalYAxisUnit.WRITES_MINUTE, '1K wr/m'],
|
||||
[2500, UniversalYAxisUnit.WRITES_MINUTE, '2.5K wr/m'],
|
||||
[10000, UniversalYAxisUnit.WRITES_MINUTE, '10K wr/m'],
|
||||
[25000, UniversalYAxisUnit.WRITES_MINUTE, '25K wr/m'],
|
||||
])(
|
||||
'formats reads and writes per time value %s %s as %s',
|
||||
(value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
describe('IO Operations units', () => {
|
||||
test.each([
|
||||
[777, UniversalYAxisUnit.IOOPS_SECOND, '777 io/s'],
|
||||
[1000, UniversalYAxisUnit.IOOPS_SECOND, '1K io/s'],
|
||||
[2500, UniversalYAxisUnit.IOOPS_SECOND, '2.5K io/s'],
|
||||
[10000, UniversalYAxisUnit.IOOPS_SECOND, '10K io/s'],
|
||||
[25000, UniversalYAxisUnit.IOOPS_SECOND, '25K io/s'],
|
||||
])('formats IOPS value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Percent units', () => {
|
||||
it('formats percent as-is', () => {
|
||||
expect(formatUniversalUnit(456, UniversalYAxisUnit.PERCENT)).toBe('456%');
|
||||
});
|
||||
|
||||
it('multiplies percent_unit by 100', () => {
|
||||
expect(formatUniversalUnit(9, UniversalYAxisUnit.PERCENT_UNIT)).toBe('900%');
|
||||
});
|
||||
});
|
||||
|
||||
describe('None unit', () => {
|
||||
it('formats as plain number', () => {
|
||||
expect(formatUniversalUnit(742, UniversalYAxisUnit.NONE)).toBe('742');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Time (additional)', () => {
|
||||
test.each([
|
||||
[900, UniversalYAxisUnit.DURATION_MS, '900 milliseconds'],
|
||||
[1000, UniversalYAxisUnit.DURATION_MS, '1 second'],
|
||||
[1, UniversalYAxisUnit.DURATION_MS, '1 millisecond'],
|
||||
[900, UniversalYAxisUnit.DURATION_S, '15 minutes'],
|
||||
[1, UniversalYAxisUnit.DURATION_HMS, '00:00:01'],
|
||||
[90005, UniversalYAxisUnit.DURATION_HMS, '25:00:05'],
|
||||
[90005, UniversalYAxisUnit.DURATION_DHMS, '1 d 01:00:05'],
|
||||
[900, UniversalYAxisUnit.TIMETICKS, '9 s'],
|
||||
[1, UniversalYAxisUnit.TIMETICKS, '10 ms'],
|
||||
[900, UniversalYAxisUnit.CLOCK_MS, '900ms'],
|
||||
[1, UniversalYAxisUnit.CLOCK_MS, '001ms'],
|
||||
[1, UniversalYAxisUnit.CLOCK_S, '01s:000ms'],
|
||||
[900, UniversalYAxisUnit.CLOCK_S, '15m:00s:000ms'],
|
||||
[900, UniversalYAxisUnit.TIME_HERTZ, '900 Hz'],
|
||||
[1000, UniversalYAxisUnit.TIME_HERTZ, '1 kHz'],
|
||||
[1000000, UniversalYAxisUnit.TIME_HERTZ, '1 MHz'],
|
||||
[1000000000, UniversalYAxisUnit.TIME_HERTZ, '1 GHz'],
|
||||
[1008, UniversalYAxisUnit.TIME_HERTZ, '1.01 kHz'],
|
||||
])('formats duration value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data (IEC/Binary)', () => {
|
||||
test.each([
|
||||
// Bytes
|
||||
[900, UniversalYAxisUnit.BYTES_IEC, '900 B'],
|
||||
[1024, UniversalYAxisUnit.BYTES_IEC, '1 KiB'],
|
||||
[1080, UniversalYAxisUnit.BYTES_IEC, '1.05 KiB'],
|
||||
// Kibibytes
|
||||
[900, UniversalYAxisUnit.KIBIBYTES, '900 KiB'],
|
||||
[1024, UniversalYAxisUnit.KIBIBYTES, '1 MiB'],
|
||||
[1080, UniversalYAxisUnit.KIBIBYTES, '1.05 MiB'],
|
||||
// Mebibytes
|
||||
[900, UniversalYAxisUnit.MEBIBYTES, '900 MiB'],
|
||||
[1024, UniversalYAxisUnit.MEBIBYTES, '1 GiB'],
|
||||
[1080, UniversalYAxisUnit.MEBIBYTES, '1.05 GiB'],
|
||||
// Gibibytes
|
||||
[900, UniversalYAxisUnit.GIBIBYTES, '900 GiB'],
|
||||
[1024, UniversalYAxisUnit.GIBIBYTES, '1 TiB'],
|
||||
[1080, UniversalYAxisUnit.GIBIBYTES, '1.05 TiB'],
|
||||
// Tebibytes
|
||||
[900, UniversalYAxisUnit.TEBIBYTES, '900 TiB'],
|
||||
[1024, UniversalYAxisUnit.TEBIBYTES, '1 PiB'],
|
||||
[1080, UniversalYAxisUnit.TEBIBYTES, '1.05 PiB'],
|
||||
// Pebibytes
|
||||
[900, UniversalYAxisUnit.PEBIBYTES, '900 PiB'],
|
||||
[1024, UniversalYAxisUnit.PEBIBYTES, '1 EiB'],
|
||||
[1080, UniversalYAxisUnit.PEBIBYTES, '1.05 EiB'],
|
||||
// Exbibytes
|
||||
[900, UniversalYAxisUnit.EXBIBYTES, '900 EiB'],
|
||||
[1024, UniversalYAxisUnit.EXBIBYTES, '1 ZiB'],
|
||||
[1080, UniversalYAxisUnit.EXBIBYTES, '1.05 ZiB'],
|
||||
// Zebibytes
|
||||
[900, UniversalYAxisUnit.ZEBIBYTES, '900 ZiB'],
|
||||
[1024, UniversalYAxisUnit.ZEBIBYTES, '1 YiB'],
|
||||
[1080, UniversalYAxisUnit.ZEBIBYTES, '1.05 YiB'],
|
||||
// Yobibytes
|
||||
[900, UniversalYAxisUnit.YOBIBYTES, '900 YiB'],
|
||||
[1024, UniversalYAxisUnit.YOBIBYTES, '1024 YiB'],
|
||||
])('formats IEC bytes value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Data Rate (IEC/Binary)', () => {
|
||||
test.each([
|
||||
// Kibibytes/second
|
||||
[900, UniversalYAxisUnit.KIBIBYTES_SECOND, '900 KiB/s'],
|
||||
[1024, UniversalYAxisUnit.KIBIBYTES_SECOND, '1 MiB/s'],
|
||||
[1080, UniversalYAxisUnit.KIBIBYTES_SECOND, '1.05 MiB/s'],
|
||||
// Mebibytes/second
|
||||
[900, UniversalYAxisUnit.MEBIBYTES_SECOND, '900 MiB/s'],
|
||||
[1024, UniversalYAxisUnit.MEBIBYTES_SECOND, '1 GiB/s'],
|
||||
[1080, UniversalYAxisUnit.MEBIBYTES_SECOND, '1.05 GiB/s'],
|
||||
// Gibibytes/second
|
||||
[900, UniversalYAxisUnit.GIBIBYTES_SECOND, '900 GiB/s'],
|
||||
[1024, UniversalYAxisUnit.GIBIBYTES_SECOND, '1 TiB/s'],
|
||||
[1080, UniversalYAxisUnit.GIBIBYTES_SECOND, '1.05 TiB/s'],
|
||||
// Tebibytes/second
|
||||
[900, UniversalYAxisUnit.TEBIBYTES_SECOND, '900 TiB/s'],
|
||||
[1024, UniversalYAxisUnit.TEBIBYTES_SECOND, '1 PiB/s'],
|
||||
[1080, UniversalYAxisUnit.TEBIBYTES_SECOND, '1.05 PiB/s'],
|
||||
// Pebibytes/second
|
||||
[900, UniversalYAxisUnit.PEBIBYTES_SECOND, '900 PiB/s'],
|
||||
[1024, UniversalYAxisUnit.PEBIBYTES_SECOND, '1 EiB/s'],
|
||||
[1080, UniversalYAxisUnit.PEBIBYTES_SECOND, '1.05 EiB/s'],
|
||||
// Exbibytes/second
|
||||
[900, UniversalYAxisUnit.EXBIBYTES_SECOND, '900 EiB/s'],
|
||||
[1024, UniversalYAxisUnit.EXBIBYTES_SECOND, '1 ZiB/s'],
|
||||
[1080, UniversalYAxisUnit.EXBIBYTES_SECOND, '1.05 ZiB/s'],
|
||||
// Zebibytes/second
|
||||
[900, UniversalYAxisUnit.ZEBIBYTES_SECOND, '900 ZiB/s'],
|
||||
[1024, UniversalYAxisUnit.ZEBIBYTES_SECOND, '1 YiB/s'],
|
||||
[1080, UniversalYAxisUnit.ZEBIBYTES_SECOND, '1.05 YiB/s'],
|
||||
// Yobibytes/second
|
||||
[900, UniversalYAxisUnit.YOBIBYTES_SECOND, '900 YiB/s'],
|
||||
[1024, UniversalYAxisUnit.YOBIBYTES_SECOND, '1024 YiB/s'],
|
||||
[1080, UniversalYAxisUnit.YOBIBYTES_SECOND, '1080 YiB/s'],
|
||||
// Packets/second
|
||||
[900, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '900 p/s'],
|
||||
[1000, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '1 kp/s'],
|
||||
[1080, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '1.08 kp/s'],
|
||||
])('formats IEC byte rates value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Bits (IEC)', () => {
|
||||
test.each([
|
||||
[900, UniversalYAxisUnit.BITS_IEC, '900 b'],
|
||||
[1024, UniversalYAxisUnit.BITS_IEC, '1 Kib'],
|
||||
[1080, UniversalYAxisUnit.BITS_IEC, '1.05 Kib'],
|
||||
])('formats IEC bits value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Hash Rate', () => {
|
||||
test.each([
|
||||
// Hashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '412 H/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '1 kH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '1.02 kH/s'],
|
||||
// Kilohashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '412 kH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '1 MH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '1.02 MH/s'],
|
||||
// Megahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '412 MH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '1 GH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '1.02 GH/s'],
|
||||
// Gigahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '412 GH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '1 TH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '1.02 TH/s'],
|
||||
// Terahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '412 TH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '1 PH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '1.02 PH/s'],
|
||||
// Petahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '412 PH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '1 EH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '1.02 EH/s'],
|
||||
// Exahashes/second
|
||||
[412, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '412 EH/s'],
|
||||
[1000, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '1 ZH/s'],
|
||||
[1023, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '1.02 ZH/s'],
|
||||
])('formats hash rate value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Miscellaneous', () => {
|
||||
test.each([
|
||||
[742, UniversalYAxisUnit.MISC_STRING, '742'],
|
||||
[688, UniversalYAxisUnit.MISC_SHORT, '688'],
|
||||
[555, UniversalYAxisUnit.MISC_HUMIDITY, '555 %H'],
|
||||
[812, UniversalYAxisUnit.MISC_DECIBEL, '812 dB'],
|
||||
[1024, UniversalYAxisUnit.MISC_HEXADECIMAL, '400'],
|
||||
[1024, UniversalYAxisUnit.MISC_HEXADECIMAL_0X, '0x400'],
|
||||
[900, UniversalYAxisUnit.MISC_SCIENTIFIC_NOTATION, '9e+2'],
|
||||
[678, UniversalYAxisUnit.MISC_LOCALE_FORMAT, '678'],
|
||||
[444, UniversalYAxisUnit.MISC_PIXELS, '444 px'],
|
||||
])('formats miscellaneous value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Acceleration', () => {
|
||||
test.each([
|
||||
[
|
||||
875,
|
||||
UniversalYAxisUnit.ACCELERATION_METERS_PER_SECOND_SQUARED,
|
||||
'875 m/sec²',
|
||||
],
|
||||
[640, UniversalYAxisUnit.ACCELERATION_FEET_PER_SECOND_SQUARED, '640 f/sec²'],
|
||||
[512, UniversalYAxisUnit.ACCELERATION_G_UNIT, '512 g'],
|
||||
[
|
||||
2500,
|
||||
UniversalYAxisUnit.ACCELERATION_METERS_PER_SECOND_SQUARED,
|
||||
'2500 m/sec²',
|
||||
],
|
||||
])('formats acceleration value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Angular', () => {
|
||||
test.each([
|
||||
[415, UniversalYAxisUnit.ANGULAR_DEGREE, '415 °'],
|
||||
[732, UniversalYAxisUnit.ANGULAR_RADIAN, '732 rad'],
|
||||
[128, UniversalYAxisUnit.ANGULAR_GRADIAN, '128 grad'],
|
||||
[560, UniversalYAxisUnit.ANGULAR_ARC_MINUTE, '560 arcmin'],
|
||||
[945, UniversalYAxisUnit.ANGULAR_ARC_SECOND, '945 arcsec'],
|
||||
])('formats angular value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Area', () => {
|
||||
test.each([
|
||||
[210, UniversalYAxisUnit.AREA_SQUARE_METERS, '210 m²'],
|
||||
[152, UniversalYAxisUnit.AREA_SQUARE_FEET, '152 ft²'],
|
||||
[64, UniversalYAxisUnit.AREA_SQUARE_MILES, '64 mi²'],
|
||||
])('formats area value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('FLOPs', () => {
|
||||
test.each([
|
||||
// FLOPS
|
||||
[150, UniversalYAxisUnit.FLOPS_FLOPS, '150 FLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_FLOPS, '1 kFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_FLOPS, '1.08 kFLOPS'],
|
||||
// MFLOPS
|
||||
[275, UniversalYAxisUnit.FLOPS_MFLOPS, '275 MFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_MFLOPS, '1 GFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_MFLOPS, '1.08 GFLOPS'],
|
||||
// GFLOPS
|
||||
[640, UniversalYAxisUnit.FLOPS_GFLOPS, '640 GFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_GFLOPS, '1 TFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_GFLOPS, '1.08 TFLOPS'],
|
||||
// TFLOPS
|
||||
[875, UniversalYAxisUnit.FLOPS_TFLOPS, '875 TFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_TFLOPS, '1 PFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_TFLOPS, '1.08 PFLOPS'],
|
||||
// PFLOPS
|
||||
[430, UniversalYAxisUnit.FLOPS_PFLOPS, '430 PFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_PFLOPS, '1 EFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_PFLOPS, '1.08 EFLOPS'],
|
||||
// EFLOPS
|
||||
[590, UniversalYAxisUnit.FLOPS_EFLOPS, '590 EFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_EFLOPS, '1 ZFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_EFLOPS, '1.08 ZFLOPS'],
|
||||
// ZFLOPS
|
||||
[715, UniversalYAxisUnit.FLOPS_ZFLOPS, '715 ZFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_ZFLOPS, '1 YFLOPS'],
|
||||
[1080, UniversalYAxisUnit.FLOPS_ZFLOPS, '1.08 YFLOPS'],
|
||||
// YFLOPS
|
||||
[840, UniversalYAxisUnit.FLOPS_YFLOPS, '840 YFLOPS'],
|
||||
[1000, UniversalYAxisUnit.FLOPS_YFLOPS, '1000 YFLOPS'],
|
||||
])('formats FLOPs value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Concentration', () => {
|
||||
test.each([
|
||||
[415, UniversalYAxisUnit.CONCENTRATION_PPM, '415 ppm'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_PPM, '1000 ppm'],
|
||||
[732, UniversalYAxisUnit.CONCENTRATION_PPB, '732 ppb'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_PPB, '1000 ppb'],
|
||||
[128, UniversalYAxisUnit.CONCENTRATION_NG_M3, '128 ng/m³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_NG_M3, '1000 ng/m³'],
|
||||
[560, UniversalYAxisUnit.CONCENTRATION_NG_NORMAL_CUBIC_METER, '560 ng/Nm³'],
|
||||
[
|
||||
1000,
|
||||
UniversalYAxisUnit.CONCENTRATION_NG_NORMAL_CUBIC_METER,
|
||||
'1000 ng/Nm³',
|
||||
],
|
||||
[945, UniversalYAxisUnit.CONCENTRATION_UG_M3, '945 μg/m³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_UG_M3, '1000 μg/m³'],
|
||||
[210, UniversalYAxisUnit.CONCENTRATION_UG_NORMAL_CUBIC_METER, '210 μg/Nm³'],
|
||||
[
|
||||
1000,
|
||||
UniversalYAxisUnit.CONCENTRATION_UG_NORMAL_CUBIC_METER,
|
||||
'1000 μg/Nm³',
|
||||
],
|
||||
[152, UniversalYAxisUnit.CONCENTRATION_MG_M3, '152 mg/m³'],
|
||||
[64, UniversalYAxisUnit.CONCENTRATION_MG_NORMAL_CUBIC_METER, '64 mg/Nm³'],
|
||||
[508, UniversalYAxisUnit.CONCENTRATION_G_M3, '508 g/m³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_G_M3, '1000 g/m³'],
|
||||
[377, UniversalYAxisUnit.CONCENTRATION_G_NORMAL_CUBIC_METER, '377 g/Nm³'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_G_NORMAL_CUBIC_METER, '1000 g/Nm³'],
|
||||
[286, UniversalYAxisUnit.CONCENTRATION_MG_PER_DL, '286 mg/dL'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_MG_PER_DL, '1000 mg/dL'],
|
||||
[675, UniversalYAxisUnit.CONCENTRATION_MMOL_PER_L, '675 mmol/L'],
|
||||
[1000, UniversalYAxisUnit.CONCENTRATION_MMOL_PER_L, '1000 mmol/L'],
|
||||
])('formats concentration value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Currency', () => {
|
||||
test.each([
|
||||
[812, UniversalYAxisUnit.CURRENCY_USD, '$812'],
|
||||
[645, UniversalYAxisUnit.CURRENCY_GBP, '£645'],
|
||||
[731, UniversalYAxisUnit.CURRENCY_EUR, '€731'],
|
||||
[508, UniversalYAxisUnit.CURRENCY_JPY, '¥508'],
|
||||
[963, UniversalYAxisUnit.CURRENCY_RUB, '₽963'],
|
||||
[447, UniversalYAxisUnit.CURRENCY_UAH, '₴447'],
|
||||
[592, UniversalYAxisUnit.CURRENCY_BRL, 'R$592'],
|
||||
[375, UniversalYAxisUnit.CURRENCY_DKK, '375kr'],
|
||||
[418, UniversalYAxisUnit.CURRENCY_ISK, '418kr'],
|
||||
[536, UniversalYAxisUnit.CURRENCY_NOK, '536kr'],
|
||||
[689, UniversalYAxisUnit.CURRENCY_SEK, '689kr'],
|
||||
[724, UniversalYAxisUnit.CURRENCY_CZK, 'czk724'],
|
||||
[381, UniversalYAxisUnit.CURRENCY_CHF, 'CHF381'],
|
||||
[267, UniversalYAxisUnit.CURRENCY_PLN, 'PLN267'],
|
||||
[154, UniversalYAxisUnit.CURRENCY_BTC, '฿154'],
|
||||
[999, UniversalYAxisUnit.CURRENCY_MBTC, 'mBTC999'],
|
||||
[423, UniversalYAxisUnit.CURRENCY_UBTC, 'μBTC423'],
|
||||
[611, UniversalYAxisUnit.CURRENCY_ZAR, 'R611'],
|
||||
[782, UniversalYAxisUnit.CURRENCY_INR, '₹782'],
|
||||
[834, UniversalYAxisUnit.CURRENCY_KRW, '₩834'],
|
||||
[455, UniversalYAxisUnit.CURRENCY_IDR, 'Rp455'],
|
||||
[978, UniversalYAxisUnit.CURRENCY_PHP, 'PHP978'],
|
||||
[366, UniversalYAxisUnit.CURRENCY_VND, '366đ'],
|
||||
])('formats currency value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Datetime', () => {
|
||||
it('formats datetime units', () => {
|
||||
expect(formatUniversalUnit(900, UniversalYAxisUnit.DATETIME_FROM_NOW)).toBe(
|
||||
'56 years ago',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Power/Electrical', () => {
|
||||
test.each([
|
||||
[715, UniversalYAxisUnit.POWER_WATT, '715 W'],
|
||||
[1000, UniversalYAxisUnit.POWER_WATT, '1 kW'],
|
||||
[1080, UniversalYAxisUnit.POWER_WATT, '1.08 kW'],
|
||||
[438, UniversalYAxisUnit.POWER_KILOWATT, '438 kW'],
|
||||
[1000, UniversalYAxisUnit.POWER_KILOWATT, '1 MW'],
|
||||
[1080, UniversalYAxisUnit.POWER_KILOWATT, '1.08 MW'],
|
||||
[582, UniversalYAxisUnit.POWER_MEGAWATT, '582 MW'],
|
||||
[1000, UniversalYAxisUnit.POWER_MEGAWATT, '1 GW'],
|
||||
[1080, UniversalYAxisUnit.POWER_MEGAWATT, '1.08 GW'],
|
||||
[267, UniversalYAxisUnit.POWER_GIGAWATT, '267 GW'],
|
||||
[853, UniversalYAxisUnit.POWER_MILLIWATT, '853 mW'],
|
||||
[693, UniversalYAxisUnit.POWER_WATT_PER_SQUARE_METER, '693 W/m²'],
|
||||
[544, UniversalYAxisUnit.POWER_VOLT_AMPERE, '544 VA'],
|
||||
[812, UniversalYAxisUnit.POWER_KILOVOLT_AMPERE, '812 kVA'],
|
||||
[478, UniversalYAxisUnit.POWER_VOLT_AMPERE_REACTIVE, '478 VAr'],
|
||||
[365, UniversalYAxisUnit.POWER_KILOVOLT_AMPERE_REACTIVE, '365 kVAr'],
|
||||
[629, UniversalYAxisUnit.POWER_WATT_HOUR, '629 Wh'],
|
||||
[471, UniversalYAxisUnit.POWER_WATT_HOUR_PER_KG, '471 Wh/kg'],
|
||||
[557, UniversalYAxisUnit.POWER_KILOWATT_HOUR, '557 kWh'],
|
||||
[389, UniversalYAxisUnit.POWER_KILOWATT_MINUTE, '389 kW-Min'],
|
||||
[642, UniversalYAxisUnit.POWER_AMPERE_HOUR, '642 Ah'],
|
||||
[731, UniversalYAxisUnit.POWER_KILOAMPERE_HOUR, '731 kAh'],
|
||||
[815, UniversalYAxisUnit.POWER_MILLIAMPERE_HOUR, '815 mAh'],
|
||||
[963, UniversalYAxisUnit.POWER_JOULE, '963 J'],
|
||||
[506, UniversalYAxisUnit.POWER_ELECTRON_VOLT, '506 eV'],
|
||||
[298, UniversalYAxisUnit.POWER_AMPERE, '298 A'],
|
||||
[654, UniversalYAxisUnit.POWER_KILOAMPERE, '654 kA'],
|
||||
[187, UniversalYAxisUnit.POWER_MILLIAMPERE, '187 mA'],
|
||||
[472, UniversalYAxisUnit.POWER_VOLT, '472 V'],
|
||||
[538, UniversalYAxisUnit.POWER_KILOVOLT, '538 kV'],
|
||||
[226, UniversalYAxisUnit.POWER_MILLIVOLT, '226 mV'],
|
||||
[592, UniversalYAxisUnit.POWER_DECIBEL_MILLIWATT, '592 dBm'],
|
||||
[333, UniversalYAxisUnit.POWER_OHM, '333 Ω'],
|
||||
[447, UniversalYAxisUnit.POWER_KILOOHM, '447 kΩ'],
|
||||
[781, UniversalYAxisUnit.POWER_MEGAOHM, '781 MΩ'],
|
||||
[650, UniversalYAxisUnit.POWER_FARAD, '650 F'],
|
||||
[512, UniversalYAxisUnit.POWER_MICROFARAD, '512 µF'],
|
||||
[478, UniversalYAxisUnit.POWER_NANOFARAD, '478 nF'],
|
||||
[341, UniversalYAxisUnit.POWER_PICOFARAD, '341 pF'],
|
||||
[129, UniversalYAxisUnit.POWER_FEMTOFARAD, '129 fF'],
|
||||
[904, UniversalYAxisUnit.POWER_HENRY, '904 H'],
|
||||
[1000, UniversalYAxisUnit.POWER_HENRY, '1 kH'],
|
||||
[275, UniversalYAxisUnit.POWER_MILLIHENRY, '275 mH'],
|
||||
[618, UniversalYAxisUnit.POWER_MICROHENRY, '618 µH'],
|
||||
[1000, UniversalYAxisUnit.POWER_MICROHENRY, '1 mH'],
|
||||
[1080, UniversalYAxisUnit.POWER_MICROHENRY, '1.08 mH'],
|
||||
[459, UniversalYAxisUnit.POWER_LUMENS, '459 Lm'],
|
||||
[1000, UniversalYAxisUnit.POWER_LUMENS, '1 kLm'],
|
||||
[1080, UniversalYAxisUnit.POWER_LUMENS, '1.08 kLm'],
|
||||
])('formats power value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Flow', () => {
|
||||
test.each([
|
||||
[512, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '512 gpm'],
|
||||
[1000, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '1000 gpm'],
|
||||
[678, UniversalYAxisUnit.FLOW_CUBIC_METERS_PER_SECOND, '678 cms'],
|
||||
[1000, UniversalYAxisUnit.FLOW_CUBIC_METERS_PER_SECOND, '1000 cms'],
|
||||
[245, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_SECOND, '245 cfs'],
|
||||
[389, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_MINUTE, '389 cfm'],
|
||||
[1000, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_MINUTE, '1000 cfm'],
|
||||
[731, UniversalYAxisUnit.FLOW_LITERS_PER_HOUR, '731 L/h'],
|
||||
[1000, UniversalYAxisUnit.FLOW_LITERS_PER_HOUR, '1000 L/h'],
|
||||
[864, UniversalYAxisUnit.FLOW_LITERS_PER_MINUTE, '864 L/min'],
|
||||
[1000, UniversalYAxisUnit.FLOW_LITERS_PER_MINUTE, '1000 L/min'],
|
||||
[150, UniversalYAxisUnit.FLOW_MILLILITERS_PER_MINUTE, '150 mL/min'],
|
||||
[1000, UniversalYAxisUnit.FLOW_MILLILITERS_PER_MINUTE, '1000 mL/min'],
|
||||
[947, UniversalYAxisUnit.FLOW_LUX, '947 lux'],
|
||||
[1000, UniversalYAxisUnit.FLOW_LUX, '1000 lux'],
|
||||
])('formats flow value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Force', () => {
|
||||
test.each([
|
||||
[845, UniversalYAxisUnit.FORCE_NEWTON_METERS, '845 Nm'],
|
||||
[1000, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1 kNm'],
|
||||
[1080, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1.08 kNm'],
|
||||
[268, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '268 kNm'],
|
||||
[1000, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '1 MNm'],
|
||||
[1080, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '1.08 MNm'],
|
||||
[593, UniversalYAxisUnit.FORCE_NEWTONS, '593 N'],
|
||||
[1000, UniversalYAxisUnit.FORCE_KILONEWTONS, '1 MN'],
|
||||
[1080, UniversalYAxisUnit.FORCE_KILONEWTONS, '1.08 MN'],
|
||||
])('formats force value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Mass', () => {
|
||||
test.each([
|
||||
[120, UniversalYAxisUnit.MASS_MILLIGRAM, '120 mg'],
|
||||
[120000, UniversalYAxisUnit.MASS_MILLIGRAM, '120 g'],
|
||||
[987, UniversalYAxisUnit.MASS_GRAM, '987 g'],
|
||||
[1020, UniversalYAxisUnit.MASS_GRAM, '1.02 kg'],
|
||||
[456, UniversalYAxisUnit.MASS_POUND, '456 lb'],
|
||||
[321, UniversalYAxisUnit.MASS_KILOGRAM, '321 kg'],
|
||||
[654, UniversalYAxisUnit.MASS_METRIC_TON, '654 t'],
|
||||
])('formats mass value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Length', () => {
|
||||
test.each([
|
||||
[88, UniversalYAxisUnit.LENGTH_MILLIMETER, '88 mm'],
|
||||
[100, UniversalYAxisUnit.LENGTH_MILLIMETER, '100 mm'],
|
||||
[1000, UniversalYAxisUnit.LENGTH_MILLIMETER, '1 m'],
|
||||
[177, UniversalYAxisUnit.LENGTH_INCH, '177 in'],
|
||||
[266, UniversalYAxisUnit.LENGTH_FOOT, '266 ft'],
|
||||
[355, UniversalYAxisUnit.LENGTH_METER, '355 m'],
|
||||
[355000, UniversalYAxisUnit.LENGTH_METER, '355 km'],
|
||||
[444, UniversalYAxisUnit.LENGTH_KILOMETER, '444 km'],
|
||||
[533, UniversalYAxisUnit.LENGTH_MILE, '533 mi'],
|
||||
])('formats length value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pressure', () => {
|
||||
test.each([
|
||||
[45, UniversalYAxisUnit.PRESSURE_MILLIBAR, '45 mbar'],
|
||||
[1013, UniversalYAxisUnit.PRESSURE_MILLIBAR, '1.01 bar'],
|
||||
[27, UniversalYAxisUnit.PRESSURE_BAR, '27 bar'],
|
||||
[62, UniversalYAxisUnit.PRESSURE_KILOBAR, '62 kbar'],
|
||||
[845, UniversalYAxisUnit.PRESSURE_PASCAL, '845 Pa'],
|
||||
[540, UniversalYAxisUnit.PRESSURE_HECTOPASCAL, '540 hPa'],
|
||||
[378, UniversalYAxisUnit.PRESSURE_KILOPASCAL, '378 kPa'],
|
||||
[29, UniversalYAxisUnit.PRESSURE_INCHES_HG, '29 "Hg'],
|
||||
[65, UniversalYAxisUnit.PRESSURE_PSI, '65psi'],
|
||||
])('formats pressure value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Radiation', () => {
|
||||
test.each([
|
||||
[452, UniversalYAxisUnit.RADIATION_BECQUEREL, '452 Bq'],
|
||||
[37, UniversalYAxisUnit.RADIATION_CURIE, '37 Ci'],
|
||||
[128, UniversalYAxisUnit.RADIATION_GRAY, '128 Gy'],
|
||||
[512, UniversalYAxisUnit.RADIATION_RAD, '512 rad'],
|
||||
[256, UniversalYAxisUnit.RADIATION_SIEVERT, '256 Sv'],
|
||||
[640, UniversalYAxisUnit.RADIATION_MILLISIEVERT, '640 mSv'],
|
||||
[875, UniversalYAxisUnit.RADIATION_MICROSIEVERT, '875 µSv'],
|
||||
[875000, UniversalYAxisUnit.RADIATION_MICROSIEVERT, '875 mSv'],
|
||||
[92, UniversalYAxisUnit.RADIATION_REM, '92 rem'],
|
||||
[715, UniversalYAxisUnit.RADIATION_EXPOSURE_C_PER_KG, '715 C/kg'],
|
||||
[833, UniversalYAxisUnit.RADIATION_ROENTGEN, '833 R'],
|
||||
[468, UniversalYAxisUnit.RADIATION_SIEVERT_PER_HOUR, '468 Sv/h'],
|
||||
[590, UniversalYAxisUnit.RADIATION_MILLISIEVERT_PER_HOUR, '590 mSv/h'],
|
||||
[712, UniversalYAxisUnit.RADIATION_MICROSIEVERT_PER_HOUR, '712 µSv/h'],
|
||||
])('formats radiation value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rotation Speed', () => {
|
||||
test.each([
|
||||
[345, UniversalYAxisUnit.ROTATION_SPEED_REVOLUTIONS_PER_MINUTE, '345 rpm'],
|
||||
[789, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 Hz'],
|
||||
[789000, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 kHz'],
|
||||
[213, UniversalYAxisUnit.ROTATION_SPEED_RADIANS_PER_SECOND, '213 rad/s'],
|
||||
[654, UniversalYAxisUnit.ROTATION_SPEED_DEGREES_PER_SECOND, '654 °/s'],
|
||||
])('formats rotation speed value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Temperature', () => {
|
||||
test.each([
|
||||
[37, UniversalYAxisUnit.TEMPERATURE_CELSIUS, '37 °C'],
|
||||
[451, UniversalYAxisUnit.TEMPERATURE_FAHRENHEIT, '451 °F'],
|
||||
[310, UniversalYAxisUnit.TEMPERATURE_KELVIN, '310 K'],
|
||||
])('formats temperature value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Velocity', () => {
|
||||
test.each([
|
||||
[900, UniversalYAxisUnit.VELOCITY_METERS_PER_SECOND, '900 m/s'],
|
||||
[456, UniversalYAxisUnit.VELOCITY_KILOMETERS_PER_HOUR, '456 km/h'],
|
||||
[789, UniversalYAxisUnit.VELOCITY_MILES_PER_HOUR, '789 mph'],
|
||||
[222, UniversalYAxisUnit.VELOCITY_KNOT, '222 kn'],
|
||||
])('formats velocity value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Volume', () => {
|
||||
test.each([
|
||||
[1200, UniversalYAxisUnit.VOLUME_MILLILITER, '1.2 L'],
|
||||
[9000000, UniversalYAxisUnit.VOLUME_MILLILITER, '9 kL'],
|
||||
[9, UniversalYAxisUnit.VOLUME_LITER, '9 L'],
|
||||
[9000, UniversalYAxisUnit.VOLUME_LITER, '9 kL'],
|
||||
[9000000, UniversalYAxisUnit.VOLUME_LITER, '9 ML'],
|
||||
[9000000000, UniversalYAxisUnit.VOLUME_LITER, '9 GL'],
|
||||
[9000000000000, UniversalYAxisUnit.VOLUME_LITER, '9 TL'],
|
||||
[9000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9 PL'],
|
||||
[9010000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.01 EL'],
|
||||
[9020000000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.02 ZL'],
|
||||
[9030000000000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.03 YL'],
|
||||
[900, UniversalYAxisUnit.VOLUME_CUBIC_METER, '900 m³'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_CUBIC_METER,
|
||||
'9e+30 m³',
|
||||
],
|
||||
[900, UniversalYAxisUnit.VOLUME_NORMAL_CUBIC_METER, '900 Nm³'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_NORMAL_CUBIC_METER,
|
||||
'9e+30 Nm³',
|
||||
],
|
||||
[900, UniversalYAxisUnit.VOLUME_CUBIC_DECIMETER, '900 dm³'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_CUBIC_DECIMETER,
|
||||
'9e+30 dm³',
|
||||
],
|
||||
[900, UniversalYAxisUnit.VOLUME_GALLON, '900 gal'],
|
||||
[
|
||||
9000000000000000000000000000000,
|
||||
UniversalYAxisUnit.VOLUME_GALLON,
|
||||
'9e+30 gal',
|
||||
],
|
||||
])('formats volume value %s %s as %s', (value, unit, expected) => {
|
||||
expect(formatUniversalUnit(value, unit)).toBe(expected);
|
||||
});
|
||||
});
|
||||
|
||||
	describe('Boolean', () => {
		it('formats boolean units', () => {
			expect(formatUniversalUnit(1, UniversalYAxisUnit.TRUE_FALSE)).toBe('True');
			expect(formatUniversalUnit(1, UniversalYAxisUnit.YES_NO)).toBe('Yes');
			expect(formatUniversalUnit(1, UniversalYAxisUnit.ON_OFF)).toBe('On');
		});
	});
});

describe('Mapping Validator', () => {
	it('validates that all units have a mapping', () => {
		// Each universal unit should have a mapping to a 1:1 Grafana unit in
		// UniversalUnitToGrafanaUnit or an additional mapping in AdditionalLabelsMappingForGrafanaUnits
		const units = Object.values(UniversalYAxisUnit);
		expect(
			units.every((unit) => {
				const hasBaseMapping = unit in UniversalUnitToGrafanaUnit;
				const hasAdditionalMapping = unit in AdditionalLabelsMappingForGrafanaUnits;
				const hasMapping = hasBaseMapping || hasAdditionalMapping;
				if (!hasMapping) {
					throw new Error(`Unit ${unit} does not have a mapping`);
				}
				return hasMapping;
			}),
		).toBe(true);
	});
});
@@ -1,8 +1,6 @@
import { UniversalYAxisUnit } from '../types';
import {
	getUniversalNameFromMetricUnit,
	mapMetricUnitToUniversalUnit,
	mergeCategories,
} from '../utils';

describe('YAxisUnitSelector utils', () => {
@@ -38,43 +36,4 @@ describe('YAxisUnitSelector utils', () => {
			expect(getUniversalNameFromMetricUnit('s')).toBe('Seconds (s)');
		});
	});

	describe('mergeCategories', () => {
		it('merges categories correctly', () => {
			const categories1 = [
				{
					name: 'Data',
					units: [
						{ name: 'bytes', id: UniversalYAxisUnit.BYTES },
						{ name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
					],
				},
			];
			const categories2 = [
				{
					name: 'Data',
					units: [{ name: 'bits', id: UniversalYAxisUnit.BITS }],
				},
				{
					name: 'Time',
					units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
				},
			];
			const mergedCategories = mergeCategories(categories1, categories2);
			expect(mergedCategories).toEqual([
				{
					name: 'Data',
					units: [
						{ name: 'bytes', id: UniversalYAxisUnit.BYTES },
						{ name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
						{ name: 'bits', id: UniversalYAxisUnit.BITS },
					],
				},
				{
					name: 'Time',
					units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
				},
			]);
		});
	});
});

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,90 +0,0 @@
import { formattedValueToString, getValueFormat } from '@grafana/data';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import { formatDecimalWithLeadingZeros } from 'components/Graph/utils';
import {
	AdditionalLabelsMappingForGrafanaUnits,
	CUSTOM_SCALING_FAMILIES,
	UniversalUnitToGrafanaUnit,
} from 'components/YAxisUnitSelector/constants';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';

function scaleValue(
	value: number,
	unit: UniversalYAxisUnit,
	family: UniversalYAxisUnit[],
	factor: number,
): { value: number; label: string } {
	let idx = family.indexOf(unit);
	// If the unit is not in the family, return the unit with the additional label
	if (idx === -1) {
		return { value, label: AdditionalLabelsMappingForGrafanaUnits[unit] || '' };
	}

	// Scale the value up or down to the nearest unit in the family
	let scaled = value;
	// Scale up
	while (scaled >= factor && idx < family.length - 1) {
		scaled /= factor;
		idx += 1;
	}
	// Scale down
	while (scaled < 1 && idx > 0) {
		scaled *= factor;
		idx -= 1;
	}

	// Return the scaled value and the label of the nearest unit in the family
	return {
		value: scaled,
		label: AdditionalLabelsMappingForGrafanaUnits[family[idx]] || '',
	};
}

export function formatUniversalUnit(
	value: number,
	unit: UniversalYAxisUnit,
	precision: PrecisionOption = PrecisionOptionsEnum.FULL,
	decimals: number | undefined = undefined,
): string {
	// Check if this unit belongs to a family that needs custom scaling
	const family = CUSTOM_SCALING_FAMILIES.find((family) =>
		family.units.includes(unit),
	);
	if (family) {
		const scaled = scaleValue(value, unit, family.units, family.scaleFactor);
		const formatter = getValueFormat(scaled.label);
		const formatted = formatter(scaled.value, decimals);
		if (formatted.text && formatted.text.includes('.')) {
			formatted.text = formatDecimalWithLeadingZeros(
				parseFloat(formatted.text),
				precision,
			);
		}
		return `${formatted.text} ${scaled.label}`;
	}

	// Use Grafana formatting with custom label mappings
	const grafanaFormat = UniversalUnitToGrafanaUnit[unit];
	if (grafanaFormat) {
		const formatter = getValueFormat(grafanaFormat);
		const formatted = formatter(value, decimals);
		if (formatted.text && formatted.text.includes('.')) {
			formatted.text = formatDecimalWithLeadingZeros(
				parseFloat(formatted.text),
				precision,
			);
		}
		return formattedValueToString(formatted);
	}

	// Fallback to short format for other units
	const formatter = getValueFormat('short');
	const formatted = formatter(value, decimals);
	if (formatted.text && formatted.text.includes('.')) {
		formatted.text = formatDecimalWithLeadingZeros(
			parseFloat(formatted.text),
			precision,
		);
	}
	return `${formatted.text} ${unit}`;
}
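
A brief usage sketch of the removed helper, for orientation only; it assumes formatUniversalUnit and UniversalYAxisUnit are imported as in the test file above, and the expected strings restate entries from the test tables rather than anything new:

// Decimal-scaled family: a value rolls over to the next label once it reaches the scale factor.
formatUniversalUnit(1000, UniversalYAxisUnit.BITS); // '1 kb'
formatUniversalUnit(1023, UniversalYAxisUnit.BITS); // '1.02 kb'

// Units without a custom scaling family fall through to the Grafana formatter.
formatUniversalUnit(9, UniversalYAxisUnit.PERCENT_UNIT); // '900%'
formatUniversalUnit(742, UniversalYAxisUnit.NONE); // '742'
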
@@ -5,11 +5,11 @@ export interface YAxisUnitSelectorProps {
|
||||
loading?: boolean;
|
||||
disabled?: boolean;
|
||||
'data-testid'?: string;
|
||||
source: YAxisSource;
|
||||
}
|
||||
|
||||
export enum UniversalYAxisUnit {
|
||||
// Time
|
||||
WEEKS = 'wk',
|
||||
DAYS = 'd',
|
||||
HOURS = 'h',
|
||||
MINUTES = 'min',
|
||||
@@ -17,14 +17,6 @@ export enum UniversalYAxisUnit {
|
||||
MICROSECONDS = 'us',
|
||||
MILLISECONDS = 'ms',
|
||||
NANOSECONDS = 'ns',
|
||||
DURATION_MS = 'dtdurationms',
|
||||
DURATION_S = 'dtdurations',
|
||||
DURATION_HMS = 'dthms',
|
||||
DURATION_DHMS = 'dtdhms',
|
||||
TIMETICKS = 'timeticks',
|
||||
CLOCK_MS = 'clockms',
|
||||
CLOCK_S = 'clocks',
|
||||
TIME_HERTZ = 'hertz',
|
||||
|
||||
// Data
|
||||
BYTES = 'By',
|
||||
@@ -37,17 +29,6 @@ export enum UniversalYAxisUnit {
|
||||
ZETTABYTES = 'ZBy',
|
||||
YOTTABYTES = 'YBy',
|
||||
|
||||
// Binary (IEC) Data
|
||||
BYTES_IEC = 'bytes',
|
||||
KIBIBYTES = 'KiBy',
|
||||
MEBIBYTES = 'MiBy',
|
||||
GIBIBYTES = 'GiBy',
|
||||
TEBIBYTES = 'TiBy',
|
||||
PEBIBYTES = 'PiBy',
|
||||
EXBIBYTES = 'EiBy',
|
||||
ZEBIBYTES = 'ZiBy',
|
||||
YOBIBYTES = 'YiBy',
|
||||
|
||||
// Data Rate
|
||||
BYTES_SECOND = 'By/s',
|
||||
KILOBYTES_SECOND = 'kBy/s',
|
||||
@@ -58,21 +39,9 @@ export enum UniversalYAxisUnit {
|
||||
EXABYTES_SECOND = 'EBy/s',
|
||||
ZETTABYTES_SECOND = 'ZBy/s',
|
||||
YOTTABYTES_SECOND = 'YBy/s',
|
||||
DATA_RATE_PACKETS_PER_SECOND = 'pps',
|
||||
|
||||
// Binary (IEC) Data Rate
|
||||
KIBIBYTES_SECOND = 'KiBy/s',
|
||||
MEBIBYTES_SECOND = 'MiBy/s',
|
||||
GIBIBYTES_SECOND = 'GiBy/s',
|
||||
TEBIBYTES_SECOND = 'TiBy/s',
|
||||
PEBIBYTES_SECOND = 'PiBy/s',
|
||||
EXBIBYTES_SECOND = 'EiBy/s',
|
||||
ZEBIBYTES_SECOND = 'ZiBy/s',
|
||||
YOBIBYTES_SECOND = 'YiBy/s',
|
||||
|
||||
// Bits
|
||||
BITS = 'bit',
|
||||
BITS_IEC = 'bits',
|
||||
KILOBITS = 'kbit',
|
||||
MEGABITS = 'Mbit',
|
||||
GIGABITS = 'Gbit',
|
||||
@@ -93,16 +62,6 @@ export enum UniversalYAxisUnit {
|
||||
ZETTABITS_SECOND = 'Zbit/s',
|
||||
YOTTABITS_SECOND = 'Ybit/s',
|
||||
|
||||
// Binary (IEC) Bit Rate
|
||||
KIBIBITS_SECOND = 'Kibit/s',
|
||||
MEBIBITS_SECOND = 'Mibit/s',
|
||||
GIBIBITS_SECOND = 'Gibit/s',
|
||||
TEBIBITS_SECOND = 'Tibit/s',
|
||||
PEBIBITS_SECOND = 'Pibit/s',
|
||||
EXBIBITS_SECOND = 'Eibit/s',
|
||||
ZEBIBITS_SECOND = 'Zibit/s',
|
||||
YOBIBITS_SECOND = 'Yibit/s',
|
||||
|
||||
// Count
|
||||
COUNT = '{count}',
|
||||
COUNT_SECOND = '{count}/s',
|
||||
@@ -128,231 +87,7 @@ export enum UniversalYAxisUnit {
|
||||
// Percent
|
||||
PERCENT = '%',
|
||||
PERCENT_UNIT = 'percentunit',
|
||||
|
||||
// Boolean
|
||||
TRUE_FALSE = '{bool}',
|
||||
YES_NO = '{bool_yn}',
|
||||
ON_OFF = 'bool_on_off',
|
||||
|
||||
// None
|
||||
NONE = '1',
|
||||
|
||||
// Hash rate
|
||||
HASH_RATE_HASHES_PER_SECOND = 'Hs',
|
||||
HASH_RATE_KILOHASHES_PER_SECOND = 'KHs',
|
||||
HASH_RATE_MEGAHASHES_PER_SECOND = 'MHs',
|
||||
HASH_RATE_GIGAHASHES_PER_SECOND = 'GHs',
|
||||
HASH_RATE_TERAHASHES_PER_SECOND = 'THs',
|
||||
HASH_RATE_PETAHASHES_PER_SECOND = 'PHs',
|
||||
HASH_RATE_EXAHASHES_PER_SECOND = 'EHs',
|
||||
|
||||
// Miscellaneous
|
||||
MISC_STRING = 'string',
|
||||
MISC_SHORT = 'short',
|
||||
MISC_HUMIDITY = 'humidity',
|
||||
MISC_DECIBEL = 'dB',
|
||||
MISC_HEXADECIMAL = 'hex',
|
||||
MISC_HEXADECIMAL_0X = 'hex0x',
|
||||
MISC_SCIENTIFIC_NOTATION = 'sci',
|
||||
MISC_LOCALE_FORMAT = 'locale',
|
||||
MISC_PIXELS = 'pixel',
|
||||
|
||||
// Acceleration
|
||||
ACCELERATION_METERS_PER_SECOND_SQUARED = 'accMS2',
|
||||
ACCELERATION_FEET_PER_SECOND_SQUARED = 'accFS2',
|
||||
ACCELERATION_G_UNIT = 'accG',
|
||||
|
||||
// Angular
|
||||
ANGULAR_DEGREE = 'degree',
|
||||
ANGULAR_RADIAN = 'radian',
|
||||
ANGULAR_GRADIAN = 'grad',
|
||||
ANGULAR_ARC_MINUTE = 'arcmin',
|
||||
ANGULAR_ARC_SECOND = 'arcsec',
|
||||
|
||||
// Area
|
||||
AREA_SQUARE_METERS = 'areaM2',
|
||||
AREA_SQUARE_FEET = 'areaF2',
|
||||
AREA_SQUARE_MILES = 'areaMI2',
|
||||
|
||||
// FLOPs
|
||||
FLOPS_FLOPS = 'flops',
|
||||
FLOPS_MFLOPS = 'mflops',
|
||||
FLOPS_GFLOPS = 'gflops',
|
||||
FLOPS_TFLOPS = 'tflops',
|
||||
FLOPS_PFLOPS = 'pflops',
|
||||
FLOPS_EFLOPS = 'eflops',
|
||||
FLOPS_ZFLOPS = 'zflops',
|
||||
FLOPS_YFLOPS = 'yflops',
|
||||
|
||||
// Concentration
|
||||
CONCENTRATION_PPM = 'ppm',
|
||||
CONCENTRATION_PPB = 'conppb',
|
||||
CONCENTRATION_NG_M3 = 'conngm3',
|
||||
CONCENTRATION_NG_NORMAL_CUBIC_METER = 'conngNm3',
|
||||
CONCENTRATION_UG_M3 = 'conμgm3',
|
||||
CONCENTRATION_UG_NORMAL_CUBIC_METER = 'conμgNm3',
|
||||
CONCENTRATION_MG_M3 = 'conmgm3',
|
||||
CONCENTRATION_MG_NORMAL_CUBIC_METER = 'conmgNm3',
|
||||
CONCENTRATION_G_M3 = 'congm3',
|
||||
CONCENTRATION_G_NORMAL_CUBIC_METER = 'congNm3',
|
||||
CONCENTRATION_MG_PER_DL = 'conmgdL',
|
||||
CONCENTRATION_MMOL_PER_L = 'conmmolL',
|
||||
|
||||
// Currency
|
||||
CURRENCY_USD = 'currencyUSD',
|
||||
CURRENCY_GBP = 'currencyGBP',
|
||||
CURRENCY_EUR = 'currencyEUR',
|
||||
CURRENCY_JPY = 'currencyJPY',
|
||||
CURRENCY_RUB = 'currencyRUB',
|
||||
CURRENCY_UAH = 'currencyUAH',
|
||||
CURRENCY_BRL = 'currencyBRL',
|
||||
CURRENCY_DKK = 'currencyDKK',
|
||||
CURRENCY_ISK = 'currencyISK',
|
||||
CURRENCY_NOK = 'currencyNOK',
|
||||
CURRENCY_SEK = 'currencySEK',
|
||||
CURRENCY_CZK = 'currencyCZK',
|
||||
CURRENCY_CHF = 'currencyCHF',
|
||||
CURRENCY_PLN = 'currencyPLN',
|
||||
CURRENCY_BTC = 'currencyBTC',
|
||||
CURRENCY_MBTC = 'currencymBTC',
|
||||
CURRENCY_UBTC = 'currencyμBTC',
|
||||
CURRENCY_ZAR = 'currencyZAR',
|
||||
CURRENCY_INR = 'currencyINR',
|
||||
CURRENCY_KRW = 'currencyKRW',
|
||||
CURRENCY_IDR = 'currencyIDR',
|
||||
CURRENCY_PHP = 'currencyPHP',
|
||||
CURRENCY_VND = 'currencyVND',
|
||||
|
||||
// Datetime
|
||||
DATETIME_ISO = 'dateTimeAsIso',
|
||||
DATETIME_ISO_NO_DATE_IF_TODAY = 'dateTimeAsIsoNoDateIfToday',
|
||||
DATETIME_US = 'dateTimeAsUS',
|
||||
DATETIME_US_NO_DATE_IF_TODAY = 'dateTimeAsUSNoDateIfToday',
|
||||
DATETIME_LOCAL = 'dateTimeAsLocal',
|
||||
DATETIME_LOCAL_NO_DATE_IF_TODAY = 'dateTimeAsLocalNoDateIfToday',
|
||||
DATETIME_SYSTEM = 'dateTimeAsSystem',
|
||||
DATETIME_FROM_NOW = 'dateTimeFromNow',
|
||||
|
||||
// Power/Electrical
|
||||
POWER_WATT = 'watt',
|
||||
POWER_KILOWATT = 'kwatt',
|
||||
POWER_MEGAWATT = 'megwatt',
|
||||
POWER_GIGAWATT = 'gwatt',
|
||||
POWER_MILLIWATT = 'mwatt',
|
||||
POWER_WATT_PER_SQUARE_METER = 'Wm2',
|
||||
POWER_VOLT_AMPERE = 'voltamp',
|
||||
POWER_KILOVOLT_AMPERE = 'kvoltamp',
|
||||
POWER_VOLT_AMPERE_REACTIVE = 'voltampreact',
|
||||
POWER_KILOVOLT_AMPERE_REACTIVE = 'kvoltampreact',
|
||||
POWER_WATT_HOUR = 'watth',
|
||||
POWER_WATT_HOUR_PER_KG = 'watthperkg',
|
||||
POWER_KILOWATT_HOUR = 'kwatth',
|
||||
POWER_KILOWATT_MINUTE = 'kwattm',
|
||||
POWER_AMPERE_HOUR = 'amph',
|
||||
POWER_KILOAMPERE_HOUR = 'kamph',
|
||||
POWER_MILLIAMPERE_HOUR = 'mamph',
|
||||
POWER_JOULE = 'joule',
|
||||
POWER_ELECTRON_VOLT = 'ev',
|
||||
POWER_AMPERE = 'amp',
|
||||
POWER_KILOAMPERE = 'kamp',
|
||||
POWER_MILLIAMPERE = 'mamp',
|
||||
POWER_VOLT = 'volt',
|
||||
POWER_KILOVOLT = 'kvolt',
|
||||
POWER_MILLIVOLT = 'mvolt',
|
||||
POWER_DECIBEL_MILLIWATT = 'dBm',
|
||||
POWER_OHM = 'ohm',
|
||||
POWER_KILOOHM = 'kohm',
|
||||
POWER_MEGAOHM = 'Mohm',
|
||||
POWER_FARAD = 'farad',
|
||||
POWER_MICROFARAD = 'µfarad',
|
||||
POWER_NANOFARAD = 'nfarad',
|
||||
POWER_PICOFARAD = 'pfarad',
|
||||
POWER_FEMTOFARAD = 'ffarad',
|
||||
POWER_HENRY = 'henry',
|
||||
POWER_MILLIHENRY = 'mhenry',
|
||||
POWER_MICROHENRY = 'µhenry',
|
||||
POWER_LUMENS = 'lumens',
|
||||
|
||||
// Flow
|
||||
FLOW_GALLONS_PER_MINUTE = 'flowgpm',
|
||||
FLOW_CUBIC_METERS_PER_SECOND = 'flowcms',
|
||||
FLOW_CUBIC_FEET_PER_SECOND = 'flowcfs',
|
||||
FLOW_CUBIC_FEET_PER_MINUTE = 'flowcfm',
|
||||
FLOW_LITERS_PER_HOUR = 'litreh',
|
||||
FLOW_LITERS_PER_MINUTE = 'flowlpm',
|
||||
FLOW_MILLILITERS_PER_MINUTE = 'flowmlpm',
|
||||
FLOW_LUX = 'lux',
|
||||
|
||||
// Force
|
||||
FORCE_NEWTON_METERS = 'forceNm',
|
||||
FORCE_KILONEWTON_METERS = 'forcekNm',
|
||||
FORCE_NEWTONS = 'forceN',
|
||||
FORCE_KILONEWTONS = 'forcekN',
|
||||
|
||||
// Mass
|
||||
MASS_MILLIGRAM = 'massmg',
|
||||
MASS_GRAM = 'massg',
|
||||
MASS_POUND = 'masslb',
|
||||
MASS_KILOGRAM = 'masskg',
|
||||
MASS_METRIC_TON = 'masst',
|
||||
|
||||
// Length
|
||||
LENGTH_MILLIMETER = 'lengthmm',
|
||||
LENGTH_INCH = 'lengthin',
|
||||
LENGTH_FOOT = 'lengthft',
|
||||
LENGTH_METER = 'lengthm',
|
||||
LENGTH_KILOMETER = 'lengthkm',
|
||||
LENGTH_MILE = 'lengthmi',
|
||||
|
||||
// Pressure
|
||||
PRESSURE_MILLIBAR = 'pressurembar',
|
||||
PRESSURE_BAR = 'pressurebar',
|
||||
PRESSURE_KILOBAR = 'pressurekbar',
|
||||
PRESSURE_PASCAL = 'pressurepa',
|
||||
PRESSURE_HECTOPASCAL = 'pressurehpa',
|
||||
PRESSURE_KILOPASCAL = 'pressurekpa',
|
||||
PRESSURE_INCHES_HG = 'pressurehg',
|
||||
PRESSURE_PSI = 'pressurepsi',
|
||||
|
||||
// Radiation
|
||||
RADIATION_BECQUEREL = 'radbq',
|
||||
RADIATION_CURIE = 'radci',
|
||||
RADIATION_GRAY = 'radgy',
|
||||
RADIATION_RAD = 'radrad',
|
||||
RADIATION_SIEVERT = 'radsv',
|
||||
RADIATION_MILLISIEVERT = 'radmsv',
|
||||
RADIATION_MICROSIEVERT = 'radusv',
|
||||
RADIATION_REM = 'radrem',
|
||||
RADIATION_EXPOSURE_C_PER_KG = 'radexpckg',
|
||||
RADIATION_ROENTGEN = 'radr',
|
||||
RADIATION_SIEVERT_PER_HOUR = 'radsvh',
|
||||
RADIATION_MILLISIEVERT_PER_HOUR = 'radmsvh',
|
||||
RADIATION_MICROSIEVERT_PER_HOUR = 'radusvh',
|
||||
|
||||
// Rotation speed
|
||||
ROTATION_SPEED_REVOLUTIONS_PER_MINUTE = 'rotrpm',
|
||||
ROTATION_SPEED_HERTZ = 'rothz',
|
||||
ROTATION_SPEED_RADIANS_PER_SECOND = 'rotrads',
|
||||
ROTATION_SPEED_DEGREES_PER_SECOND = 'rotdegs',
|
||||
|
||||
// Temperature
|
||||
TEMPERATURE_CELSIUS = 'celsius',
|
||||
TEMPERATURE_FAHRENHEIT = 'fahrenheit',
|
||||
TEMPERATURE_KELVIN = 'kelvin',
|
||||
|
||||
// Velocity
|
||||
VELOCITY_METERS_PER_SECOND = 'velocityms',
|
||||
VELOCITY_KILOMETERS_PER_HOUR = 'velocitykmh',
|
||||
VELOCITY_MILES_PER_HOUR = 'velocitymph',
|
||||
VELOCITY_KNOT = 'velocityknot',
|
||||
|
||||
// Volume
|
||||
VOLUME_MILLILITER = 'mlitre',
|
||||
VOLUME_LITER = 'litre',
|
||||
VOLUME_CUBIC_METER = 'm3',
|
||||
VOLUME_NORMAL_CUBIC_METER = 'Nm3',
|
||||
VOLUME_CUBIC_DECIMETER = 'dm3',
|
||||
VOLUME_GALLON = 'gallons',
|
||||
}
|
||||
|
||||
export enum YAxisUnit {
|
||||
@@ -558,15 +293,6 @@ export enum YAxisUnit {
|
||||
UCUM_PEBIBYTES = 'PiBy',
|
||||
OPEN_METRICS_PEBIBYTES = 'pebibytes',
|
||||
|
||||
UCUM_EXBIBYTES = 'EiBy',
|
||||
OPEN_METRICS_EXBIBYTES = 'exbibytes',
|
||||
|
||||
UCUM_ZEBIBYTES = 'ZiBy',
|
||||
OPEN_METRICS_ZEBIBYTES = 'zebibytes',
|
||||
|
||||
UCUM_YOBIBYTES = 'YiBy',
|
||||
OPEN_METRICS_YOBIBYTES = 'yobibytes',
|
||||
|
||||
UCUM_KIBIBYTES_SECOND = 'KiBy/s',
|
||||
OPEN_METRICS_KIBIBYTES_SECOND = 'kibibytes_per_second',
|
||||
|
||||
@@ -597,24 +323,6 @@ export enum YAxisUnit {
|
||||
UCUM_PEBIBITS_SECOND = 'Pibit/s',
|
||||
OPEN_METRICS_PEBIBITS_SECOND = 'pebibits_per_second',
|
||||
|
||||
UCUM_EXBIBYTES_SECOND = 'EiBy/s',
|
||||
OPEN_METRICS_EXBIBYTES_SECOND = 'exbibytes_per_second',
|
||||
|
||||
UCUM_EXBIBITS_SECOND = 'Eibit/s',
|
||||
OPEN_METRICS_EXBIBITS_SECOND = 'exbibits_per_second',
|
||||
|
||||
UCUM_ZEBIBYTES_SECOND = 'ZiBy/s',
|
||||
OPEN_METRICS_ZEBIBYTES_SECOND = 'zebibytes_per_second',
|
||||
|
||||
UCUM_ZEBIBITS_SECOND = 'Zibit/s',
|
||||
OPEN_METRICS_ZEBIBITS_SECOND = 'zebibits_per_second',
|
||||
|
||||
UCUM_YOBIBYTES_SECOND = 'YiBy/s',
|
||||
OPEN_METRICS_YOBIBYTES_SECOND = 'yobibytes_per_second',
|
||||
|
||||
UCUM_YOBIBITS_SECOND = 'Yibit/s',
|
||||
OPEN_METRICS_YOBIBITS_SECOND = 'yobibits_per_second',
|
||||
|
||||
UCUM_TRUE_FALSE = '{bool}',
|
||||
OPEN_METRICS_TRUE_FALSE = 'boolean_true_false',
|
||||
|
||||
@@ -656,27 +364,3 @@ export enum YAxisUnit {
|
||||
|
||||
OPEN_METRICS_PERCENT_UNIT = 'percentunit',
|
||||
}
|
||||
|
||||
export interface ScaledValue {
|
||||
value: number;
|
||||
label: string;
|
||||
}
|
||||
|
||||
export interface UnitFamilyConfig {
|
||||
units: UniversalYAxisUnit[];
|
||||
scaleFactor: number;
|
||||
}
|
||||
|
||||
export interface YAxisCategory {
|
||||
name: string;
|
||||
units: {
|
||||
name: string;
|
||||
id: UniversalYAxisUnit;
|
||||
}[];
|
||||
}
|
||||
|
||||
export enum YAxisSource {
|
||||
ALERTS = 'alerts',
|
||||
DASHBOARDS = 'dashboards',
|
||||
EXPLORER = 'explorer',
|
||||
}
|
||||
|
||||
@@ -1,11 +1,5 @@
import { UniversalYAxisUnitMappings, Y_AXIS_UNIT_NAMES } from './constants';
import { ADDITIONAL_Y_AXIS_CATEGORIES, BASE_Y_AXIS_CATEGORIES } from './data';
import {
	UniversalYAxisUnit,
	YAxisCategory,
	YAxisSource,
	YAxisUnit,
} from './types';
import { UniversalYAxisUnit, YAxisUnit } from './types';

export const mapMetricUnitToUniversalUnit = (
	unit: string | undefined,
@@ -15,7 +9,7 @@ export const mapMetricUnitToUniversalUnit = (
	}

	const universalUnit = Object.values(UniversalYAxisUnit).find(
		(u) => UniversalYAxisUnitMappings[u]?.has(unit as YAxisUnit) || unit === u,
		(u) => UniversalYAxisUnitMappings[u].has(unit as YAxisUnit) || unit === u,
	);

	return universalUnit || (unit as UniversalYAxisUnit) || null;
@@ -37,44 +31,3 @@ export const getUniversalNameFromMetricUnit = (

	return universalName || unit || '-';
};

export function isUniversalUnit(format: string): boolean {
	return Object.values(UniversalYAxisUnit).includes(
		format as UniversalYAxisUnit,
	);
}

export function mergeCategories(
	categories1: YAxisCategory[],
	categories2: YAxisCategory[],
): YAxisCategory[] {
	const mapOfCategories = new Map<string, YAxisCategory>();

	categories1.forEach((category) => {
		mapOfCategories.set(category.name, category);
	});

	categories2.forEach((category) => {
		if (mapOfCategories.has(category.name)) {
			mapOfCategories.set(category.name, {
				name: category.name,
				units: [
					...(mapOfCategories.get(category.name)?.units ?? []),
					...category.units,
				],
			});
		} else {
			mapOfCategories.set(category.name, category);
		}
	});

	return Array.from(mapOfCategories.values());
}

export function getYAxisCategories(source: YAxisSource): YAxisCategory[] {
	if (source !== YAxisSource.DASHBOARDS) {
		return BASE_Y_AXIS_CATEGORIES;
	}

	return mergeCategories(BASE_Y_AXIS_CATEGORIES, ADDITIONAL_Y_AXIS_CATEGORIES);
}

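A short illustrative note on the removed getYAxisCategories helper (not part of the diff); it assumes the YAxisSource values shown in types.ts above:

// Only the dashboards source gets the additional categories merged in.
getYAxisCategories(YAxisSource.DASHBOARDS); // BASE_Y_AXIS_CATEGORIES merged with ADDITIONAL_Y_AXIS_CATEGORIES
getYAxisCategories(YAxisSource.ALERTS); // BASE_Y_AXIS_CATEGORIES only
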
@@ -34,7 +34,7 @@ const themeColors = {
		cyan: '#00FFFF',
	},
	chartcolors: {
		radicalRed: '#FF1A66',
		robin: '#3F5ECC',
		dodgerBlue: '#2F80ED',
		mediumOrchid: '#BB6BD9',
		seaBuckthorn: '#F2994A',
@@ -58,7 +58,7 @@ const themeColors = {
		oliveDrab: '#66991A',
		lavenderRose: '#FF99E6',
		electricLime: '#CCFF1A',
		robin: '#3F5ECC',
		radicalRed: '#FF1A66',
		harleyOrange: '#E6331A',
		turquoise: '#33FFCC',
		gladeGreen: '#66994D',
@@ -80,7 +80,7 @@ const themeColors = {
		maroon: '#800000',
		navy: '#000080',
		aquamarine: '#7FFFD4',
		darkSeaGreen: '#8FBC8F',
		gold: '#FFD700',
		gray: '#808080',
		skyBlue: '#87CEEB',
		indigo: '#4B0082',
@@ -105,7 +105,7 @@ const themeColors = {
		lawnGreen: '#7CFC00',
		mediumSeaGreen: '#3CB371',
		lightCoral: '#F08080',
		gold: '#FFD700',
		darkSeaGreen: '#8FBC8F',
		sandyBrown: '#F4A460',
		darkKhaki: '#BDB76B',
		cornflowerBlue: '#6495ED',
@@ -113,7 +113,7 @@
		paleGreen: '#98FB98',
	},
	lightModeColor: {
		radicalRed: '#FF1A66',
		robin: '#3F5ECC',
		dodgerBlueDark: '#0C6EED',
		steelgrey: '#2f4b7c',
		steelpurple: '#665191',
@@ -143,7 +143,7 @@ const themeColors = {
		oliveDrab: '#66991A',
		lavenderRoseDark: '#F024BD',
		electricLimeDark: '#84A800',
		robin: '#3F5ECC',
		radicalRed: '#FF1A66',
		harleyOrange: '#E6331A',
		gladeGreen: '#66994D',
		hemlock: '#66664D',
@@ -181,7 +181,7 @@ const themeColors = {
		darkOrchid: '#9932CC',
		mediumSeaGreenDark: '#109E50',
		lightCoralDark: '#F85959',
		gold: '#FFD700',
		darkSeaGreenDark: '#509F50',
		sandyBrownDark: '#D97117',
		darkKhakiDark: '#99900A',
		cornflowerBlueDark: '#3371E6',

@@ -1,5 +1,4 @@
import { Select } from 'antd';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
  getAllEndpointsWidgetData,
@@ -265,7 +264,6 @@ function AllEndPoints({
        customOnDragSelect={(): void => {}}
        customTimeRange={timeRange}
        customOnRowClick={onRowClick}
        version={ENTITY_VERSION_V5}
      />
    </div>
  </div>

@@ -244,10 +244,6 @@
      }
    }
  }
  // Add border-bottom to table cells when pagination is not present
  .ant-spin-container:not(:has(.ant-pagination)) .ant-table-cell {
    border-bottom: 1px solid var(--bg-slate-500) !important;
  }

  .endpoints-table-container {
    display: flex;
@@ -426,28 +422,30 @@
    gap: 8px;
    .endpoint-meta-data-pill {
      display: flex;
      align-items: flex-start;
      border-radius: 4px;
      border: 1px solid var(--bg-slate-300);
      overflow: hidden;
      box-sizing: content-box;
      width: fit-content;
      .endpoint-meta-data-label {
        display: flex;
        padding: 6px 8px;
        align-items: center;
        gap: 4px;
        border-right: 1px solid var(--bg-slate-300);
        color: var(--text-vanilla-100);
        font-size: 14px;
        line-height: 18px; /* 128.571% */
        letter-spacing: -0.07px;
        padding: 6px 8px;
        background: var(--bg-slate-500);
        height: calc(100% - 12px);
      }

      .endpoint-meta-data-value {
        display: flex;
        padding: 6px 8px;
        justify-content: center;
        align-items: center;
        gap: 10px;
        color: var(--text-vanilla-400);
        background: var(--bg-slate-400);
        font-size: 14px;
        line-height: 18px;
        letter-spacing: -0.07px;
        height: calc(100% - 12px);
      }
    }
  }
@@ -455,23 +453,9 @@
  .endpoint-details-filters-container {
    display: flex;
    flex-direction: row;
    align-items: center;
    border: 1px solid var(--bg-slate-500);
    height: 36px;
    box-sizing: content-box;
    .ant-select-selector {
      border: none !important;
    }

    .endpoint-details-filters-container-dropdown {
      width: 120px;
      border-right: 1px solid var(--bg-slate-500);
      height: 36px;
      display: flex;
      align-items: center;
      .ant-select-single {
        height: 32px;
      }
    }

    .endpoint-details-filters-container-search {
@@ -1012,6 +996,7 @@

.lightMode {
  .ant-drawer-header {
    border-bottom: 1px solid var(--bg-vanilla-400);
    background: var(--bg-vanilla-100);
  }

@@ -1022,25 +1007,6 @@
  }

  .domain-detail-drawer {
    .endpoint-details-card,
    .status-code-table-container,
    .endpoint-details-filters-container,
    .endpoint-details-filters-container-dropdown,
    .ant-radio-button-wrapper,
    .views-tabs-container,
    .ant-btn-default.tab,
    .tab::before,
    .endpoint-meta-data-pill,
    .endpoint-meta-data-label,
    .endpoints-table-container,
    .group-by-label,
    .ant-select-selector,
    .ant-drawer-header {
      border-color: var(--bg-vanilla-300) !important;
    }
    .views-tabs .tab::before {
      background: var(--bg-vanilla-300);
    }
    .title {
      color: var(--text-ink-300);
    }
@@ -1065,6 +1031,7 @@

    .selected_view {
      background: var(--bg-vanilla-300);
      border: 1px solid var(--bg-slate-300);
      color: var(--text-ink-400);
    }

@@ -1193,11 +1160,7 @@
      }
    }

    .top-services-content {
      border-color: var(--bg-vanilla-300);
    }
    .dependent-services-container {
      border: none;
      padding: 10px 12px;
      .top-services-item {
        display: flex;
@@ -1224,31 +1187,11 @@
      }

      .top-services-item-progress-bar {
        background-color: var(--bg-vanilla-200);
        border: 1px solid var(--bg-vanilla-300);
        background-color: var(--bg-vanilla-300);
        border: 1px solid var(--bg-slate-300);
      }
    }
  }
  .ant-table {
    .ant-table-thead > tr > th {
      color: var(--text-ink-300);
    }

    .ant-table-cell {
      &,
      &:has(.top-services-item-latency) {
        background: var(--bg-vanilla-100);
      }
      color: var(--text-ink-300);
    }

    .ant-table-tbody > tr:hover > td {
      background: var(--bg-vanilla-200);
    }
    .table-row-dark {
      background: var(--bg-vanilla-300);
    }
  }

  .top-services-item-percentage {
    color: var(--text-ink-300);
@@ -1282,8 +1225,4 @@
      }
    }
  }
  // Add border-bottom to table cells when pagination is not present
  .ant-spin-container:not(:has(.ant-pagination)) .ant-table-cell {
    border-bottom: 1px solid var(--bg-vanilla-300) !important;
  }
}

@@ -1,6 +1,5 @@
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useApiMonitoringParams } from 'container/ApiMonitoring/queryParams';
import {
  END_POINT_DETAILS_QUERY_KEYS_ARRAY,
@@ -179,33 +178,18 @@ function EndPointDetails({
    [domainName, filters, minTime, maxTime],
  );

  const V5_QUERIES = [
    REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_DATA,
    REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA,
    REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA,
    REACT_QUERY_KEY.GET_ENDPOINT_METRICS_DATA,
    REACT_QUERY_KEY.GET_ENDPOINT_DEPENDENT_SERVICES_DATA,
    REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
  ] as const;

  const endPointDetailsDataQueries = useQueries(
    endPointDetailsQueryPayload.map((payload, index) => {
      const queryKey = END_POINT_DETAILS_QUERY_KEYS_ARRAY[index];
      const version = (V5_QUERIES as readonly string[]).includes(queryKey)
        ? ENTITY_VERSION_V5
        : ENTITY_VERSION_V4;
      return {
        queryKey: [
          END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
          payload,
          ...(filters?.items?.length ? filters.items : []), // Include filters.items in queryKey for better caching
          version,
        ],
        queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
          GetMetricQueryRange(payload, version),
        enabled: !!payload,
      };
    }),
    endPointDetailsQueryPayload.map((payload, index) => ({
      queryKey: [
        END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
        payload,
        filters?.items, // Include filters.items in queryKey for better caching
        ENTITY_VERSION_V4,
      ],
      queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
        GetMetricQueryRange(payload, ENTITY_VERSION_V4),
      enabled: !!payload,
    })),
  );

  const [

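Note: the left-hand side of the hunk above chose the query-range API version per query key (the six keys in V5_QUERIES went to ENTITY_VERSION_V5, everything else to V4), while the right-hand side pins all endpoint-details queries back to ENTITY_VERSION_V4. A standalone TypeScript sketch of the per-key selection being removed, with hypothetical key names:

// Sketch only: membership in a known-V5 key list decides which API version a query uses.
const V5_KEYS: readonly string[] = ['endpoint-metrics', 'endpoint-status-codes']; // hypothetical keys
function pickVersion(queryKey: string, v4: string, v5: string): string {
  return V5_KEYS.includes(queryKey) ? v5 : v4;
}
// pickVersion('endpoint-metrics', 'v4', 'v5') === 'v5'
// pickVersion('endpoint-dependent-services', 'v4', 'v5') === 'v4'
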
@@ -4,7 +4,7 @@ import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
import { MetricRangePayloadV5, ScalarData } from 'api/v5/v5';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import { withErrorBoundary } from 'components/ErrorBoundaryHOC';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
  END_POINT_DETAILS_QUERY_KEYS_ARRAY,
@@ -56,10 +56,6 @@ function TopErrors({
    {
      items: endPointName
        ? [
            // Remove any existing http.url filters from initialFilters to avoid duplicates
            ...(initialFilters?.items?.filter(
              (item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
            ) || []),
            {
              id: '92b8a1c1',
              key: {
@@ -70,6 +66,7 @@ function TopErrors({
              op: '=',
              value: endPointName,
            },
            ...(initialFilters?.items || []),
          ]
        : [...(initialFilters?.items || [])],
      op: 'AND',
@@ -131,12 +128,12 @@ function TopErrors({
  const endPointDropDownDataQueries = useQueries(
    endPointDropDownQueryPayload.map((payload) => ({
      queryKey: [
        END_POINT_DETAILS_QUERY_KEYS_ARRAY[2],
        END_POINT_DETAILS_QUERY_KEYS_ARRAY[4],
        payload,
        ENTITY_VERSION_V5,
        ENTITY_VERSION_V4,
      ],
      queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
        GetMetricQueryRange(payload, ENTITY_VERSION_V5),
        GetMetricQueryRange(payload, ENTITY_VERSION_V4),
      enabled: !!payload,
      staleTime: 60 * 1000,
    })),

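Note: the lines removed from TopErrors above first strip any existing http.url (SPAN_ATTRIBUTES.URL_PATH) filters from initialFilters before appending the endpoint filter, so the same URL key cannot appear twice; the new code simply spreads initialFilters after the endpoint filter. A standalone TypeScript sketch of the dedupe-then-append pattern, with the filter item shape simplified for illustration:

type FilterItem = { id: string; key?: { key: string }; op: string; value: string };

// Drop any existing filters on urlKey, then append a single endpoint filter.
function withEndpointFilter(
  items: FilterItem[],
  urlKey: string,
  endPointName: string,
): FilterItem[] {
  const withoutUrl = items.filter((item) => item.key?.key !== urlKey);
  return [
    ...withoutUrl,
    { id: 'endpoint-filter', key: { key: urlKey }, op: '=', value: endPointName },
  ];
}
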
@@ -1,337 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable prefer-destructuring */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { TraceAggregation } from 'api/v5/v5';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import DomainMetrics from './DomainMetrics';
|
||||
|
||||
// Mock the API call
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
GetMetricQueryRange: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock ErrorState component
|
||||
jest.mock('./ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(({ refetch }) => (
|
||||
<div data-testid="error-state">
|
||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const mockProps = {
|
||||
domainName: '0.0.0.0',
|
||||
timeRange: {
|
||||
startTime: 1758259531000,
|
||||
endTime: 1758261331000,
|
||||
},
|
||||
domainListFilters: {
|
||||
items: [],
|
||||
op: 'AND' as const,
|
||||
} as IBuilderQuery['filters'],
|
||||
};
|
||||
|
||||
const mockSuccessResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
A: '150',
|
||||
B: '125000000',
|
||||
D: '2021-01-01T23:00:00Z',
|
||||
F1: '5.5',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
cacheTime: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
const renderComponent = (props = mockProps): ReturnType<typeof render> =>
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<DomainMetrics {...props} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
describe('1. V5 Query Payload with Filters', () => {
|
||||
it('sends correct V5 payload structure with domain name filters', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(GetMetricQueryRange).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
const [payload, version] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
||||
|
||||
// Verify it's using V5
|
||||
expect(version).toBe(ENTITY_VERSION_V5);
|
||||
|
||||
// Verify time range
|
||||
expect(payload.start).toBe(1758259531000);
|
||||
expect(payload.end).toBe(1758261331000);
|
||||
|
||||
// Verify V3 payload structure (getDomainMetricsQueryPayload returns V3 format)
|
||||
expect(payload.query).toBeDefined();
|
||||
expect(payload.query.builder).toBeDefined();
|
||||
expect(payload.query.builder.queryData).toBeDefined();
|
||||
|
||||
const queryData = payload.query.builder.queryData;
|
||||
|
||||
// Verify Query A - count with URL filter
|
||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
||||
expect(queryA).toBeDefined();
|
||||
expect(queryA.dataSource).toBe('traces');
|
||||
expect(queryA.aggregations?.[0]).toBeDefined();
|
||||
expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain(
|
||||
'url.full EXISTS OR http.url EXISTS',
|
||||
);
|
||||
|
||||
// Verify Query B - p99 latency
|
||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
||||
expect(queryB).toBeDefined();
|
||||
expect(queryB.aggregateOperator).toBe('p99');
|
||||
expect(queryB.aggregations?.[0]).toBeDefined();
|
||||
expect((queryB.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'p99(duration_nano)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryC.aggregations?.[0]).toBeDefined();
|
||||
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
);
|
||||
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
|
||||
// Verify Query D - max timestamp
|
||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
||||
expect(queryD).toBeDefined();
|
||||
expect(queryD.aggregateOperator).toBe('max');
|
||||
expect(queryD.aggregations?.[0]).toBeDefined();
|
||||
expect((queryD.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'max(timestamp)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = payload.query.builder.queryFormulas;
|
||||
expect(formulas).toBeDefined();
|
||||
expect(formulas.length).toBeGreaterThan(0);
|
||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
||||
expect(formulaF1).toBeDefined();
|
||||
expect(formulaF1.expression).toBe('(C/A)*100');
|
||||
});
|
||||
|
||||
it('includes custom filters in filter expressions', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const customFilters: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-1',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'my-service',
|
||||
},
|
||||
{
|
||||
id: 'test-2',
|
||||
key: {
|
||||
key: 'deployment.environment',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'production',
|
||||
},
|
||||
],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
renderComponent({
|
||||
...mockProps,
|
||||
domainListFilters: customFilters,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(GetMetricQueryRange).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
const [payload] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
||||
const queryData = payload.query.builder.queryData;
|
||||
|
||||
// Verify all queries include the custom filters
|
||||
queryData.forEach((query: any) => {
|
||||
if (query.filter && query.filter.expression) {
|
||||
expect(query.filter.expression).toContain('service.name');
|
||||
expect(query.filter.expression).toContain('my-service');
|
||||
expect(query.filter.expression).toContain('deployment.environment');
|
||||
expect(query.filter.expression).toContain('production');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. Data Display State', () => {
|
||||
it('displays metrics when data is successfully loaded', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
// Wait for skeletons to disappear
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// Verify all metric labels are displayed
|
||||
expect(screen.getByText('EXTERNAL API')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
|
||||
// Verify metric values are displayed
|
||||
expect(screen.getByText('150')).toBeInTheDocument();
|
||||
expect(screen.getByText('0.125s')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. Empty/Missing Data State', () => {
|
||||
it('displays "-" for missing data values', async () => {
|
||||
const emptyResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(emptyResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// When no data, all values should show "-"
|
||||
const dashValues = screen.getAllByText('-');
|
||||
expect(dashValues.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. Error State', () => {
|
||||
it('displays error state when API call fails', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockRejectedValue(new Error('API Error'));
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('retries API call when retry button is clicked', async () => {
|
||||
let callCount = 0;
|
||||
(GetMetricQueryRange as jest.Mock).mockImplementation(() => {
|
||||
callCount += 1;
|
||||
if (callCount === 1) {
|
||||
return Promise.reject(new Error('API Error'));
|
||||
}
|
||||
return Promise.resolve(mockSuccessResponse);
|
||||
});
|
||||
|
||||
renderComponent();
|
||||
|
||||
// Wait for error state
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click retry
|
||||
const retryButton = screen.getByTestId('retry-button');
|
||||
retryButton.click();
|
||||
|
||||
// Wait for successful load
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('150')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(callCount).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,6 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
  DomainMetricsResponseRow,
@@ -44,10 +44,10 @@ function DomainMetrics({
      queryKey: [
        REACT_QUERY_KEY.GET_DOMAIN_METRICS_DATA,
        payload,
        ENTITY_VERSION_V5,
        ENTITY_VERSION_V4,
      ],
      queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
        GetMetricQueryRange(payload, ENTITY_VERSION_V5),
        GetMetricQueryRange(payload, ENTITY_VERSION_V4),
      enabled: !!payload,
      staleTime: 60 * 1000, // 1 minute stale time : optimize this part
    })),
@@ -132,9 +132,7 @@ function DomainMetrics({
      ) : (
        <Tooltip title={formattedDomainMetricsData.latency}>
          <span className="round-metric-tag">
            {formattedDomainMetricsData.latency !== '-'
              ? `${(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s`
              : '-'}
            {(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s
          </span>
        </Tooltip>
      )}
@@ -145,27 +143,23 @@ function DomainMetrics({
        <Skeleton.Button active size="small" />
      ) : (
        <Tooltip title={formattedDomainMetricsData.errorRate}>
          {formattedDomainMetricsData.errorRate !== '-' ? (
            <Progress
              status="active"
              percent={Number(
          <Progress
            status="active"
            percent={Number(
              Number(formattedDomainMetricsData.errorRate).toFixed(2),
            )}
            strokeLinecap="butt"
            size="small"
            strokeColor={((): string => {
              const errorRatePercent = Number(
                Number(formattedDomainMetricsData.errorRate).toFixed(2),
              )}
              strokeLinecap="butt"
              size="small"
              strokeColor={((): string => {
                const errorRatePercent = Number(
                  Number(formattedDomainMetricsData.errorRate).toFixed(2),
                );
                if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
                if (errorRatePercent >= 60) return Color.BG_AMBER_500;
                return Color.BG_FOREST_500;
              })()}
              className="progress-bar"
            />
          ) : (
            '-'
          )}
              );
              if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
              if (errorRatePercent >= 60) return Color.BG_AMBER_500;
              return Color.BG_FOREST_500;
            })()}
            className="progress-bar"
          />
        </Tooltip>
      )}
    </Typography.Text>

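Note: both sides of the error-rate hunk above compute the Progress stroke colour with the same inline IIFE and the same thresholds. A sketch of that mapping pulled out as a helper, using the Color tokens already imported in this file:

import { Color } from '@signozhq/design-tokens';

// >= 90% error rate renders red, >= 60% amber, anything lower green.
function errorRateStrokeColor(errorRatePercent: number): string {
  if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
  if (errorRatePercent >= 60) return Color.BG_AMBER_500;
  return Color.BG_FOREST_500;
}
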
@@ -1,419 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable prefer-destructuring */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { QueryClient, QueryClientProvider, UseQueryResult } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import EndPointMetrics from './EndPointMetrics';
|
||||
|
||||
// Mock the API call
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
GetMetricQueryRange: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock ErrorState component
|
||||
jest.mock('./ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(({ refetch }) => (
|
||||
<div data-testid="error-state">
|
||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const mockSuccessResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
A: '85.5',
|
||||
B: '245000000',
|
||||
D: '2021-01-01T22:30:00Z',
|
||||
F1: '3.2',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
cacheTime: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
// Helper to create mock query result
|
||||
const createMockQueryResult = (
|
||||
response: any,
|
||||
overrides?: Partial<UseQueryResult<SuccessResponse<any>, unknown>>,
|
||||
): UseQueryResult<SuccessResponse<any>, unknown> =>
|
||||
({
|
||||
data: response,
|
||||
error: null,
|
||||
isError: false,
|
||||
isIdle: false,
|
||||
isLoading: false,
|
||||
isLoadingError: false,
|
||||
isRefetchError: false,
|
||||
isRefetching: false,
|
||||
isStale: true,
|
||||
isSuccess: true,
|
||||
status: 'success' as const,
|
||||
dataUpdatedAt: Date.now(),
|
||||
errorUpdateCount: 0,
|
||||
errorUpdatedAt: 0,
|
||||
failureCount: 0,
|
||||
isFetched: true,
|
||||
isFetchedAfterMount: true,
|
||||
isFetching: false,
|
||||
isPlaceholderData: false,
|
||||
isPreviousData: false,
|
||||
refetch: jest.fn(),
|
||||
remove: jest.fn(),
|
||||
...overrides,
|
||||
} as UseQueryResult<SuccessResponse<any>, unknown>);
|
||||
|
||||
const renderComponent = (
|
||||
endPointMetricsDataQuery: UseQueryResult<SuccessResponse<any>, unknown>,
|
||||
): ReturnType<typeof render> =>
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
describe('1. V5 Query Payload with Filters', () => {
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
it('sends correct V5 payload structure with domain and endpoint filters', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const domainName = 'api.example.com';
|
||||
const startTime = 1758259531000;
|
||||
const endTime = 1758261331000;
|
||||
const filters = {
|
||||
items: [],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
// Get the actual payload that would be generated
|
||||
const payloads = getEndPointDetailsQueryPayload(
|
||||
domainName,
|
||||
startTime,
|
||||
endTime,
|
||||
filters,
|
||||
);
|
||||
|
||||
// First payload is for endpoint metrics
|
||||
const metricsPayload = payloads[0];
|
||||
|
||||
// Verify it's using the correct structure (V3 format for V5 API)
|
||||
expect(metricsPayload.query).toBeDefined();
|
||||
expect(metricsPayload.query.builder).toBeDefined();
|
||||
expect(metricsPayload.query.builder.queryData).toBeDefined();
|
||||
|
||||
const queryData = metricsPayload.query.builder.queryData;
|
||||
|
||||
// Verify Query A - rate with domain and client kind filters
|
||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
||||
expect(queryA).toBeDefined();
|
||||
if (queryA) {
|
||||
expect(queryA.dataSource).toBe('traces');
|
||||
expect(queryA.aggregateOperator).toBe('rate');
|
||||
expect(queryA.timeAggregation).toBe('rate');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryA.filter) {
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query B - p99 latency with duration_nano
|
||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
||||
expect(queryB).toBeDefined();
|
||||
if (queryB) {
|
||||
expect(queryB.aggregateOperator).toBe('p99');
|
||||
if (queryB.aggregateAttribute) {
|
||||
expect(queryB.aggregateAttribute.key).toBe('duration_nano');
|
||||
}
|
||||
expect(queryB.timeAggregation).toBe('p99');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryB.filter) {
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
if (queryC) {
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.aggregateOperator).toBe('count');
|
||||
if (queryC.filter) {
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query D - max timestamp for last used
|
||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
||||
expect(queryD).toBeDefined();
|
||||
if (queryD) {
|
||||
expect(queryD.aggregateOperator).toBe('max');
|
||||
if (queryD.aggregateAttribute) {
|
||||
expect(queryD.aggregateAttribute.key).toBe('timestamp');
|
||||
}
|
||||
expect(queryD.timeAggregation).toBe('max');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryD.filter) {
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query E - total count (disabled)
|
||||
const queryE = queryData.find((q: any) => q.queryName === 'E');
|
||||
expect(queryE).toBeDefined();
|
||||
if (queryE) {
|
||||
expect(queryE.disabled).toBe(true);
|
||||
expect(queryE.aggregateOperator).toBe('count');
|
||||
if (queryE.aggregateAttribute) {
|
||||
expect(queryE.aggregateAttribute.key).toBe('span_id');
|
||||
}
|
||||
if (queryE.filter) {
|
||||
expect(queryE.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = metricsPayload.query.builder.queryFormulas;
|
||||
expect(formulas).toBeDefined();
|
||||
expect(formulas.length).toBeGreaterThan(0);
|
||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
||||
expect(formulaF1).toBeDefined();
|
||||
if (formulaF1) {
|
||||
expect(formulaF1.expression).toBe('(C/E)*100');
|
||||
expect(formulaF1.disabled).toBe(false);
|
||||
expect(formulaF1.legend).toBe('error percentage');
|
||||
}
|
||||
});
|
||||
|
||||
it('includes custom domainListFilters in all query expressions', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const customFilters = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-1',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'payment-service',
|
||||
},
|
||||
{
|
||||
id: 'test-2',
|
||||
key: {
|
||||
key: 'deployment.environment',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'staging',
|
||||
},
|
||||
],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
const payloads = getEndPointDetailsQueryPayload(
|
||||
'api.internal.com',
|
||||
1758259531000,
|
||||
1758261331000,
|
||||
customFilters,
|
||||
);
|
||||
|
||||
const queryData = payloads[0].query.builder.queryData;
|
||||
|
||||
// Verify ALL queries (A, B, C, D, E) include the custom filters
|
||||
const allQueryNames = ['A', 'B', 'C', 'D', 'E'];
|
||||
allQueryNames.forEach((queryName) => {
|
||||
const query = queryData.find((q: any) => q.queryName === queryName);
|
||||
expect(query).toBeDefined();
|
||||
if (query && query.filter && query.filter.expression) {
|
||||
// Check for exact filter inclusion
|
||||
expect(query.filter.expression).toContain('service.name');
|
||||
expect(query.filter.expression).toContain('payment-service');
|
||||
expect(query.filter.expression).toContain('deployment.environment');
|
||||
expect(query.filter.expression).toContain('staging');
|
||||
// Also verify domain filter is still present
|
||||
expect(query.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
|
||||
);
|
||||
// Verify client kind filter is present
|
||||
expect(query.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. Data Display State', () => {
|
||||
it('displays metrics when data is successfully loaded', async () => {
|
||||
const mockQuery = createMockQueryResult(mockSuccessResponse);
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
// Wait for skeletons to disappear
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// Verify all metric labels are displayed
|
||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
|
||||
// Verify metric values are displayed
|
||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
||||
expect(screen.getByText('245ms')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. Empty/Missing Data State', () => {
|
||||
it("displays '-' for missing data values", async () => {
|
||||
const emptyResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const mockQuery = createMockQueryResult(emptyResponse);
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// When no data, all values should show "-"
|
||||
const dashValues = screen.getAllByText('-');
|
||||
// Should have at least 2 dashes (rate and last used - latency shows "-", error % shows progress bar)
|
||||
expect(dashValues.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. Error State', () => {
|
||||
it('displays error state when API call fails', async () => {
|
||||
const mockQuery = createMockQueryResult(null, {
|
||||
isError: true,
|
||||
isSuccess: false,
|
||||
status: 'error',
|
||||
error: new Error('API Error'),
|
||||
});
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('retries API call when retry button is clicked', async () => {
|
||||
const refetch = jest.fn().mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
// Start with error state
|
||||
const mockQuery = createMockQueryResult(null, {
|
||||
isError: true,
|
||||
isSuccess: false,
|
||||
status: 'error',
|
||||
error: new Error('API Error'),
|
||||
refetch,
|
||||
});
|
||||
|
||||
const { rerender } = renderComponent(mockQuery);
|
||||
|
||||
// Wait for error state
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click retry
|
||||
const retryButton = screen.getByTestId('retry-button');
|
||||
retryButton.click();
|
||||
|
||||
// Verify refetch was called
|
||||
expect(refetch).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Simulate successful refetch by rerendering with success state
|
||||
const successQuery = createMockQueryResult(mockSuccessResponse);
|
||||
rerender(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<EndPointMetrics endPointMetricsDataQuery={successQuery} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
// Wait for successful load
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,16 +1,12 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
import {
  getDisplayValue,
  getFormattedEndPointMetricsData,
} from 'container/ApiMonitoring/utils';
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

// eslint-disable-next-line sonarjs/cognitive-complexity
function EndPointMetrics({
  endPointMetricsDataQuery,
}: {
@@ -74,9 +70,7 @@ function EndPointMetrics({
        <Skeleton.Button active size="small" />
      ) : (
        <Tooltip title={metricsData?.rate}>
          <span className="round-metric-tag">
            {metricsData?.rate !== '-' ? `${metricsData?.rate} ops/sec` : '-'}
          </span>
          <span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
        </Tooltip>
      )}
    </Typography.Text>
@@ -85,7 +79,7 @@ function EndPointMetrics({
        <Skeleton.Button active size="small" />
      ) : (
        <Tooltip title={metricsData?.latency}>
          {metricsData?.latency !== '-' ? `${metricsData?.latency}ms` : '-'}
          <span className="round-metric-tag">{metricsData?.latency}ms</span>
        </Tooltip>
      )}
    </Typography.Text>
@@ -94,25 +88,21 @@ function EndPointMetrics({
        <Skeleton.Button active size="small" />
      ) : (
        <Tooltip title={metricsData?.errorRate}>
          {metricsData?.errorRate !== '-' ? (
            <Progress
              status="active"
              percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
              strokeLinecap="butt"
              size="small"
              strokeColor={((): string => {
                const errorRatePercent = Number(
                  Number(metricsData?.errorRate ?? 0).toFixed(2),
                );
                if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
                if (errorRatePercent >= 60) return Color.BG_AMBER_500;
                return Color.BG_FOREST_500;
              })()}
              className="progress-bar"
            />
          ) : (
            '-'
          )}
          <Progress
            status="active"
            percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
            strokeLinecap="butt"
            size="small"
            strokeColor={((): string => {
              const errorRatePercent = Number(
                Number(metricsData?.errorRate ?? 0).toFixed(2),
              );
              if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
              if (errorRatePercent >= 60) return Color.BG_AMBER_500;
              return Color.BG_FOREST_500;
            })()}
            className="progress-bar"
          />
        </Tooltip>
      )}
    </Typography.Text>
@@ -120,9 +110,7 @@ function EndPointMetrics({
      {isLoading || isRefetching ? (
        <Skeleton.Button active size="small" />
      ) : (
        <Tooltip title={metricsData?.lastUsed}>
          {getDisplayValue(metricsData?.lastUsed)}
        </Tooltip>
        <Tooltip title={metricsData?.lastUsed}>{metricsData?.lastUsed}</Tooltip>
      )}
    </Typography.Text>
  </div>

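Note: the left-hand side of the EndPointMetrics hunks above guards every cell against the '-' placeholder before appending a unit (ops/sec, ms) and routes the last-used value through getDisplayValue; the right-hand side prints the raw values. A small TypeScript sketch of the guard pattern only (this is an assumption about the intent, not the repo's actual getDisplayValue implementation):

// Append a unit suffix only when a real value is present; otherwise keep the '-' placeholder.
function formatMetric(value: string | undefined, suffix: string): string {
  return value !== undefined && value !== '-' ? `${value}${suffix}` : '-';
}
// formatMetric('245', 'ms') === '245ms'
// formatMetric('-', 'ms') === '-'
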
@@ -1,5 +1,4 @@
import { Card } from 'antd';
import { ENTITY_VERSION_V5 } from 'constants/app';
import GridCard from 'container/GridCardLayout/GridCard';
import { Widgets } from 'types/api/dashboard/getAll';

@@ -23,7 +22,6 @@ function MetricOverTimeGraph({
        customOnDragSelect={(): void => {}}
        customTimeRange={timeRange}
        customTimeRangeWindowForCoRelation="5m"
        version={ENTITY_VERSION_V5}
      />
    </div>
  </Card>

@@ -8,11 +8,17 @@ import {
|
||||
endPointStatusCodeColumns,
|
||||
extractPortAndEndpoint,
|
||||
formatDataForTable,
|
||||
getAllEndpointsWidgetData,
|
||||
getCustomFiltersForBarChart,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getFormattedDependentServicesData,
|
||||
getFormattedEndPointDropDownData,
|
||||
getFormattedEndPointMetricsData,
|
||||
getFormattedEndPointStatusCodeChartData,
|
||||
getFormattedEndPointStatusCodeData,
|
||||
getGroupByFiltersFromGroupByValues,
|
||||
getLatencyOverTimeWidgetData,
|
||||
getRateOverTimeWidgetData,
|
||||
getStatusCodeBarChartWidgetData,
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
@@ -43,13 +49,119 @@ jest.mock('../utils', () => {
|
||||
});
|
||||
|
||||
describe('API Monitoring Utils', () => {
|
||||
describe('getAllEndpointsWidgetData', () => {
|
||||
it('should create a widget with correct configuration', () => {
|
||||
// Arrange
|
||||
const groupBy = [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
key: 'http.method',
|
||||
type: '',
|
||||
},
|
||||
];
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
const domainName = 'test-domain';
|
||||
const filters = {
|
||||
items: [
|
||||
{
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: 'test-key',
|
||||
type: '',
|
||||
},
|
||||
op: '=',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = getAllEndpointsWidgetData(
|
||||
groupBy as BaseAutocompleteData[],
|
||||
domainName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.id).toBeDefined();
|
||||
// Title is a React component, not a string
|
||||
expect(result.title).toBeDefined();
|
||||
expect(result.panelTypes).toBe(PANEL_TYPES.TABLE);
|
||||
|
||||
// Check that each query includes the domainName filter
|
||||
result.query.builder.queryData.forEach((query) => {
|
||||
const serverNameFilter = query.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(serverNameFilter).toBeDefined();
|
||||
expect(serverNameFilter?.value).toBe(domainName);
|
||||
|
||||
// Check that the custom filters were included
|
||||
const testFilter = query.filters?.items?.find(
|
||||
(item) => item.id === 'test-filter',
|
||||
);
|
||||
expect(testFilter).toBeDefined();
|
||||
});
|
||||
|
||||
// Verify groupBy was included in queries
|
||||
if (result.query.builder.queryData[0].groupBy) {
|
||||
const hasCustomGroupBy = result.query.builder.queryData[0].groupBy.some(
|
||||
(item) => item && item.key === 'http.method',
|
||||
);
|
||||
expect(hasCustomGroupBy).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle empty groupBy correctly', () => {
|
||||
// Arrange
|
||||
const groupBy: any[] = [];
|
||||
const domainName = 'test-domain';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getAllEndpointsWidgetData(groupBy, domainName, filters);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
// Should only include default groupBy
|
||||
if (result.query.builder.queryData[0].groupBy) {
|
||||
expect(result.query.builder.queryData[0].groupBy.length).toBeGreaterThan(0);
|
||||
// Check that it doesn't have extra group by fields (only defaults)
|
||||
const defaultGroupByLength =
|
||||
result.query.builder.queryData[0].groupBy.length;
|
||||
const resultWithCustomGroupBy = getAllEndpointsWidgetData(
|
||||
[
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
key: 'custom.field',
|
||||
type: '',
|
||||
},
|
||||
] as BaseAutocompleteData[],
|
||||
domainName,
|
||||
filters,
|
||||
);
|
||||
// Custom groupBy should have more fields than default
|
||||
if (resultWithCustomGroupBy.query.builder.queryData[0].groupBy) {
|
||||
expect(
|
||||
resultWithCustomGroupBy.query.builder.queryData[0].groupBy.length,
|
||||
).toBeGreaterThan(defaultGroupByLength);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// New tests for formatDataForTable
|
||||
describe('formatDataForTable', () => {
|
||||
it('should format rows correctly with valid data', () => {
|
||||
const columns = APIMonitoringColumnsMock;
|
||||
const data = [
|
||||
[
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
'test-domain', // domainName
|
||||
'10', // endpoints
|
||||
'25', // rps
|
||||
@@ -107,7 +219,6 @@ describe('API Monitoring Utils', () => {
|
||||
const groupBy = [
|
||||
{
|
||||
id: 'group-by-1',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
key: 'http.method',
|
||||
dataType: DataTypes.String,
|
||||
type: '',
|
||||
@@ -341,6 +452,243 @@ describe('API Monitoring Utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEndPointDetailsQueryPayload', () => {
|
||||
it('should generate proper query payload with all parameters', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const startTime = 1609459200000; // 2021-01-01
|
||||
const endTime = 1609545600000; // 2021-01-02
|
||||
const filters = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
dataType: 'string',
|
||||
key: 'test.key',
|
||||
type: '',
|
||||
},
|
||||
op: '=',
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = getEndPointDetailsQueryPayload(
|
||||
domainName,
|
||||
startTime,
|
||||
endTime,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toHaveLength(6); // Should return 6 queries
|
||||
|
||||
// Check that each query includes proper parameters
|
||||
result.forEach((query) => {
|
||||
expect(query).toHaveProperty('start', startTime);
|
||||
expect(query).toHaveProperty('end', endTime);
|
||||
|
||||
// Should have query property with builder data
|
||||
expect(query).toHaveProperty('query');
|
||||
expect(query.query).toHaveProperty('builder');
|
||||
|
||||
// All queries should include the domain filter
|
||||
const {
|
||||
query: {
|
||||
builder: { queryData },
|
||||
},
|
||||
} = query;
|
||||
queryData.forEach((qd) => {
|
||||
if (qd.filters && qd.filters.items) {
|
||||
const serverNameFilter = qd.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(serverNameFilter).toBeDefined();
|
||||
// Only check if the serverNameFilter exists, as the actual value might vary
|
||||
// depending on implementation details or domain defaults
|
||||
if (serverNameFilter) {
|
||||
expect(typeof serverNameFilter.value).toBe('string');
|
||||
}
|
||||
}
|
||||
|
||||
// Should include our custom filter
|
||||
const customFilter = qd.filters?.items?.find(
|
||||
(item) => item.id === 'test-filter',
|
||||
);
|
||||
expect(customFilter).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getRateOverTimeWidgetData', () => {
|
||||
it('should generate widget configuration for rate over time', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getRateOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toHaveProperty('title', 'Rate Over Time');
|
||||
// Check only title since description might vary
|
||||
|
||||
// Check query configuration
|
||||
expect(result).toHaveProperty('query');
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
expect(result).toHaveProperty('query.builder.queryData');
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Should have domain filter
|
||||
const domainFilter = queryData.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(domainFilter).toBeDefined();
|
||||
if (domainFilter) {
|
||||
expect(typeof domainFilter.value).toBe('string');
|
||||
}
|
||||
|
||||
// Should have 'rate' time aggregation
|
||||
expect(queryData).toHaveProperty('timeAggregation', 'rate');
|
||||
|
||||
// Should have proper legend that includes endpoint info
|
||||
expect(queryData).toHaveProperty('legend');
|
||||
expect(
|
||||
typeof queryData.legend === 'string' ? queryData.legend : '',
|
||||
).toContain('/api/test');
|
||||
});
|
||||
|
||||
it('should handle case without endpoint name', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getRateOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Legend should be domain name only
|
||||
expect(queryData).toHaveProperty('legend', domainName);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLatencyOverTimeWidgetData', () => {
|
||||
it('should generate widget configuration for latency over time', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getLatencyOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toHaveProperty('title', 'Latency Over Time');
|
||||
// Check only title since description might vary
|
||||
|
||||
// Check query configuration
|
||||
expect(result).toHaveProperty('query');
|
||||
expect(result).toHaveProperty('query.builder.queryData');
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Should have domain filter
|
||||
const domainFilter = queryData.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(domainFilter).toBeDefined();
|
||||
if (domainFilter) {
|
||||
expect(typeof domainFilter.value).toBe('string');
|
||||
}
|
||||
|
||||
// Should use duration_nano as the aggregate attribute
|
||||
expect(queryData.aggregateAttribute).toHaveProperty('key', 'duration_nano');
|
||||
|
||||
// Should have 'p99' time aggregation
|
||||
expect(queryData).toHaveProperty('timeAggregation', 'p99');
|
||||
});
|
||||
|
||||
it('should handle case without endpoint name', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getLatencyOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Legend should be domain name only
|
||||
expect(queryData).toHaveProperty('legend', domainName);
|
||||
});
|
||||
|
||||
// Changed approach to verify end-to-end behavior for URL with port
|
||||
it('should format legends appropriately for complete URLs with ports', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = 'http://example.com:8080/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Extract what we expect the function to extract
|
||||
const expectedParts = extractPortAndEndpoint(endPointName);
|
||||
|
||||
// Act
|
||||
const result = getLatencyOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Check that legend is present and is a string
|
||||
expect(queryData).toHaveProperty('legend');
|
||||
expect(typeof queryData.legend).toBe('string');
|
||||
|
||||
// If the URL has a port and endpoint, the legend should reflect that appropriately
|
||||
// (Testing the integration rather than the exact formatting)
|
||||
if (expectedParts.port !== '-') {
|
||||
// Verify that both components are incorporated into the legend in some way
|
||||
// This tests the behavior without relying on the exact implementation details
|
||||
const legendStr = queryData.legend as string;
|
||||
expect(legendStr).not.toBe(domainName); // Legend should be different when URL has port/endpoint
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFormattedEndPointDropDownData', () => {
|
||||
it('should format endpoint dropdown data correctly', () => {
|
||||
// Arrange
|
||||
@@ -350,7 +698,6 @@ describe('API Monitoring Utils', () => {
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/users',
|
||||
'url.full': 'http://example.com/api/users',
|
||||
A: 150, // count or other metric
|
||||
},
|
||||
},
|
||||
@@ -358,7 +705,6 @@ describe('API Monitoring Utils', () => {
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/orders',
|
||||
'url.full': 'http://example.com/api/orders',
|
||||
A: 75,
|
||||
},
|
||||
},
|
||||
@@ -442,6 +788,87 @@ describe('API Monitoring Utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFormattedEndPointMetricsData', () => {
|
||||
it('should format endpoint metrics data correctly', () => {
|
||||
// Arrange
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
A: '50', // rate
|
||||
B: '15000000', // latency in nanoseconds
|
||||
C: '5', // required by type
|
||||
D: '1640995200000000', // timestamp in nanoseconds
|
||||
F1: '5.5', // error rate
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Act
|
||||
const result = getFormattedEndPointMetricsData(mockData as any);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.key).toBeDefined();
|
||||
expect(result.rate).toBe('50');
|
||||
expect(result.latency).toBe(15); // Should be converted from ns to ms
|
||||
expect(result.errorRate).toBe(5.5);
|
||||
expect(typeof result.lastUsed).toBe('string'); // Time formatting is tested elsewhere
|
||||
});
|
||||

    // eslint-disable-next-line sonarjs/no-duplicate-string
    it('should handle undefined values in data', () => {
      // Arrange
      const mockData = [
        {
          data: {
            A: undefined,
            B: 'n/a',
            C: '', // required by type
            D: undefined,
            F1: 'n/a',
          },
        },
      ];

      // Act
      const result = getFormattedEndPointMetricsData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.rate).toBe('-');
      expect(result.latency).toBe('-');
      expect(result.errorRate).toBe(0);
      expect(result.lastUsed).toBe('-');
    });

    it('should handle empty input array', () => {
      // Act
      const result = getFormattedEndPointMetricsData([]);

      // Assert
      expect(result).toBeDefined();
      expect(result.rate).toBe('-');
      expect(result.latency).toBe('-');
      expect(result.errorRate).toBe(0);
      expect(result.lastUsed).toBe('-');
    });

    it('should handle undefined input', () => {
      // Arrange
      const undefinedInput = undefined as any;

      // Act
      const result = getFormattedEndPointMetricsData(undefinedInput);

      // Assert
      expect(result).toBeDefined();
      expect(result.rate).toBe('-');
      expect(result.latency).toBe('-');
      expect(result.errorRate).toBe(0);
      expect(result.lastUsed).toBe('-');
    });
  });

  describe('getFormattedEndPointStatusCodeData', () => {
    it('should format status code data correctly', () => {
      // Arrange
@@ -578,6 +1005,139 @@ describe('API Monitoring Utils', () => {
    });
  });

  describe('getFormattedDependentServicesData', () => {
    it('should format dependent services data correctly', () => {
      // Arrange
      const mockData = [
        {
          data: {
            // eslint-disable-next-line sonarjs/no-duplicate-string
            'service.name': 'auth-service',
            A: '500', // count
            B: '120000000', // latency in nanoseconds
            C: '15', // rate
            F1: '2.5', // error percentage
          },
        },
        {
          data: {
            'service.name': 'db-service',
            A: '300',
            B: '80000000',
            C: '10',
            F1: '1.2',
          },
        },
      ];

      // Act
      const result = getFormattedDependentServicesData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.length).toBe(2);

      // Check first service
      expect(result[0].key).toBeDefined();
      expect(result[0].serviceData.serviceName).toBe('auth-service');
      expect(result[0].serviceData.count).toBe(500);
      expect(typeof result[0].serviceData.percentage).toBe('number');
      expect(result[0].latency).toBe(120); // Should be converted from ns to ms
      expect(result[0].rate).toBe('15');
      expect(result[0].errorPercentage).toBe('2.5');

      // Check second service
      expect(result[1].serviceData.serviceName).toBe('db-service');
      expect(result[1].serviceData.count).toBe(300);
      expect(result[1].latency).toBe(80);
      expect(result[1].rate).toBe('10');
      expect(result[1].errorPercentage).toBe('1.2');

      // Verify percentage calculation
      const totalCount = 500 + 300;
      expect(result[0].serviceData.percentage).toBeCloseTo(
        (500 / totalCount) * 100,
        2,
      );
      expect(result[1].serviceData.percentage).toBeCloseTo(
        (300 / totalCount) * 100,
        2,
      );
    });
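    // Percentage arithmetic behind the toBeCloseTo assertions above: the counts
    // are 500 and 300, so the total is 800, giving auth-service
    // (500 / 800) * 100 = 62.5% and db-service (300 / 800) * 100 = 37.5%.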

    it('should handle undefined values in data', () => {
      // Arrange
      const mockData = [
        {
          data: {
            'service.name': 'auth-service',
            A: 'n/a',
            B: undefined,
            C: 'n/a',
            F1: undefined,
          },
        },
      ];

      // Act
      const result = getFormattedDependentServicesData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.length).toBe(1);
      expect(result[0].serviceData.serviceName).toBe('auth-service');
      expect(result[0].serviceData.count).toBe('-');
      expect(result[0].serviceData.percentage).toBe(0);
      expect(result[0].latency).toBe('-');
      expect(result[0].rate).toBe('-');
      expect(result[0].errorPercentage).toBe(0);
    });

    it('should handle empty input array', () => {
      // Act
      const result = getFormattedDependentServicesData([]);

      // Assert
      expect(result).toBeDefined();
      expect(result).toEqual([]);
    });

    it('should handle undefined input', () => {
      // Arrange
      const undefinedInput = undefined as any;

      // Act
      const result = getFormattedDependentServicesData(undefinedInput);

      // Assert
      expect(result).toBeDefined();
      expect(result).toEqual([]);
    });

    it('should handle missing service name', () => {
      // Arrange
      const mockData = [
        {
          data: {
            // Missing service.name
            A: '200',
            B: '50000000',
            C: '8',
            F1: '0.5',
          },
        },
      ];

      // Act
      const result = getFormattedDependentServicesData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.length).toBe(1);
      expect(result[0].serviceData.serviceName).toBe('-');
    });
  });

  describe('getFormattedEndPointStatusCodeChartData', () => {
    afterEach(() => {
      jest.resetAllMocks();
Some files were not shown because too many files have changed in this diff.