Compare commits
2 Commits: enh/dashbo...tvats-hand

| Author | SHA1 | Date |
|---|---|---|
| | 509a1cfb85 | |
| | fd118d386a | |

@@ -42,7 +42,7 @@ services:
       timeout: 5s
       retries: 3
   schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.8
     container_name: schema-migrator-sync
     command:
       - sync
@@ -55,7 +55,7 @@ services:
        condition: service_healthy
    restart: on-failure
  schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.8
    container_name: schema-migrator-async
    command:
      - async

Makefile (10 changes)

@@ -84,9 +84,10 @@ go-run-enterprise: ## Runs the enterprise go backend server
     SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
     SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
     SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
-    SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
     go run -race \
-    $(GO_BUILD_CONTEXT_ENTERPRISE)/*.go
+    $(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
+    --config ./conf/prometheus.yml \
+    --cluster cluster

 .PHONY: go-test
 go-test: ## Runs go unit tests
@@ -101,9 +102,10 @@ go-run-community: ## Runs the community go backend server
     SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
     SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
     SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
-    SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
     go run -race \
-    $(GO_BUILD_CONTEXT_COMMUNITY)/*.go server
+    $(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
+    --config ./conf/prometheus.yml \
+    --cluster cluster

 .PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
 go-build-community: ## Builds the go backend server for community

@@ -5,12 +5,9 @@ import (
   "log/slog"

   "github.com/SigNoz/signoz/cmd"
-  "github.com/SigNoz/signoz/ee/authz/openfgaauthz"
-  "github.com/SigNoz/signoz/ee/authz/openfgaschema"
   "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
   "github.com/SigNoz/signoz/pkg/analytics"
   "github.com/SigNoz/signoz/pkg/authn"
-  "github.com/SigNoz/signoz/pkg/authz"
   "github.com/SigNoz/signoz/pkg/factory"
   "github.com/SigNoz/signoz/pkg/licensing"
   "github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -79,9 +76,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
     func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
       return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
     },
-    func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
-      return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
-    },
   )
   if err != nil {
     logger.ErrorContext(ctx, "failed to create signoz", "error", err)

@@ -8,8 +8,6 @@ import (
   "github.com/SigNoz/signoz/cmd"
   "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
   "github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
-  "github.com/SigNoz/signoz/ee/authz/openfgaauthz"
-  "github.com/SigNoz/signoz/ee/authz/openfgaschema"
   enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
   "github.com/SigNoz/signoz/ee/licensing/httplicensing"
   enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -19,7 +17,6 @@ import (
   "github.com/SigNoz/signoz/ee/zeus/httpzeus"
   "github.com/SigNoz/signoz/pkg/analytics"
   "github.com/SigNoz/signoz/pkg/authn"
-  "github.com/SigNoz/signoz/pkg/authz"
   "github.com/SigNoz/signoz/pkg/factory"
   "github.com/SigNoz/signoz/pkg/licensing"
   "github.com/SigNoz/signoz/pkg/modules/organization"
@@ -108,9 +105,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e

       return authNs, nil
     },
-    func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
-      return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
-    },
   )
   if err != nil {
     logger.ErrorContext(ctx, "failed to create signoz", "error", err)

@@ -176,7 +176,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.102.0
+    image: signoz/signoz:v0.100.1
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -209,7 +209,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.11
+    image: signoz/signoz-otel-collector:v0.129.8
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.8
     deploy:
       restart_policy:
         condition: on-failure

@@ -117,7 +117,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.102.0
+    image: signoz/signoz:v0.100.1
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -150,7 +150,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.11
+    image: signoz/signoz-otel-collector:v0.129.8
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.8
     deploy:
       restart_policy:
         condition: on-failure

@@ -179,7 +179,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.102.0}
+    image: signoz/signoz:${VERSION:-v0.100.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -250,7 +250,7 @@ services:
        condition: service_healthy
  schema-migrator-async:
    !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
    container_name: schema-migrator-async
    command:
      - async

@@ -111,7 +111,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.102.0}
+    image: signoz/signoz:${VERSION:-v0.100.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +178,7 @@ services:
    restart: on-failure
  schema-migrator-async:
    !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
    container_name: schema-migrator-async
    command:
      - async

@@ -103,19 +103,9 @@ Remember to replace the region and ingestion key with proper values as obtained

 Both SigNoz and OTel demo app [frontend-proxy service, to be accurate] share common port allocation at 8080. To prevent port allocation conflicts, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run docker compose command.
-
-Also, both SigNoz and OTel Demo App have the same `PROMETHEUS_PORT` configured, by default both of them try to start at `9090`, which may cause either of them to fail depending upon which one acquires it first. To prevent this, we need to mofify the value of `PROMETHEUS_PORT` too.
-
-
 ```sh
-ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
+ENVOY_PORT=8081 docker compose up -d
 ```
-
-Alternatively, we can modify these values using the `.env` file too, which reduces the command as just:
-
-```sh
-docker compose up -d
-```

 This spins up multiple microservices, with OpenTelemetry instrumentation enabled. you can verify this by,
 ```sh
 docker compose ps -a

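For reference, the removed paragraph above points at an `.env`-based alternative to passing the ports inline. A minimal sketch of that approach, assuming Docker Compose's standard behaviour of reading an `.env` file from the directory containing the demo's compose file (the variable names come from the command shown above; the file location is an assumption, not something stated in this diff):

```sh
# .env (assumed to sit next to the OTel demo's docker-compose file)
# docker compose reads this file automatically, so a plain
# `docker compose up -d` picks up both overrides.
ENVOY_PORT=8081
PROMETHEUS_PORT=9091
```
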
@@ -48,26 +48,7 @@ func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey)
 }

 func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
-  subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
-  if err != nil {
-    return err
-  }
-
-  tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
-  if err != nil {
-    return err
-  }
-
-  err = provider.BatchCheck(ctx, tuples)
-  if err != nil {
-    return err
-  }
-
-  return nil
-}
-
-func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
-  subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
+  subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
   if err != nil {
     return err
   }

@@ -15,18 +15,18 @@ type anonymous

 type role
   relations
-    define assignee: [user, anonymous]
+    define assignee: [user]

     define read: [user, role#assignee]
     define update: [user, role#assignee]
     define delete: [user, role#assignee]

-type metaresources
+type resources
   relations
     define create: [user, role#assignee]
     define list: [user, role#assignee]

-type metaresource
+type resource
   relations
     define read: [user, anonymous, role#assignee]
     define update: [user, role#assignee]
@@ -35,6 +35,6 @@ type metaresource
     define block: [user, role#assignee]


-type telemetryresource
+type telemetry
   relations
-    define read: [user, role#assignee]
+    define read: [user, anonymous, role#assignee]

@@ -20,10 +20,6 @@ import (
   basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
   rules "github.com/SigNoz/signoz/pkg/query-service/rules"
   "github.com/SigNoz/signoz/pkg/signoz"
-  "github.com/SigNoz/signoz/pkg/types"
-  "github.com/SigNoz/signoz/pkg/types/authtypes"
-  "github.com/SigNoz/signoz/pkg/types/dashboardtypes"
-  "github.com/SigNoz/signoz/pkg/valuer"
   "github.com/SigNoz/signoz/pkg/version"
   "github.com/gorilla/mux"
 )
@@ -103,39 +99,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
   router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
   router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)

-  // dashboards
-  router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.CreatePublic)).Methods(http.MethodPost)
-  router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.GetPublic)).Methods(http.MethodGet)
-  router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.UpdatePublic)).Methods(http.MethodPut)
-  router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.DeletePublic)).Methods(http.MethodDelete)
-
-  // public access for dashboards
-  router.HandleFunc("/api/v1/public/dashboards/{id}", am.CheckWithoutClaims(
-    ah.Signoz.Handlers.Dashboard.GetPublicData,
-    authtypes.RelationRead, authtypes.RelationRead,
-    dashboardtypes.TypeableMetaResourcePublicDashboard,
-    func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
-      id, err := valuer.NewUUID(mux.Vars(req)["id"])
-      if err != nil {
-        return nil, valuer.UUID{}, err
-      }
-
-      return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
-    })).Methods(http.MethodGet)
-
-  router.HandleFunc("/api/v1/public/dashboards/{id}/widgets/{index}/query_range", am.CheckWithoutClaims(
-    ah.Signoz.Handlers.Dashboard.GetPublicWidgetQueryRange,
-    authtypes.RelationRead, authtypes.RelationRead,
-    dashboardtypes.TypeableMetaResourcePublicDashboard,
-    func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
-      id, err := valuer.NewUUID(mux.Vars(req)["id"])
-      if err != nil {
-        return nil, valuer.UUID{}, err
-      }
-
-      return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
-    })).Methods(http.MethodGet)
-
   // v3
   router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
   router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)

@@ -192,7 +192,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {

 func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
   r := baseapp.NewRouter()
-  am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
+  am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())

   r.Use(otelmux.Middleware(
     "apiserver",

@@ -280,7 +280,6 @@
     "got": "11.8.5",
     "form-data": "4.0.4",
     "brace-expansion": "^2.0.2",
-    "on-headers": "^1.1.0",
-    "tmp": "0.2.4"
+    "on-headers": "^1.1.0"
   }
 }

Binary file not shown. (Before: 98 KiB)

@@ -274,7 +274,7 @@ function App(): JSX.Element {
       chat_settings: {
         app_id: process.env.PYLON_APP_ID,
         email: user.email,
-        name: user.displayName || user.email,
+        name: user.displayName,
       },
     };
   }

@@ -1,4 +1,4 @@
-import { LogEventAxiosInstance as axios } from 'api';
+import { ApiBaseInstance as axios } from 'api';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
 import { AxiosError } from 'axios';
 import { ErrorResponse, SuccessResponse } from 'types/api';

@@ -1,11 +1,13 @@
 /* eslint-disable sonarjs/no-duplicate-string */
-import axios from 'api';
+import { ApiBaseInstance } from 'api';

 import { getFieldKeys } from '../getFieldKeys';

 // Mock the API instance
 jest.mock('api', () => ({
-  get: jest.fn(),
+  ApiBaseInstance: {
+    get: jest.fn(),
+  },
 }));

 describe('getFieldKeys API', () => {
@@ -29,33 +31,33 @@ describe('getFieldKeys API', () => {

   it('should call API with correct parameters when no args provided', async () => {
     // Mock successful API response
-    (axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

     // Call function with no parameters
     await getFieldKeys();

     // Verify API was called correctly with empty params object
-    expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
       params: {},
     });
   });

   it('should call API with signal parameter when provided', async () => {
     // Mock successful API response
-    (axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

     // Call function with signal parameter
     await getFieldKeys('traces');

     // Verify API was called with signal parameter
-    expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
       params: { signal: 'traces' },
     });
   });

   it('should call API with name parameter when provided', async () => {
     // Mock successful API response
-    (axios.get as jest.Mock).mockResolvedValueOnce({
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
       status: 200,
       data: {
         status: 'success',
@@ -70,14 +72,14 @@ describe('getFieldKeys API', () => {
     await getFieldKeys(undefined, 'service');

     // Verify API was called with name parameter
-    expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
       params: { name: 'service' },
     });
   });

   it('should call API with both signal and name when provided', async () => {
     // Mock successful API response
-    (axios.get as jest.Mock).mockResolvedValueOnce({
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
       status: 200,
       data: {
         status: 'success',
@@ -92,14 +94,14 @@ describe('getFieldKeys API', () => {
     await getFieldKeys('logs', 'service');

     // Verify API was called with both parameters
-    expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
       params: { signal: 'logs', name: 'service' },
     });
   });

   it('should return properly formatted response', async () => {
     // Mock API to return our response
-    (axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

     // Call the function
     const result = await getFieldKeys('traces');

@@ -1,11 +1,13 @@
 /* eslint-disable sonarjs/no-duplicate-string */
-import axios from 'api';
+import { ApiBaseInstance } from 'api';

 import { getFieldValues } from '../getFieldValues';

 // Mock the API instance
 jest.mock('api', () => ({
-  get: jest.fn(),
+  ApiBaseInstance: {
+    get: jest.fn(),
+  },
 }));

 describe('getFieldValues API', () => {
@@ -15,7 +17,7 @@ describe('getFieldValues API', () => {

   it('should call the API with correct parameters (no options)', async () => {
     // Mock API response
-    (axios.get as jest.Mock).mockResolvedValueOnce({
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
       status: 200,
       data: {
         status: 'success',
@@ -32,14 +34,14 @@ describe('getFieldValues API', () => {
     await getFieldValues();

     // Verify API was called correctly with empty params
-    expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
       params: {},
     });
   });

   it('should call the API with signal parameter', async () => {
     // Mock API response
-    (axios.get as jest.Mock).mockResolvedValueOnce({
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
       status: 200,
       data: {
         status: 'success',
@@ -56,14 +58,14 @@ describe('getFieldValues API', () => {
     await getFieldValues('traces');

     // Verify API was called with signal parameter
-    expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
       params: { signal: 'traces' },
     });
   });

   it('should call the API with name parameter', async () => {
     // Mock API response
-    (axios.get as jest.Mock).mockResolvedValueOnce({
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
       status: 200,
       data: {
         status: 'success',
@@ -80,14 +82,14 @@ describe('getFieldValues API', () => {
     await getFieldValues(undefined, 'service.name');

     // Verify API was called with name parameter
-    expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
       params: { name: 'service.name' },
     });
   });

   it('should call the API with value parameter', async () => {
     // Mock API response
-    (axios.get as jest.Mock).mockResolvedValueOnce({
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
       status: 200,
       data: {
         status: 'success',
@@ -104,14 +106,14 @@ describe('getFieldValues API', () => {
     await getFieldValues(undefined, 'service.name', 'front');

     // Verify API was called with value parameter
-    expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
       params: { name: 'service.name', searchText: 'front' },
     });
   });

   it('should call the API with time range parameters', async () => {
     // Mock API response
-    (axios.get as jest.Mock).mockResolvedValueOnce({
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
       status: 200,
       data: {
         status: 'success',
@@ -136,7 +138,7 @@ describe('getFieldValues API', () => {
     );

     // Verify API was called with time range parameters (converted to milliseconds)
-    expect(axios.get).toHaveBeenCalledWith('/fields/values', {
+    expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
       params: {
         signal: 'logs',
         name: 'service.name',
@@ -163,7 +165,7 @@ describe('getFieldValues API', () => {
       },
     };

-    (axios.get as jest.Mock).mockResolvedValueOnce(mockResponse);
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);

     // Call the function
     const result = await getFieldValues('traces', 'mixed.values');
@@ -194,7 +196,7 @@ describe('getFieldValues API', () => {
     };

     // Mock API to return our response
-    (axios.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
+    (ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);

     // Call the function
     const result = await getFieldValues('traces', 'service.name');

@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
 import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
 import { AxiosError } from 'axios';
 import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -24,7 +24,7 @@ export const getFieldKeys = async (
   }

   try {
-    const response = await axios.get('/fields/keys', { params });
+    const response = await ApiBaseInstance.get('/fields/keys', { params });

     return {
       httpStatusCode: response.status,

@@ -1,5 +1,5 @@
 /* eslint-disable sonarjs/cognitive-complexity */
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
 import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
 import { AxiosError } from 'axios';
 import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -47,7 +47,7 @@ export const getFieldValues = async (
   }

   try {
-    const response = await axios.get('/fields/values', { params });
+    const response = await ApiBaseInstance.get('/fields/values', { params });

     // Normalize values from different types (stringValues, boolValues, etc.)
     if (response.data?.data?.values) {

@@ -86,9 +86,8 @@ const interceptorRejected = async (

   if (
     response.status === 401 &&
-    // if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
+    // if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
     response.config.url !== '/sessions/rotate' &&
-    response.config.url !== '/sessions/email_password' &&
     !(
       response.config.url === '/sessions' && response.config.method === 'delete'
     )
@@ -200,15 +199,15 @@ ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
 //

 // axios Base
-export const LogEventAxiosInstance = axios.create({
+export const ApiBaseInstance = axios.create({
   baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
 });

-LogEventAxiosInstance.interceptors.response.use(
+ApiBaseInstance.interceptors.response.use(
   interceptorsResponse,
   interceptorRejectedBase,
 );
-LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
+ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
 //

 // gateway Api V1

@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
 import { AxiosError, AxiosResponse } from 'axios';
 import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
@@ -17,7 +17,7 @@ export const getHostAttributeKeys = async (
   try {
     const response: AxiosResponse<{
       data: IQueryAutocompleteResponse;
-    }> = await axios.get(
+    }> = await ApiBaseInstance.get(
       `/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
       {
         params: {

@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
 import { AxiosError } from 'axios';
 import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -20,7 +20,7 @@ const getOnboardingStatus = async (props: {
 }): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
   const { endpointService, ...rest } = props;
   try {
-    const response = await axios.post(
+    const response = await ApiBaseInstance.post(
       `/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
       rest,
     );

@@ -1,20 +1,13 @@
-import { ApiV2Instance } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp } from 'types/api';
+import axios from 'api';
 import { PayloadProps, Props } from 'types/api/metrics/getService';

 const getService = async (props: Props): Promise<PayloadProps> => {
-  try {
-    const response = await ApiV2Instance.post(`/services`, {
-      start: `${props.start}`,
-      end: `${props.end}`,
-      tags: props.selectedTags,
-    });
-    return response.data.data;
-  } catch (error) {
-    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-  }
+  const response = await axios.post(`/services`, {
+    start: `${props.start}`,
+    end: `${props.end}`,
+    tags: props.selectedTags,
+  });
+  return response.data;
 };

 export default getService;

@@ -1,27 +1,22 @@
-import { ApiV2Instance } from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp } from 'types/api';
+import axios from 'api';
 import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';

 const getTopOperations = async (props: Props): Promise<PayloadProps> => {
-  try {
-    const endpoint = props.isEntryPoint
-      ? '/service/entry_point_operations'
-      : '/service/top_operations';
+  const endpoint = props.isEntryPoint
+    ? '/service/entry_point_operations'
+    : '/service/top_operations';

-    const response = await ApiV2Instance.post(endpoint, {
+  const response = await axios.post(endpoint, {
     start: `${props.start}`,
     end: `${props.end}`,
     service: props.service,
     tags: props.selectedTags,
-    limit: 5000,
-  });
+  });

+  if (props.isEntryPoint) {
     return response.data.data;
-  } catch (error) {
-    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
   }
+
+  return response.data;
 };

 export default getTopOperations;

@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
 import { AxiosError } from 'axios';
 import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -9,7 +9,7 @@ const getCustomFilters = async (
 ): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
   const { signal } = props;
   try {
-    const response = await axios.get(`/orgs/me/filters/${signal}`);
+    const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);

     return {
       statusCode: 200,

|
|||||||
import axios from 'api';
|
import { ApiBaseInstance } from 'api';
|
||||||
import { AxiosError } from 'axios';
|
import { AxiosError } from 'axios';
|
||||||
import { SuccessResponse } from 'types/api';
|
import { SuccessResponse } from 'types/api';
|
||||||
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
|
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
|
||||||
@@ -6,7 +6,7 @@ import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFil
|
|||||||
const updateCustomFiltersAPI = async (
|
const updateCustomFiltersAPI = async (
|
||||||
props: UpdateCustomFiltersProps,
|
props: UpdateCustomFiltersProps,
|
||||||
): Promise<SuccessResponse<void> | AxiosError> =>
|
): Promise<SuccessResponse<void> | AxiosError> =>
|
||||||
axios.put(`/orgs/me/filters`, {
|
ApiBaseInstance.put(`orgs/me/filters`, {
|
||||||
...props.data,
|
...props.data,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance } from 'api';
 import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
 import { AxiosError } from 'axios';
 import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -9,12 +9,15 @@ const listOverview = async (
 ): Promise<SuccessResponseV2<PayloadProps>> => {
   const { start, end, show_ip: showIp, filter } = props;
   try {
-    const response = await axios.post(`/third-party-apis/overview/list`, {
-      start,
-      end,
-      show_ip: showIp,
-      filter,
-    });
+    const response = await ApiBaseInstance.post(
+      `/third-party-apis/overview/list`,
+      {
+        start,
+        end,
+        show_ip: showIp,
+        filter,
+      },
+    );

     return {
       httpStatusCode: response.status,

|
|||||||
import axios from 'api';
|
import { ApiBaseInstance } from 'api';
|
||||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||||
import { AxiosError } from 'axios';
|
import { AxiosError } from 'axios';
|
||||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||||
@@ -11,7 +11,7 @@ const getSpanPercentiles = async (
|
|||||||
props: GetSpanPercentilesProps,
|
props: GetSpanPercentilesProps,
|
||||||
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
|
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
|
||||||
try {
|
try {
|
||||||
const response = await axios.post('/span_percentile', {
|
const response = await ApiBaseInstance.post('/span_percentile', {
|
||||||
...props,
|
...props,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -10,6 +10,10 @@ import { LogsLoading } from 'container/LogsLoading/LogsLoading';
|
|||||||
import { FontSize } from 'container/OptionsMenu/types';
|
import { FontSize } from 'container/OptionsMenu/types';
|
||||||
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
|
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
|
||||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||||
|
import {
|
||||||
|
LOG_FIELD_BODY_KEY,
|
||||||
|
LOG_FIELD_TIMESTAMP_KEY,
|
||||||
|
} from 'lib/logs/flatLogData';
|
||||||
import { useCallback, useEffect, useMemo } from 'react';
|
import { useCallback, useEffect, useMemo } from 'react';
|
||||||
import { useQuery } from 'react-query';
|
import { useQuery } from 'react-query';
|
||||||
import { Virtuoso } from 'react-virtuoso';
|
import { Virtuoso } from 'react-virtuoso';
|
||||||
@@ -85,11 +89,15 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
|
|||||||
dataType: 'string',
|
dataType: 'string',
|
||||||
type: '',
|
type: '',
|
||||||
name: 'body',
|
name: 'body',
|
||||||
|
displayName: 'Body',
|
||||||
|
key: LOG_FIELD_BODY_KEY,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
dataType: 'string',
|
dataType: 'string',
|
||||||
type: '',
|
type: '',
|
||||||
name: 'timestamp',
|
name: 'timestamp',
|
||||||
|
displayName: 'Timestamp',
|
||||||
|
key: LOG_FIELD_TIMESTAMP_KEY,
|
||||||
},
|
},
|
||||||
]}
|
]}
|
||||||
/>
|
/>
|
||||||
|
|||||||
@@ -13,6 +13,10 @@ import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 // utils
 import { FlatLogData } from 'lib/logs/flatLogData';
+import {
+  LOG_FIELD_BODY_KEY,
+  LOG_FIELD_TIMESTAMP_KEY,
+} from 'lib/logs/flatLogData';
 import { useTimezone } from 'providers/Timezone';
 import { useCallback, useMemo, useState } from 'react';
 // interfaces
@@ -42,7 +46,9 @@ interface LogFieldProps {
 }

 type LogSelectedFieldProps = Omit<LogFieldProps, 'linesPerRow'> &
-  Pick<AddToQueryHOCProps, 'onAddToQuery'>;
+  Pick<AddToQueryHOCProps, 'onAddToQuery'> & {
+    fieldKeyDisplay: string;
+  };

 function LogGeneralField({
   fieldKey,
@@ -74,6 +80,7 @@ function LogGeneralField({
 function LogSelectedField({
   fieldKey = '',
   fieldValue = '',
+  fieldKeyDisplay = '',
   onAddToQuery,
   fontSize,
 }: LogSelectedFieldProps): JSX.Element {
@@ -90,7 +97,7 @@ function LogSelectedField({
         style={{ color: blue[4] }}
         className={cx('selected-log-field-key', fontSize)}
       >
-        {fieldKey}
+        {fieldKeyDisplay}
       </span>
     </Typography.Text>
   </AddToQueryHOC>
@@ -162,7 +169,7 @@ function ListLogView({
   );

   const updatedSelecedFields = useMemo(
-    () => selectedFields.filter((e) => e.name !== 'id'),
+    () => selectedFields.filter((e) => e.key !== 'id'),
     [selectedFields],
   );

@@ -170,16 +177,16 @@ function ListLogView({

   const timestampValue = useMemo(
     () =>
-      typeof flattenLogData.timestamp === 'string'
+      typeof flattenLogData[LOG_FIELD_TIMESTAMP_KEY] === 'string'
         ? formatTimezoneAdjustedTimestamp(
-            flattenLogData.timestamp,
+            flattenLogData[LOG_FIELD_TIMESTAMP_KEY],
             DATE_TIME_FORMATS.ISO_DATETIME_MS,
           )
         : formatTimezoneAdjustedTimestamp(
-            flattenLogData.timestamp / 1e6,
+            flattenLogData[LOG_FIELD_TIMESTAMP_KEY] / 1e6,
             DATE_TIME_FORMATS.ISO_DATETIME_MS,
           ),
-    [flattenLogData.timestamp, formatTimezoneAdjustedTimestamp],
+    [flattenLogData, formatTimezoneAdjustedTimestamp],
   );

   const logType = getLogIndicatorType(logData);
@@ -215,10 +222,12 @@ function ListLogView({
       />
       <div>
         <LogContainer fontSize={fontSize}>
-          {updatedSelecedFields.some((field) => field.name === 'body') && (
+          {updatedSelecedFields.some(
+            (field) => field.key === LOG_FIELD_BODY_KEY,
+          ) && (
             <LogGeneralField
               fieldKey="Log"
-              fieldValue={flattenLogData.body}
+              fieldValue={flattenLogData[LOG_FIELD_BODY_KEY]}
               linesPerRow={linesPerRow}
               fontSize={fontSize}
             />
@@ -230,7 +239,9 @@ function ListLogView({
               fontSize={fontSize}
             />
           )}
-          {updatedSelecedFields.some((field) => field.name === 'timestamp') && (
+          {updatedSelecedFields.some(
+            (field) => field.key === LOG_FIELD_TIMESTAMP_KEY,
+          ) && (
             <LogGeneralField
               fieldKey="Timestamp"
               fieldValue={timestampValue}
@@ -239,13 +250,17 @@ function ListLogView({
           )}

           {updatedSelecedFields
-            .filter((field) => !['timestamp', 'body'].includes(field.name))
+            .filter(
+              (field) =>
+                ![LOG_FIELD_TIMESTAMP_KEY, LOG_FIELD_BODY_KEY].includes(field.key),
+            )
             .map((field) =>
-              isValidLogField(flattenLogData[field.name] as never) ? (
+              isValidLogField(flattenLogData[field.key] as never) ? (
                 <LogSelectedField
-                  key={field.name}
-                  fieldKey={field.name}
-                  fieldValue={flattenLogData[field.name] as never}
+                  key={field.key}
+                  fieldKey={field.key}
+                  fieldKeyDisplay={field.displayName}
+                  fieldValue={flattenLogData[field.key] as never}
                   onAddToQuery={onAddToQuery}
                   fontSize={fontSize}
                 />

@@ -73,16 +73,25 @@ function RawLogView({
   );

   const attributesValues = updatedSelecedFields
-    .filter((field) => !['timestamp', 'body'].includes(field.name))
-    .map((field) => flattenLogData[field.name])
-    .filter((attribute) => {
+    .filter(
+      (field) => !['log.timestamp:string', 'log.body:string'].includes(field.key),
+    )
+    .map((field) => {
+      const value = flattenLogData[field.key];
+      const label = field.displayName;
+
       // loadash isEmpty doesnot work with numbers
-      if (isNumber(attribute)) {
-        return true;
+      if (isNumber(value)) {
+        return `${label}: ${value}`;
       }

-      return !isUndefined(attribute) && !isEmpty(attribute);
-    });
+      if (!isUndefined(value) && !isEmpty(value)) {
+        return `${label}: ${value}`;
+      }
+
+      return null;
+    })
+    .filter((attribute) => attribute !== null);

   let attributesText = attributesValues.join(' | ');

@@ -6,7 +6,11 @@ import cx from 'classnames';
|
|||||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||||
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
|
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
|
||||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
import {
|
||||||
|
FlatLogData,
|
||||||
|
LOG_FIELD_BODY_KEY,
|
||||||
|
LOG_FIELD_TIMESTAMP_KEY,
|
||||||
|
} from 'lib/logs/flatLogData';
|
||||||
import { useTimezone } from 'providers/Timezone';
|
import { useTimezone } from 'providers/Timezone';
|
||||||
import { useMemo } from 'react';
|
import { useMemo } from 'react';
|
||||||
|
|
||||||
@@ -51,28 +55,33 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
|||||||
|
|
||||||
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
|
||||||
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
|
const fieldColumns: ColumnsType<Record<string, unknown>> = fields
|
||||||
.filter((e) => !['id', 'body', 'timestamp'].includes(e.name))
|
.filter(
|
||||||
.map(({ name }) => ({
|
(e) => !['id', LOG_FIELD_BODY_KEY, LOG_FIELD_TIMESTAMP_KEY].includes(e.key),
|
||||||
title: name,
|
)
|
||||||
dataIndex: name,
|
.map((field) => ({
|
||||||
accessorKey: name,
|
title: field.displayName,
|
||||||
id: name.toLowerCase().replace(/\./g, '_'),
|
dataIndex: field.key,
|
||||||
key: name,
|
accessorKey: field.key,
|
||||||
render: (field): ColumnTypeRender<Record<string, unknown>> => ({
|
id: field.key.toLowerCase().replace(/\./g, '_').replace(/:/g, '_'),
|
||||||
props: {
|
key: field.key,
|
||||||
style: isListViewPanel
|
render: (fieldValue, record): ColumnTypeRender<Record<string, unknown>> => {
|
||||||
? defaultListViewPanelStyle
|
const value = record[field.key] || fieldValue;
|
||||||
: getDefaultCellStyle(isDarkMode),
|
return {
|
||||||
},
|
props: {
|
||||||
children: (
|
style: isListViewPanel
|
||||||
<Typography.Paragraph
|
? defaultListViewPanelStyle
|
||||||
ellipsis={{ rows: linesPerRow }}
|
: getDefaultCellStyle(isDarkMode),
|
||||||
className={cx('paragraph', fontSize)}
|
},
|
||||||
>
|
children: (
|
||||||
{field}
|
<Typography.Paragraph
|
||||||
</Typography.Paragraph>
|
ellipsis={{ rows: linesPerRow }}
|
||||||
),
|
className={cx('paragraph', fontSize)}
|
||||||
}),
|
>
|
||||||
|
{value}
|
||||||
|
</Typography.Paragraph>
|
||||||
|
),
|
||||||
|
};
|
||||||
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
if (isListViewPanel) {
|
if (isListViewPanel) {
|
||||||
@@ -100,26 +109,29 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
|||||||
),
|
),
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
...(fields.some((field) => field.name === 'timestamp')
|
...(fields.some((field) => field.key === LOG_FIELD_TIMESTAMP_KEY)
|
||||||
? [
|
? [
|
||||||
{
|
{
|
||||||
title: 'timestamp',
|
title: 'timestamp',
|
||||||
dataIndex: 'timestamp',
|
dataIndex: LOG_FIELD_TIMESTAMP_KEY,
|
||||||
key: 'timestamp',
|
key: 'timestamp',
|
||||||
accessorKey: 'timestamp',
|
accessorKey: LOG_FIELD_TIMESTAMP_KEY,
|
||||||
id: 'timestamp',
|
id: 'timestamp',
|
||||||
// https://github.com/ant-design/ant-design/discussions/36886
|
// https://github.com/ant-design/ant-design/discussions/36886
|
||||||
render: (
|
render: (
|
||||||
field: string | number,
|
field: string | number,
|
||||||
|
record: Record<string, unknown>,
|
||||||
): ColumnTypeRender<Record<string, unknown>> => {
|
): ColumnTypeRender<Record<string, unknown>> => {
|
||||||
|
const timestampValue =
|
||||||
|
(record[LOG_FIELD_TIMESTAMP_KEY] as string | number) || field;
|
||||||
const date =
|
const date =
|
||||||
typeof field === 'string'
|
typeof timestampValue === 'string'
|
||||||
? formatTimezoneAdjustedTimestamp(
|
? formatTimezoneAdjustedTimestamp(
|
||||||
field,
|
timestampValue,
|
||||||
DATE_TIME_FORMATS.ISO_DATETIME_MS,
|
DATE_TIME_FORMATS.ISO_DATETIME_MS,
|
||||||
)
|
)
|
||||||
: formatTimezoneAdjustedTimestamp(
|
: formatTimezoneAdjustedTimestamp(
|
||||||
field / 1e6,
|
timestampValue / 1e6,
|
||||||
DATE_TIME_FORMATS.ISO_DATETIME_MS,
|
DATE_TIME_FORMATS.ISO_DATETIME_MS,
|
||||||
);
|
);
|
||||||
return {
|
return {
|
||||||
@@ -136,33 +148,37 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
|
|||||||
]
|
]
|
||||||
: []),
|
: []),
|
||||||
...(appendTo === 'center' ? fieldColumns : []),
|
...(appendTo === 'center' ? fieldColumns : []),
|
||||||
...(fields.some((field) => field.name === 'body')
|
...(fields.some((field) => field.key === LOG_FIELD_BODY_KEY)
|
||||||
? [
|
? [
|
||||||
{
|
{
|
||||||
title: 'body',
|
title: 'body',
|
||||||
dataIndex: 'body',
|
dataIndex: LOG_FIELD_BODY_KEY,
|
||||||
key: 'body',
|
key: 'body',
|
||||||
accessorKey: 'body',
|
accessorKey: LOG_FIELD_BODY_KEY,
|
||||||
id: 'body',
|
id: 'body',
|
||||||
render: (
|
render: (
|
||||||
field: string | number,
|
field: string | number,
|
||||||
): ColumnTypeRender<Record<string, unknown>> => ({
|
record: Record<string, unknown>,
|
||||||
props: {
|
): ColumnTypeRender<Record<string, unknown>> => {
|
||||||
style: bodyColumnStyle,
|
const bodyValue = (record[LOG_FIELD_BODY_KEY] as string) || '';
|
||||||
},
|
return {
|
||||||
children: (
|
props: {
|
||||||
<TableBodyContent
|
style: bodyColumnStyle,
|
||||||
dangerouslySetInnerHTML={{
|
},
|
||||||
__html: getSanitizedLogBody(field as string, {
|
children: (
|
||||||
shouldEscapeHtml: true,
|
<TableBodyContent
|
||||||
}),
|
dangerouslySetInnerHTML={{
|
||||||
}}
|
__html: getSanitizedLogBody(bodyValue, {
|
||||||
fontSize={fontSize}
|
shouldEscapeHtml: true,
|
||||||
linesPerRow={linesPerRow}
|
}),
|
||||||
isDarkMode={isDarkMode}
|
}}
|
||||||
/>
|
fontSize={fontSize}
|
||||||
),
|
linesPerRow={linesPerRow}
|
||||||
}),
|
isDarkMode={isDarkMode}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
};
|
||||||
|
},
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
: []),
|
: []),
|
||||||
|
|||||||
@@ -416,18 +416,21 @@ function OptionsMenu({
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
<div className="column-format">
|
<div className="column-format">
|
||||||
{addColumn?.value?.map(({ name }) => (
|
{addColumn?.value?.map((column) => (
|
||||||
<div className="column-name" key={name}>
|
<div className="column-name" key={column.key}>
|
||||||
<div className="name">
|
<div className="name">
|
||||||
<Tooltip placement="left" title={name}>
|
<Tooltip
|
||||||
{name}
|
placement="left"
|
||||||
|
title={column.displayName || column.name}
|
||||||
|
>
|
||||||
|
{column.displayName || column.name}
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
</div>
|
</div>
|
||||||
{addColumn?.value?.length > 1 && (
|
{addColumn?.value?.length > 1 && (
|
||||||
<X
|
<X
|
||||||
className="delete-btn"
|
className="delete-btn"
|
||||||
size={14}
|
size={14}
|
||||||
onClick={(): void => addColumn.onRemove(name)}
|
onClick={(): void => addColumn.onRemove(column.key)}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -224,7 +224,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
|
|||||||
const visitedPairs: Set<string> = new Set(); // Set to track visited query pairs
|
const visitedPairs: Set<string> = new Set(); // Set to track visited query pairs
|
||||||
|
|
||||||
// Map extracted query pairs to key-specific pair information for faster access
|
// Map extracted query pairs to key-specific pair information for faster access
|
||||||
let queryPairsMap = getQueryPairsMap(existingQuery);
|
let queryPairsMap = getQueryPairsMap(existingQuery.trim());
|
||||||
|
|
||||||
filters?.items?.forEach((filter) => {
|
filters?.items?.forEach((filter) => {
|
||||||
const { key, op, value } = filter;
|
const { key, op, value } = filter;
|
||||||
@@ -309,7 +309,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
|
|||||||
)}${OPERATORS.IN} ${formattedValue} ${modifiedQuery.slice(
|
)}${OPERATORS.IN} ${formattedValue} ${modifiedQuery.slice(
|
||||||
notInPair.position.valueEnd + 1,
|
notInPair.position.valueEnd + 1,
|
||||||
)}`;
|
)}`;
|
||||||
queryPairsMap = getQueryPairsMap(modifiedQuery);
|
queryPairsMap = getQueryPairsMap(modifiedQuery.trim());
|
||||||
}
|
}
|
||||||
shouldAddToNonExisting = false; // Don't add this to non-existing filters
|
shouldAddToNonExisting = false; // Don't add this to non-existing filters
|
||||||
} else if (
|
} else if (
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import { Select } from 'antd';
|
import { Select } from 'antd';
|
||||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
|
||||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||||
import {
|
import {
|
||||||
getAllEndpointsWidgetData,
|
getAllEndpointsWidgetData,
|
||||||
@@ -265,7 +264,6 @@ function AllEndPoints({
|
|||||||
customOnDragSelect={(): void => {}}
|
customOnDragSelect={(): void => {}}
|
||||||
customTimeRange={timeRange}
|
customTimeRange={timeRange}
|
||||||
customOnRowClick={onRowClick}
|
customOnRowClick={onRowClick}
|
||||||
version={ENTITY_VERSION_V5}
|
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
|
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
|
||||||
import { useApiMonitoringParams } from 'container/ApiMonitoring/queryParams';
|
import { useApiMonitoringParams } from 'container/ApiMonitoring/queryParams';
|
||||||
import {
|
import {
|
||||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||||
@@ -179,33 +178,18 @@ function EndPointDetails({
|
|||||||
[domainName, filters, minTime, maxTime],
|
[domainName, filters, minTime, maxTime],
|
||||||
);
|
);
|
||||||
|
|
||||||
const V5_QUERIES = [
|
|
||||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_DATA,
|
|
||||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA,
|
|
||||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA,
|
|
||||||
REACT_QUERY_KEY.GET_ENDPOINT_METRICS_DATA,
|
|
||||||
REACT_QUERY_KEY.GET_ENDPOINT_DEPENDENT_SERVICES_DATA,
|
|
||||||
REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
|
|
||||||
] as const;
|
|
||||||
|
|
||||||
const endPointDetailsDataQueries = useQueries(
|
const endPointDetailsDataQueries = useQueries(
|
||||||
endPointDetailsQueryPayload.map((payload, index) => {
|
endPointDetailsQueryPayload.map((payload, index) => ({
|
||||||
const queryKey = END_POINT_DETAILS_QUERY_KEYS_ARRAY[index];
|
queryKey: [
|
||||||
const version = (V5_QUERIES as readonly string[]).includes(queryKey)
|
END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
|
||||||
? ENTITY_VERSION_V5
|
payload,
|
||||||
: ENTITY_VERSION_V4;
|
filters?.items, // Include filters.items in queryKey for better caching
|
||||||
return {
|
ENTITY_VERSION_V4,
|
||||||
queryKey: [
|
],
|
||||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
|
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||||
payload,
|
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||||
...(filters?.items?.length ? filters.items : []), // Include filters.items in queryKey for better caching
|
enabled: !!payload,
|
||||||
version,
|
})),
|
||||||
],
|
|
||||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
|
||||||
GetMetricQueryRange(payload, version),
|
|
||||||
enabled: !!payload,
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
);
|
);
|
||||||
|
|
||||||
const [
|
const [
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
|
|||||||
import { MetricRangePayloadV5, ScalarData } from 'api/v5/v5';
|
import { MetricRangePayloadV5, ScalarData } from 'api/v5/v5';
|
||||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||||
import { withErrorBoundary } from 'components/ErrorBoundaryHOC';
|
import { withErrorBoundary } from 'components/ErrorBoundaryHOC';
|
||||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
|
||||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||||
import {
|
import {
|
||||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||||
@@ -56,10 +56,6 @@ function TopErrors({
|
|||||||
{
|
{
|
||||||
items: endPointName
|
items: endPointName
|
||||||
? [
|
? [
|
||||||
// Remove any existing http.url filters from initialFilters to avoid duplicates
|
|
||||||
...(initialFilters?.items?.filter(
|
|
||||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
|
|
||||||
) || []),
|
|
||||||
{
|
{
|
||||||
id: '92b8a1c1',
|
id: '92b8a1c1',
|
||||||
key: {
|
key: {
|
||||||
@@ -70,6 +66,7 @@ function TopErrors({
|
|||||||
op: '=',
|
op: '=',
|
||||||
value: endPointName,
|
value: endPointName,
|
||||||
},
|
},
|
||||||
|
...(initialFilters?.items || []),
|
||||||
]
|
]
|
||||||
: [...(initialFilters?.items || [])],
|
: [...(initialFilters?.items || [])],
|
||||||
op: 'AND',
|
op: 'AND',
|
||||||
@@ -131,12 +128,12 @@ function TopErrors({
|
|||||||
const endPointDropDownDataQueries = useQueries(
|
const endPointDropDownDataQueries = useQueries(
|
||||||
endPointDropDownQueryPayload.map((payload) => ({
|
endPointDropDownQueryPayload.map((payload) => ({
|
||||||
queryKey: [
|
queryKey: [
|
||||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[2],
|
END_POINT_DETAILS_QUERY_KEYS_ARRAY[4],
|
||||||
payload,
|
payload,
|
||||||
ENTITY_VERSION_V5,
|
ENTITY_VERSION_V4,
|
||||||
],
|
],
|
||||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||||
GetMetricQueryRange(payload, ENTITY_VERSION_V5),
|
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||||
enabled: !!payload,
|
enabled: !!payload,
|
||||||
staleTime: 60 * 1000,
|
staleTime: 60 * 1000,
|
||||||
})),
|
})),
|
||||||
|
|||||||
@@ -1,337 +0,0 @@
|
|||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
||||||
/* eslint-disable react/jsx-props-no-spreading */
|
|
||||||
/* eslint-disable prefer-destructuring */
|
|
||||||
/* eslint-disable sonarjs/no-duplicate-string */
|
|
||||||
import { render, screen, waitFor } from '@testing-library/react';
|
|
||||||
import { TraceAggregation } from 'api/v5/v5';
|
|
||||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
|
||||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
|
||||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
|
||||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
|
||||||
|
|
||||||
import DomainMetrics from './DomainMetrics';
|
|
||||||
|
|
||||||
// Mock the API call
|
|
||||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
|
||||||
GetMetricQueryRange: jest.fn(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock ErrorState component
|
|
||||||
jest.mock('./ErrorState', () => ({
|
|
||||||
__esModule: true,
|
|
||||||
default: jest.fn(({ refetch }) => (
|
|
||||||
<div data-testid="error-state">
|
|
||||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
|
||||||
Retry
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
)),
|
|
||||||
}));
|
|
||||||
|
|
||||||
describe('DomainMetrics - V5 Query Payload Tests', () => {
|
|
||||||
let queryClient: QueryClient;
|
|
||||||
|
|
||||||
const mockProps = {
|
|
||||||
domainName: '0.0.0.0',
|
|
||||||
timeRange: {
|
|
||||||
startTime: 1758259531000,
|
|
||||||
endTime: 1758261331000,
|
|
||||||
},
|
|
||||||
domainListFilters: {
|
|
||||||
items: [],
|
|
||||||
op: 'AND' as const,
|
|
||||||
} as IBuilderQuery['filters'],
|
|
||||||
};
|
|
||||||
|
|
||||||
const mockSuccessResponse = {
|
|
||||||
statusCode: 200,
|
|
||||||
error: null,
|
|
||||||
payload: {
|
|
||||||
data: {
|
|
||||||
result: [
|
|
||||||
{
|
|
||||||
table: {
|
|
||||||
rows: [
|
|
||||||
{
|
|
||||||
data: {
|
|
||||||
A: '150',
|
|
||||||
B: '125000000',
|
|
||||||
D: '2021-01-01T23:00:00Z',
|
|
||||||
F1: '5.5',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
queryClient = new QueryClient({
|
|
||||||
defaultOptions: {
|
|
||||||
queries: {
|
|
||||||
retry: false,
|
|
||||||
cacheTime: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
jest.clearAllMocks();
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
queryClient.clear();
|
|
||||||
});
|
|
||||||
|
|
||||||
const renderComponent = (props = mockProps): ReturnType<typeof render> =>
|
|
||||||
render(
|
|
||||||
<QueryClientProvider client={queryClient}>
|
|
||||||
<DomainMetrics {...props} />
|
|
||||||
</QueryClientProvider>,
|
|
||||||
);
|
|
||||||
|
|
||||||
describe('1. V5 Query Payload with Filters', () => {
|
|
||||||
it('sends correct V5 payload structure with domain name filters', async () => {
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
|
||||||
|
|
||||||
renderComponent();
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(GetMetricQueryRange).toHaveBeenCalledTimes(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
const [payload, version] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
|
||||||
|
|
||||||
// Verify it's using V5
|
|
||||||
expect(version).toBe(ENTITY_VERSION_V5);
|
|
||||||
|
|
||||||
// Verify time range
|
|
||||||
expect(payload.start).toBe(1758259531000);
|
|
||||||
expect(payload.end).toBe(1758261331000);
|
|
||||||
|
|
||||||
// Verify V3 payload structure (getDomainMetricsQueryPayload returns V3 format)
|
|
||||||
expect(payload.query).toBeDefined();
|
|
||||||
expect(payload.query.builder).toBeDefined();
|
|
||||||
expect(payload.query.builder.queryData).toBeDefined();
|
|
||||||
|
|
||||||
const queryData = payload.query.builder.queryData;
|
|
||||||
|
|
||||||
// Verify Query A - count with URL filter
|
|
||||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
|
||||||
expect(queryA).toBeDefined();
|
|
||||||
expect(queryA.dataSource).toBe('traces');
|
|
||||||
expect(queryA.aggregations?.[0]).toBeDefined();
|
|
||||||
expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
|
||||||
'count()',
|
|
||||||
);
|
|
||||||
// Verify exact domain filter expression structure
|
|
||||||
expect(queryA.filter.expression).toContain(
|
|
||||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
|
||||||
);
|
|
||||||
expect(queryA.filter.expression).toContain(
|
|
||||||
'url.full EXISTS OR http.url EXISTS',
|
|
||||||
);
|
|
||||||
|
|
||||||
// Verify Query B - p99 latency
|
|
||||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
|
||||||
expect(queryB).toBeDefined();
|
|
||||||
expect(queryB.aggregateOperator).toBe('p99');
|
|
||||||
expect(queryB.aggregations?.[0]).toBeDefined();
|
|
||||||
expect((queryB.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
|
||||||
'p99(duration_nano)',
|
|
||||||
);
|
|
||||||
// Verify exact domain filter expression structure
|
|
||||||
expect(queryB.filter.expression).toContain(
|
|
||||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
|
||||||
);
|
|
||||||
|
|
||||||
// Verify Query C - error count (disabled)
|
|
||||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
|
||||||
expect(queryC).toBeDefined();
|
|
||||||
expect(queryC.disabled).toBe(true);
|
|
||||||
expect(queryC.filter.expression).toContain(
|
|
||||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
|
||||||
);
|
|
||||||
expect(queryC.aggregations?.[0]).toBeDefined();
|
|
||||||
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
|
||||||
'count()',
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(queryC.filter.expression).toContain('has_error = true');
|
|
||||||
|
|
||||||
// Verify Query D - max timestamp
|
|
||||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
|
||||||
expect(queryD).toBeDefined();
|
|
||||||
expect(queryD.aggregateOperator).toBe('max');
|
|
||||||
expect(queryD.aggregations?.[0]).toBeDefined();
|
|
||||||
expect((queryD.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
|
||||||
'max(timestamp)',
|
|
||||||
);
|
|
||||||
// Verify exact domain filter expression structure
|
|
||||||
expect(queryD.filter.expression).toContain(
|
|
||||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
|
||||||
);
|
|
||||||
|
|
||||||
// Verify Formula F1 - error rate calculation
|
|
||||||
const formulas = payload.query.builder.queryFormulas;
|
|
||||||
expect(formulas).toBeDefined();
|
|
||||||
expect(formulas.length).toBeGreaterThan(0);
|
|
||||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
|
||||||
expect(formulaF1).toBeDefined();
|
|
||||||
expect(formulaF1.expression).toBe('(C/A)*100');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('includes custom filters in filter expressions', async () => {
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
|
||||||
|
|
||||||
const customFilters: IBuilderQuery['filters'] = {
|
|
||||||
items: [
|
|
||||||
{
|
|
||||||
id: 'test-1',
|
|
||||||
key: {
|
|
||||||
key: 'service.name',
|
|
||||||
dataType: 'string' as any,
|
|
||||||
type: 'resource',
|
|
||||||
},
|
|
||||||
op: '=',
|
|
||||||
value: 'my-service',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'test-2',
|
|
||||||
key: {
|
|
||||||
key: 'deployment.environment',
|
|
||||||
dataType: 'string' as any,
|
|
||||||
type: 'resource',
|
|
||||||
},
|
|
||||||
op: '=',
|
|
||||||
value: 'production',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
op: 'AND' as const,
|
|
||||||
};
|
|
||||||
|
|
||||||
renderComponent({
|
|
||||||
...mockProps,
|
|
||||||
domainListFilters: customFilters,
|
|
||||||
});
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(GetMetricQueryRange).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
const [payload] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
|
||||||
const queryData = payload.query.builder.queryData;
|
|
||||||
|
|
||||||
// Verify all queries include the custom filters
|
|
||||||
queryData.forEach((query: any) => {
|
|
||||||
if (query.filter && query.filter.expression) {
|
|
||||||
expect(query.filter.expression).toContain('service.name');
|
|
||||||
expect(query.filter.expression).toContain('my-service');
|
|
||||||
expect(query.filter.expression).toContain('deployment.environment');
|
|
||||||
expect(query.filter.expression).toContain('production');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('2. Data Display State', () => {
|
|
||||||
it('displays metrics when data is successfully loaded', async () => {
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
|
||||||
|
|
||||||
renderComponent();
|
|
||||||
|
|
||||||
// Wait for skeletons to disappear
|
|
||||||
await waitFor(() => {
|
|
||||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
|
||||||
expect(skeletons.length).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Verify all metric labels are displayed
|
|
||||||
expect(screen.getByText('EXTERNAL API')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
|
||||||
|
|
||||||
// Verify metric values are displayed
|
|
||||||
expect(screen.getByText('150')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('0.125s')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('3. Empty/Missing Data State', () => {
|
|
||||||
it('displays "-" for missing data values', async () => {
|
|
||||||
const emptyResponse = {
|
|
||||||
statusCode: 200,
|
|
||||||
error: null,
|
|
||||||
payload: {
|
|
||||||
data: {
|
|
||||||
result: [
|
|
||||||
{
|
|
||||||
table: {
|
|
||||||
rows: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(emptyResponse);
|
|
||||||
|
|
||||||
renderComponent();
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
|
||||||
expect(skeletons.length).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
// When no data, all values should show "-"
|
|
||||||
const dashValues = screen.getAllByText('-');
|
|
||||||
expect(dashValues.length).toBeGreaterThan(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('4. Error State', () => {
|
|
||||||
it('displays error state when API call fails', async () => {
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockRejectedValue(new Error('API Error'));
|
|
||||||
|
|
||||||
renderComponent();
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('retries API call when retry button is clicked', async () => {
|
|
||||||
let callCount = 0;
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockImplementation(() => {
|
|
||||||
callCount += 1;
|
|
||||||
if (callCount === 1) {
|
|
||||||
return Promise.reject(new Error('API Error'));
|
|
||||||
}
|
|
||||||
return Promise.resolve(mockSuccessResponse);
|
|
||||||
});
|
|
||||||
|
|
||||||
renderComponent();
|
|
||||||
|
|
||||||
// Wait for error state
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
// Click retry
|
|
||||||
const retryButton = screen.getByTestId('retry-button');
|
|
||||||
retryButton.click();
|
|
||||||
|
|
||||||
// Wait for successful load
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByText('150')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(callCount).toBe(2);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
import { Color } from '@signozhq/design-tokens';
|
import { Color } from '@signozhq/design-tokens';
|
||||||
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
||||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||||
import {
|
import {
|
||||||
DomainMetricsResponseRow,
|
DomainMetricsResponseRow,
|
||||||
@@ -44,10 +44,10 @@ function DomainMetrics({
|
|||||||
queryKey: [
|
queryKey: [
|
||||||
REACT_QUERY_KEY.GET_DOMAIN_METRICS_DATA,
|
REACT_QUERY_KEY.GET_DOMAIN_METRICS_DATA,
|
||||||
payload,
|
payload,
|
||||||
ENTITY_VERSION_V5,
|
ENTITY_VERSION_V4,
|
||||||
],
|
],
|
||||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||||
GetMetricQueryRange(payload, ENTITY_VERSION_V5),
|
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||||
enabled: !!payload,
|
enabled: !!payload,
|
||||||
staleTime: 60 * 1000, // 1 minute stale time : optimize this part
|
staleTime: 60 * 1000, // 1 minute stale time : optimize this part
|
||||||
})),
|
})),
|
||||||
@@ -132,9 +132,7 @@ function DomainMetrics({
|
|||||||
) : (
|
) : (
|
||||||
<Tooltip title={formattedDomainMetricsData.latency}>
|
<Tooltip title={formattedDomainMetricsData.latency}>
|
||||||
<span className="round-metric-tag">
|
<span className="round-metric-tag">
|
||||||
{formattedDomainMetricsData.latency !== '-'
|
{(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s
|
||||||
? `${(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s`
|
|
||||||
: '-'}
|
|
||||||
</span>
|
</span>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
@@ -145,27 +143,23 @@ function DomainMetrics({
|
|||||||
<Skeleton.Button active size="small" />
|
<Skeleton.Button active size="small" />
|
||||||
) : (
|
) : (
|
||||||
<Tooltip title={formattedDomainMetricsData.errorRate}>
|
<Tooltip title={formattedDomainMetricsData.errorRate}>
|
||||||
{formattedDomainMetricsData.errorRate !== '-' ? (
|
<Progress
|
||||||
<Progress
|
status="active"
|
||||||
status="active"
|
percent={Number(
|
||||||
percent={Number(
|
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||||
|
)}
|
||||||
|
strokeLinecap="butt"
|
||||||
|
size="small"
|
||||||
|
strokeColor={((): string => {
|
||||||
|
const errorRatePercent = Number(
|
||||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||||
)}
|
);
|
||||||
strokeLinecap="butt"
|
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||||
size="small"
|
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||||
strokeColor={((): string => {
|
return Color.BG_FOREST_500;
|
||||||
const errorRatePercent = Number(
|
})()}
|
||||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
className="progress-bar"
|
||||||
);
|
/>
|
||||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
|
||||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
|
||||||
return Color.BG_FOREST_500;
|
|
||||||
})()}
|
|
||||||
className="progress-bar"
|
|
||||||
/>
|
|
||||||
) : (
|
|
||||||
'-'
|
|
||||||
)}
|
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
|
|||||||
@@ -1,419 +0,0 @@
|
|||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
||||||
/* eslint-disable react/jsx-props-no-spreading */
|
|
||||||
/* eslint-disable prefer-destructuring */
|
|
||||||
/* eslint-disable sonarjs/no-duplicate-string */
|
|
||||||
import { render, screen, waitFor } from '@testing-library/react';
|
|
||||||
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
|
|
||||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
|
||||||
import { QueryClient, QueryClientProvider, UseQueryResult } from 'react-query';
|
|
||||||
import { SuccessResponse } from 'types/api';
|
|
||||||
|
|
||||||
import EndPointMetrics from './EndPointMetrics';
|
|
||||||
|
|
||||||
// Mock the API call
|
|
||||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
|
||||||
GetMetricQueryRange: jest.fn(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock ErrorState component
|
|
||||||
jest.mock('./ErrorState', () => ({
|
|
||||||
__esModule: true,
|
|
||||||
default: jest.fn(({ refetch }) => (
|
|
||||||
<div data-testid="error-state">
|
|
||||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
|
||||||
Retry
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
)),
|
|
||||||
}));
|
|
||||||
|
|
||||||
describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
|
||||||
let queryClient: QueryClient;
|
|
||||||
|
|
||||||
const mockSuccessResponse = {
|
|
||||||
statusCode: 200,
|
|
||||||
error: null,
|
|
||||||
payload: {
|
|
||||||
data: {
|
|
||||||
result: [
|
|
||||||
{
|
|
||||||
table: {
|
|
||||||
rows: [
|
|
||||||
{
|
|
||||||
data: {
|
|
||||||
A: '85.5',
|
|
||||||
B: '245000000',
|
|
||||||
D: '2021-01-01T22:30:00Z',
|
|
||||||
F1: '3.2',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
queryClient = new QueryClient({
|
|
||||||
defaultOptions: {
|
|
||||||
queries: {
|
|
||||||
retry: false,
|
|
||||||
cacheTime: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
jest.clearAllMocks();
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
queryClient.clear();
|
|
||||||
});
|
|
||||||
|
|
||||||
// Helper to create mock query result
|
|
||||||
const createMockQueryResult = (
|
|
||||||
response: any,
|
|
||||||
overrides?: Partial<UseQueryResult<SuccessResponse<any>, unknown>>,
|
|
||||||
): UseQueryResult<SuccessResponse<any>, unknown> =>
|
|
||||||
({
|
|
||||||
data: response,
|
|
||||||
error: null,
|
|
||||||
isError: false,
|
|
||||||
isIdle: false,
|
|
||||||
isLoading: false,
|
|
||||||
isLoadingError: false,
|
|
||||||
isRefetchError: false,
|
|
||||||
isRefetching: false,
|
|
||||||
isStale: true,
|
|
||||||
isSuccess: true,
|
|
||||||
status: 'success' as const,
|
|
||||||
dataUpdatedAt: Date.now(),
|
|
||||||
errorUpdateCount: 0,
|
|
||||||
errorUpdatedAt: 0,
|
|
||||||
failureCount: 0,
|
|
||||||
isFetched: true,
|
|
||||||
isFetchedAfterMount: true,
|
|
||||||
isFetching: false,
|
|
||||||
isPlaceholderData: false,
|
|
||||||
isPreviousData: false,
|
|
||||||
refetch: jest.fn(),
|
|
||||||
remove: jest.fn(),
|
|
||||||
...overrides,
|
|
||||||
} as UseQueryResult<SuccessResponse<any>, unknown>);
|
|
||||||
|
|
||||||
const renderComponent = (
|
|
||||||
endPointMetricsDataQuery: UseQueryResult<SuccessResponse<any>, unknown>,
|
|
||||||
): ReturnType<typeof render> =>
|
|
||||||
render(
|
|
||||||
<QueryClientProvider client={queryClient}>
|
|
||||||
<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
|
|
||||||
</QueryClientProvider>,
|
|
||||||
);
|
|
||||||
|
|
||||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
|
||||||
describe('1. V5 Query Payload with Filters', () => {
|
|
||||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
|
||||||
it('sends correct V5 payload structure with domain and endpoint filters', async () => {
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
|
||||||
|
|
||||||
const domainName = 'api.example.com';
|
|
||||||
const startTime = 1758259531000;
|
|
||||||
const endTime = 1758261331000;
|
|
||||||
const filters = {
|
|
||||||
items: [],
|
|
||||||
op: 'AND' as const,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Get the actual payload that would be generated
|
|
||||||
const payloads = getEndPointDetailsQueryPayload(
|
|
||||||
domainName,
|
|
||||||
startTime,
|
|
||||||
endTime,
|
|
||||||
filters,
|
|
||||||
);
|
|
||||||
|
|
||||||
// First payload is for endpoint metrics
|
|
||||||
const metricsPayload = payloads[0];
|
|
||||||
|
|
||||||
// Verify it's using the correct structure (V3 format for V5 API)
|
|
||||||
expect(metricsPayload.query).toBeDefined();
|
|
||||||
expect(metricsPayload.query.builder).toBeDefined();
|
|
||||||
expect(metricsPayload.query.builder.queryData).toBeDefined();
|
|
||||||
|
|
||||||
const queryData = metricsPayload.query.builder.queryData;
|
|
||||||
|
|
||||||
// Verify Query A - rate with domain and client kind filters
|
|
||||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
|
||||||
expect(queryA).toBeDefined();
|
|
||||||
if (queryA) {
|
|
||||||
expect(queryA.dataSource).toBe('traces');
|
|
||||||
expect(queryA.aggregateOperator).toBe('rate');
|
|
||||||
expect(queryA.timeAggregation).toBe('rate');
|
|
||||||
// Verify exact domain filter expression structure
|
|
||||||
if (queryA.filter) {
|
|
||||||
expect(queryA.filter.expression).toContain(
|
|
||||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
|
||||||
);
|
|
||||||
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify Query B - p99 latency with duration_nano
|
|
||||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
|
||||||
expect(queryB).toBeDefined();
|
|
||||||
if (queryB) {
|
|
||||||
expect(queryB.aggregateOperator).toBe('p99');
|
|
||||||
if (queryB.aggregateAttribute) {
|
|
||||||
expect(queryB.aggregateAttribute.key).toBe('duration_nano');
|
|
||||||
}
|
|
||||||
expect(queryB.timeAggregation).toBe('p99');
|
|
||||||
// Verify exact domain filter expression structure
|
|
||||||
if (queryB.filter) {
|
|
||||||
expect(queryB.filter.expression).toContain(
|
|
||||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
|
||||||
);
|
|
||||||
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify Query C - error count (disabled)
|
|
||||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
|
||||||
expect(queryC).toBeDefined();
|
|
||||||
if (queryC) {
|
|
||||||
expect(queryC.disabled).toBe(true);
|
|
||||||
expect(queryC.aggregateOperator).toBe('count');
|
|
||||||
if (queryC.filter) {
|
|
||||||
expect(queryC.filter.expression).toContain(
|
|
||||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
|
||||||
);
|
|
||||||
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
|
|
||||||
expect(queryC.filter.expression).toContain('has_error = true');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify Query D - max timestamp for last used
|
|
||||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
|
||||||
expect(queryD).toBeDefined();
|
|
||||||
if (queryD) {
|
|
||||||
expect(queryD.aggregateOperator).toBe('max');
|
|
||||||
if (queryD.aggregateAttribute) {
|
|
||||||
expect(queryD.aggregateAttribute.key).toBe('timestamp');
|
|
||||||
}
|
|
||||||
expect(queryD.timeAggregation).toBe('max');
|
|
||||||
// Verify exact domain filter expression structure
|
|
||||||
if (queryD.filter) {
|
|
||||||
expect(queryD.filter.expression).toContain(
|
|
||||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
|
||||||
);
|
|
||||||
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify Query E - total count (disabled)
|
|
||||||
const queryE = queryData.find((q: any) => q.queryName === 'E');
|
|
||||||
expect(queryE).toBeDefined();
|
|
||||||
if (queryE) {
|
|
||||||
expect(queryE.disabled).toBe(true);
|
|
||||||
expect(queryE.aggregateOperator).toBe('count');
|
|
||||||
if (queryE.aggregateAttribute) {
|
|
||||||
expect(queryE.aggregateAttribute.key).toBe('span_id');
|
|
||||||
}
|
|
||||||
if (queryE.filter) {
|
|
||||||
expect(queryE.filter.expression).toContain(
|
|
||||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
|
||||||
);
|
|
||||||
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify Formula F1 - error rate calculation
|
|
||||||
const formulas = metricsPayload.query.builder.queryFormulas;
|
|
||||||
expect(formulas).toBeDefined();
|
|
||||||
expect(formulas.length).toBeGreaterThan(0);
|
|
||||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
|
||||||
expect(formulaF1).toBeDefined();
|
|
||||||
if (formulaF1) {
|
|
||||||
expect(formulaF1.expression).toBe('(C/E)*100');
|
|
||||||
expect(formulaF1.disabled).toBe(false);
|
|
||||||
expect(formulaF1.legend).toBe('error percentage');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
it('includes custom domainListFilters in all query expressions', async () => {
|
|
||||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
|
||||||
|
|
||||||
const customFilters = {
|
|
||||||
items: [
|
|
||||||
{
|
|
||||||
id: 'test-1',
|
|
||||||
key: {
|
|
||||||
key: 'service.name',
|
|
||||||
dataType: 'string' as any,
|
|
||||||
type: 'resource',
|
|
||||||
},
|
|
||||||
op: '=',
|
|
||||||
value: 'payment-service',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'test-2',
|
|
||||||
key: {
|
|
||||||
key: 'deployment.environment',
|
|
||||||
dataType: 'string' as any,
|
|
||||||
type: 'resource',
|
|
||||||
},
|
|
||||||
op: '=',
|
|
||||||
value: 'staging',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
op: 'AND' as const,
|
|
||||||
};
|
|
||||||
|
|
||||||
const payloads = getEndPointDetailsQueryPayload(
|
|
||||||
'api.internal.com',
|
|
||||||
1758259531000,
|
|
||||||
1758261331000,
|
|
||||||
customFilters,
|
|
||||||
);
|
|
||||||
|
|
||||||
const queryData = payloads[0].query.builder.queryData;
|
|
||||||
|
|
||||||
// Verify ALL queries (A, B, C, D, E) include the custom filters
|
|
||||||
const allQueryNames = ['A', 'B', 'C', 'D', 'E'];
|
|
||||||
allQueryNames.forEach((queryName) => {
|
|
||||||
const query = queryData.find((q: any) => q.queryName === queryName);
|
|
||||||
expect(query).toBeDefined();
|
|
||||||
if (query && query.filter && query.filter.expression) {
|
|
||||||
// Check for exact filter inclusion
|
|
||||||
expect(query.filter.expression).toContain('service.name');
|
|
||||||
expect(query.filter.expression).toContain('payment-service');
|
|
||||||
expect(query.filter.expression).toContain('deployment.environment');
|
|
||||||
expect(query.filter.expression).toContain('staging');
|
|
||||||
// Also verify domain filter is still present
|
|
||||||
expect(query.filter.expression).toContain(
|
|
||||||
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
|
|
||||||
);
|
|
||||||
// Verify client kind filter is present
|
|
||||||
expect(query.filter.expression).toContain("kind_string = 'Client'");
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('2. Data Display State', () => {
|
|
||||||
it('displays metrics when data is successfully loaded', async () => {
|
|
||||||
const mockQuery = createMockQueryResult(mockSuccessResponse);
|
|
||||||
|
|
||||||
renderComponent(mockQuery);
|
|
||||||
|
|
||||||
// Wait for skeletons to disappear
|
|
||||||
await waitFor(() => {
|
|
||||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
|
||||||
expect(skeletons.length).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Verify all metric labels are displayed
|
|
||||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
|
||||||
|
|
||||||
// Verify metric values are displayed
|
|
||||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('245ms')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('3. Empty/Missing Data State', () => {
|
|
||||||
it("displays '-' for missing data values", async () => {
|
|
||||||
const emptyResponse = {
|
|
||||||
statusCode: 200,
|
|
||||||
error: null,
|
|
||||||
payload: {
|
|
||||||
data: {
|
|
||||||
result: [
|
|
||||||
{
|
|
||||||
table: {
|
|
||||||
rows: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
const mockQuery = createMockQueryResult(emptyResponse);
|
|
||||||
|
|
||||||
renderComponent(mockQuery);
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
|
||||||
expect(skeletons.length).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
// When no data, all values should show "-"
|
|
||||||
const dashValues = screen.getAllByText('-');
|
|
||||||
// Should have at least 2 dashes (rate and last used - latency shows "-", error % shows progress bar)
|
|
||||||
expect(dashValues.length).toBeGreaterThanOrEqual(2);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('4. Error State', () => {
|
|
||||||
it('displays error state when API call fails', async () => {
|
|
||||||
const mockQuery = createMockQueryResult(null, {
|
|
||||||
isError: true,
|
|
||||||
isSuccess: false,
|
|
||||||
status: 'error',
|
|
||||||
error: new Error('API Error'),
|
|
||||||
});
|
|
||||||
|
|
||||||
renderComponent(mockQuery);
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('retries API call when retry button is clicked', async () => {
|
|
||||||
const refetch = jest.fn().mockResolvedValue(mockSuccessResponse);
|
|
||||||
|
|
||||||
// Start with error state
|
|
||||||
const mockQuery = createMockQueryResult(null, {
|
|
||||||
isError: true,
|
|
||||||
isSuccess: false,
|
|
||||||
status: 'error',
|
|
||||||
error: new Error('API Error'),
|
|
||||||
refetch,
|
|
||||||
});
|
|
||||||
|
|
||||||
const { rerender } = renderComponent(mockQuery);
|
|
||||||
|
|
||||||
// Wait for error state
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
// Click retry
|
|
||||||
const retryButton = screen.getByTestId('retry-button');
|
|
||||||
retryButton.click();
|
|
||||||
|
|
||||||
// Verify refetch was called
|
|
||||||
expect(refetch).toHaveBeenCalledTimes(1);
|
|
||||||
|
|
||||||
// Simulate successful refetch by rerendering with success state
|
|
||||||
const successQuery = createMockQueryResult(mockSuccessResponse);
|
|
||||||
rerender(
|
|
||||||
<QueryClientProvider client={queryClient}>
|
|
||||||
<EndPointMetrics endPointMetricsDataQuery={successQuery} />
|
|
||||||
</QueryClientProvider>,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Wait for successful load
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,16 +1,12 @@
|
|||||||
import { Color } from '@signozhq/design-tokens';
|
import { Color } from '@signozhq/design-tokens';
|
||||||
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
|
||||||
import {
|
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
|
||||||
getDisplayValue,
|
|
||||||
getFormattedEndPointMetricsData,
|
|
||||||
} from 'container/ApiMonitoring/utils';
|
|
||||||
import { useMemo } from 'react';
|
import { useMemo } from 'react';
|
||||||
import { UseQueryResult } from 'react-query';
|
import { UseQueryResult } from 'react-query';
|
||||||
import { SuccessResponse } from 'types/api';
|
import { SuccessResponse } from 'types/api';
|
||||||
|
|
||||||
import ErrorState from './ErrorState';
|
import ErrorState from './ErrorState';
|
||||||
|
|
||||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
|
||||||
function EndPointMetrics({
|
function EndPointMetrics({
|
||||||
endPointMetricsDataQuery,
|
endPointMetricsDataQuery,
|
||||||
}: {
|
}: {
|
||||||
@@ -74,9 +70,7 @@ function EndPointMetrics({
|
|||||||
<Skeleton.Button active size="small" />
|
<Skeleton.Button active size="small" />
|
||||||
) : (
|
) : (
|
||||||
<Tooltip title={metricsData?.rate}>
|
<Tooltip title={metricsData?.rate}>
|
||||||
<span className="round-metric-tag">
|
<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
|
||||||
{metricsData?.rate !== '-' ? `${metricsData?.rate} ops/sec` : '-'}
|
|
||||||
</span>
|
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
@@ -85,7 +79,7 @@ function EndPointMetrics({
|
|||||||
<Skeleton.Button active size="small" />
|
<Skeleton.Button active size="small" />
|
||||||
) : (
|
) : (
|
||||||
<Tooltip title={metricsData?.latency}>
|
<Tooltip title={metricsData?.latency}>
|
||||||
{metricsData?.latency !== '-' ? `${metricsData?.latency}ms` : '-'}
|
<span className="round-metric-tag">{metricsData?.latency}ms</span>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
@@ -94,25 +88,21 @@ function EndPointMetrics({
|
|||||||
<Skeleton.Button active size="small" />
|
<Skeleton.Button active size="small" />
|
||||||
) : (
|
) : (
|
||||||
<Tooltip title={metricsData?.errorRate}>
|
<Tooltip title={metricsData?.errorRate}>
|
||||||
{metricsData?.errorRate !== '-' ? (
|
<Progress
|
||||||
<Progress
|
status="active"
|
||||||
status="active"
|
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
|
||||||
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
|
strokeLinecap="butt"
|
||||||
strokeLinecap="butt"
|
size="small"
|
||||||
size="small"
|
strokeColor={((): string => {
|
||||||
strokeColor={((): string => {
|
const errorRatePercent = Number(
|
||||||
const errorRatePercent = Number(
|
Number(metricsData?.errorRate ?? 0).toFixed(2),
|
||||||
Number(metricsData?.errorRate ?? 0).toFixed(2),
|
);
|
||||||
);
|
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
return Color.BG_FOREST_500;
|
||||||
return Color.BG_FOREST_500;
|
})()}
|
||||||
})()}
|
className="progress-bar"
|
||||||
className="progress-bar"
|
/>
|
||||||
/>
|
|
||||||
) : (
|
|
||||||
'-'
|
|
||||||
)}
|
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
@@ -120,9 +110,7 @@ function EndPointMetrics({
|
|||||||
{isLoading || isRefetching ? (
|
{isLoading || isRefetching ? (
|
||||||
<Skeleton.Button active size="small" />
|
<Skeleton.Button active size="small" />
|
||||||
) : (
|
) : (
|
||||||
<Tooltip title={metricsData?.lastUsed}>
|
<Tooltip title={metricsData?.lastUsed}>{metricsData?.lastUsed}</Tooltip>
|
||||||
{getDisplayValue(metricsData?.lastUsed)}
|
|
||||||
</Tooltip>
|
|
||||||
)}
|
)}
|
||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import { Card } from 'antd';
|
import { Card } from 'antd';
|
||||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
|
||||||
import GridCard from 'container/GridCardLayout/GridCard';
|
import GridCard from 'container/GridCardLayout/GridCard';
|
||||||
import { Widgets } from 'types/api/dashboard/getAll';
|
import { Widgets } from 'types/api/dashboard/getAll';
|
||||||
|
|
||||||
@@ -23,7 +22,6 @@ function MetricOverTimeGraph({
|
|||||||
customOnDragSelect={(): void => {}}
|
customOnDragSelect={(): void => {}}
|
||||||
customTimeRange={timeRange}
|
customTimeRange={timeRange}
|
||||||
customTimeRangeWindowForCoRelation="5m"
|
customTimeRangeWindowForCoRelation="5m"
|
||||||
version={ENTITY_VERSION_V5}
|
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
</Card>
|
</Card>
|
||||||
|
|||||||
@@ -8,11 +8,17 @@ import {
|
|||||||
endPointStatusCodeColumns,
|
endPointStatusCodeColumns,
|
||||||
extractPortAndEndpoint,
|
extractPortAndEndpoint,
|
||||||
formatDataForTable,
|
formatDataForTable,
|
||||||
|
getAllEndpointsWidgetData,
|
||||||
getCustomFiltersForBarChart,
|
getCustomFiltersForBarChart,
|
||||||
|
getEndPointDetailsQueryPayload,
|
||||||
|
getFormattedDependentServicesData,
|
||||||
getFormattedEndPointDropDownData,
|
getFormattedEndPointDropDownData,
|
||||||
|
getFormattedEndPointMetricsData,
|
||||||
getFormattedEndPointStatusCodeChartData,
|
getFormattedEndPointStatusCodeChartData,
|
||||||
getFormattedEndPointStatusCodeData,
|
getFormattedEndPointStatusCodeData,
|
||||||
getGroupByFiltersFromGroupByValues,
|
getGroupByFiltersFromGroupByValues,
|
||||||
|
getLatencyOverTimeWidgetData,
|
||||||
|
getRateOverTimeWidgetData,
|
||||||
getStatusCodeBarChartWidgetData,
|
getStatusCodeBarChartWidgetData,
|
||||||
getTopErrorsColumnsConfig,
|
getTopErrorsColumnsConfig,
|
||||||
getTopErrorsCoRelationQueryFilters,
|
getTopErrorsCoRelationQueryFilters,
|
||||||
@@ -43,13 +49,119 @@ jest.mock('../utils', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe('API Monitoring Utils', () => {
|
describe('API Monitoring Utils', () => {
|
||||||
|
describe('getAllEndpointsWidgetData', () => {
|
||||||
|
it('should create a widget with correct configuration', () => {
|
||||||
|
// Arrange
|
||||||
|
const groupBy = [
|
||||||
|
{
|
||||||
|
dataType: DataTypes.String,
|
||||||
|
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||||
|
key: 'http.method',
|
||||||
|
type: '',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||||
|
const domainName = 'test-domain';
|
||||||
|
const filters = {
|
||||||
|
items: [
|
||||||
|
{
|
||||||
|
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||||
|
id: 'test-filter',
|
||||||
|
key: {
|
||||||
|
dataType: DataTypes.String,
|
||||||
|
key: 'test-key',
|
||||||
|
type: '',
|
||||||
|
},
|
||||||
|
op: '=',
|
||||||
|
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||||
|
value: 'test-value',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
op: 'AND',
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
const result = getAllEndpointsWidgetData(
|
||||||
|
groupBy as BaseAutocompleteData[],
|
||||||
|
domainName,
|
||||||
|
filters as IBuilderQuery['filters'],
|
||||||
|
);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(result).toBeDefined();
|
||||||
|
expect(result.id).toBeDefined();
|
||||||
|
// Title is a React component, not a string
|
||||||
|
expect(result.title).toBeDefined();
|
||||||
|
expect(result.panelTypes).toBe(PANEL_TYPES.TABLE);
|
||||||
|
|
||||||
|
// Check that each query includes the domainName filter
|
||||||
|
result.query.builder.queryData.forEach((query) => {
|
||||||
|
const serverNameFilter = query.filters?.items?.find(
|
||||||
|
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||||
|
);
|
||||||
|
expect(serverNameFilter).toBeDefined();
|
||||||
|
expect(serverNameFilter?.value).toBe(domainName);
|
||||||
|
|
||||||
|
// Check that the custom filters were included
|
||||||
|
const testFilter = query.filters?.items?.find(
|
||||||
|
(item) => item.id === 'test-filter',
|
||||||
|
);
|
||||||
|
expect(testFilter).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify groupBy was included in queries
|
||||||
|
if (result.query.builder.queryData[0].groupBy) {
|
||||||
|
const hasCustomGroupBy = result.query.builder.queryData[0].groupBy.some(
|
||||||
|
(item) => item && item.key === 'http.method',
|
||||||
|
);
|
||||||
|
expect(hasCustomGroupBy).toBe(true);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle empty groupBy correctly', () => {
|
||||||
|
// Arrange
|
||||||
|
const groupBy: any[] = [];
|
||||||
|
const domainName = 'test-domain';
|
||||||
|
const filters = { items: [], op: 'AND' };
|
||||||
|
|
||||||
|
// Act
|
||||||
|
const result = getAllEndpointsWidgetData(groupBy, domainName, filters);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(result).toBeDefined();
|
||||||
|
// Should only include default groupBy
|
||||||
|
if (result.query.builder.queryData[0].groupBy) {
|
||||||
|
expect(result.query.builder.queryData[0].groupBy.length).toBeGreaterThan(0);
|
||||||
|
// Check that it doesn't have extra group by fields (only defaults)
|
||||||
|
const defaultGroupByLength =
|
||||||
|
result.query.builder.queryData[0].groupBy.length;
|
||||||
|
const resultWithCustomGroupBy = getAllEndpointsWidgetData(
|
||||||
|
[
|
||||||
|
{
|
||||||
|
dataType: DataTypes.String,
|
||||||
|
key: 'custom.field',
|
||||||
|
type: '',
|
||||||
|
},
|
||||||
|
] as BaseAutocompleteData[],
|
||||||
|
domainName,
|
||||||
|
filters,
|
||||||
|
);
|
||||||
|
// Custom groupBy should have more fields than default
|
||||||
|
if (resultWithCustomGroupBy.query.builder.queryData[0].groupBy) {
|
||||||
|
expect(
|
||||||
|
resultWithCustomGroupBy.query.builder.queryData[0].groupBy.length,
|
||||||
|
).toBeGreaterThan(defaultGroupByLength);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
  // New tests for formatDataForTable
  describe('formatDataForTable', () => {
    it('should format rows correctly with valid data', () => {
      const columns = APIMonitoringColumnsMock;
      const data = [
        [
          // eslint-disable-next-line sonarjs/no-duplicate-string
          'test-domain', // domainName
          '10', // endpoints
          '25', // rps

@@ -107,7 +219,6 @@ describe('API Monitoring Utils', () => {
      const groupBy = [
        {
          id: 'group-by-1',
          // eslint-disable-next-line sonarjs/no-duplicate-string
          key: 'http.method',
          dataType: DataTypes.String,
          type: '',

@@ -341,6 +452,243 @@ describe('API Monitoring Utils', () => {
    });
  });

  describe('getEndPointDetailsQueryPayload', () => {
    it('should generate proper query payload with all parameters', () => {
      // Arrange
      const domainName = 'test-domain';
      const startTime = 1609459200000; // 2021-01-01
      const endTime = 1609545600000; // 2021-01-02
      const filters = {
        items: [
          {
            id: 'test-filter',
            key: {
              dataType: 'string',
              key: 'test.key',
              type: '',
            },
            op: '=',
            value: 'test-value',
          },
        ],
        op: 'AND',
      };

      // Act
      const result = getEndPointDetailsQueryPayload(
        domainName,
        startTime,
        endTime,
        filters as IBuilderQuery['filters'],
      );

      // Assert
      expect(result).toHaveLength(6); // Should return 6 queries

      // Check that each query includes proper parameters
      result.forEach((query) => {
        expect(query).toHaveProperty('start', startTime);
        expect(query).toHaveProperty('end', endTime);

        // Should have query property with builder data
        expect(query).toHaveProperty('query');
        expect(query.query).toHaveProperty('builder');

        // All queries should include the domain filter
        const {
          query: {
            builder: { queryData },
          },
        } = query;
        queryData.forEach((qd) => {
          if (qd.filters && qd.filters.items) {
            const serverNameFilter = qd.filters?.items?.find(
              (item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
            );
            expect(serverNameFilter).toBeDefined();
            // Only check if the serverNameFilter exists, as the actual value might vary
            // depending on implementation details or domain defaults
            if (serverNameFilter) {
              expect(typeof serverNameFilter.value).toBe('string');
            }
          }

          // Should include our custom filter
          const customFilter = qd.filters?.items?.find(
            (item) => item.id === 'test-filter',
          );
          expect(customFilter).toBeDefined();
        });
      });
    });
  });

  describe('getRateOverTimeWidgetData', () => {
    it('should generate widget configuration for rate over time', () => {
      // Arrange
      const domainName = 'test-domain';
      const endPointName = '/api/test';
      const filters = { items: [], op: 'AND' };

      // Act
      const result = getRateOverTimeWidgetData(
        domainName,
        endPointName,
        filters as IBuilderQuery['filters'],
      );

      // Assert
      expect(result).toBeDefined();
      expect(result).toHaveProperty('title', 'Rate Over Time');
      // Check only title since description might vary

      // Check query configuration
      expect(result).toHaveProperty('query');
      // eslint-disable-next-line sonarjs/no-duplicate-string
      expect(result).toHaveProperty('query.builder.queryData');

      const queryData = result.query.builder.queryData[0];

      // Should have domain filter
      const domainFilter = queryData.filters?.items?.find(
        (item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
      );
      expect(domainFilter).toBeDefined();
      if (domainFilter) {
        expect(typeof domainFilter.value).toBe('string');
      }

      // Should have 'rate' time aggregation
      expect(queryData).toHaveProperty('timeAggregation', 'rate');

      // Should have proper legend that includes endpoint info
      expect(queryData).toHaveProperty('legend');
      expect(
        typeof queryData.legend === 'string' ? queryData.legend : '',
      ).toContain('/api/test');
    });

    it('should handle case without endpoint name', () => {
      // Arrange
      const domainName = 'test-domain';
      const endPointName = '';
      const filters = { items: [], op: 'AND' };

      // Act
      const result = getRateOverTimeWidgetData(
        domainName,
        endPointName,
        filters as IBuilderQuery['filters'],
      );

      // Assert
      expect(result).toBeDefined();

      const queryData = result.query.builder.queryData[0];

      // Legend should be domain name only
      expect(queryData).toHaveProperty('legend', domainName);
    });
  });

  describe('getLatencyOverTimeWidgetData', () => {
    it('should generate widget configuration for latency over time', () => {
      // Arrange
      const domainName = 'test-domain';
      const endPointName = '/api/test';
      const filters = { items: [], op: 'AND' };

      // Act
      const result = getLatencyOverTimeWidgetData(
        domainName,
        endPointName,
        filters as IBuilderQuery['filters'],
      );

      // Assert
      expect(result).toBeDefined();
      expect(result).toHaveProperty('title', 'Latency Over Time');
      // Check only title since description might vary

      // Check query configuration
      expect(result).toHaveProperty('query');
      expect(result).toHaveProperty('query.builder.queryData');

      const queryData = result.query.builder.queryData[0];

      // Should have domain filter
      const domainFilter = queryData.filters?.items?.find(
        (item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
      );
      expect(domainFilter).toBeDefined();
      if (domainFilter) {
        expect(typeof domainFilter.value).toBe('string');
      }

      // Should use duration_nano as the aggregate attribute
      expect(queryData.aggregateAttribute).toHaveProperty('key', 'duration_nano');

      // Should have 'p99' time aggregation
      expect(queryData).toHaveProperty('timeAggregation', 'p99');
    });

    it('should handle case without endpoint name', () => {
      // Arrange
      const domainName = 'test-domain';
      const endPointName = '';
      const filters = { items: [], op: 'AND' };

      // Act
      const result = getLatencyOverTimeWidgetData(
        domainName,
        endPointName,
        filters as IBuilderQuery['filters'],
      );

      // Assert
      expect(result).toBeDefined();

      const queryData = result.query.builder.queryData[0];

      // Legend should be domain name only
      expect(queryData).toHaveProperty('legend', domainName);
    });

    // Changed approach to verify end-to-end behavior for URL with port
    it('should format legends appropriately for complete URLs with ports', () => {
      // Arrange
      const domainName = 'test-domain';
      const endPointName = 'http://example.com:8080/api/test';
      const filters = { items: [], op: 'AND' };

      // Extract what we expect the function to extract
      const expectedParts = extractPortAndEndpoint(endPointName);

      // Act
      const result = getLatencyOverTimeWidgetData(
        domainName,
        endPointName,
        filters as IBuilderQuery['filters'],
      );

      // Assert
      const queryData = result.query.builder.queryData[0];

      // Check that legend is present and is a string
      expect(queryData).toHaveProperty('legend');
      expect(typeof queryData.legend).toBe('string');

      // If the URL has a port and endpoint, the legend should reflect that appropriately
      // (Testing the integration rather than the exact formatting)
      if (expectedParts.port !== '-') {
        // Verify that both components are incorporated into the legend in some way
        // This tests the behavior without relying on the exact implementation details
        const legendStr = queryData.legend as string;
        expect(legendStr).not.toBe(domainName); // Legend should be different when URL has port/endpoint
      }
    });
  });

  describe('getFormattedEndPointDropDownData', () => {
    it('should format endpoint dropdown data correctly', () => {
      // Arrange

@@ -350,7 +698,6 @@ describe('API Monitoring Utils', () => {
          data: {
            // eslint-disable-next-line sonarjs/no-duplicate-string
            [URL_PATH_KEY]: '/api/users',
            'url.full': 'http://example.com/api/users',
            A: 150, // count or other metric
          },
        },

@@ -358,7 +705,6 @@ describe('API Monitoring Utils', () => {
          data: {
            // eslint-disable-next-line sonarjs/no-duplicate-string
            [URL_PATH_KEY]: '/api/orders',
            'url.full': 'http://example.com/api/orders',
            A: 75,
          },
        },

@@ -442,6 +788,87 @@ describe('API Monitoring Utils', () => {
    });
  });

  describe('getFormattedEndPointMetricsData', () => {
    it('should format endpoint metrics data correctly', () => {
      // Arrange
      const mockData = [
        {
          data: {
            A: '50', // rate
            B: '15000000', // latency in nanoseconds
            C: '5', // required by type
            D: '1640995200000000', // timestamp in nanoseconds
            F1: '5.5', // error rate
          },
        },
      ];

      // Act
      const result = getFormattedEndPointMetricsData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.key).toBeDefined();
      expect(result.rate).toBe('50');
      expect(result.latency).toBe(15); // Should be converted from ns to ms
      expect(result.errorRate).toBe(5.5);
      expect(typeof result.lastUsed).toBe('string'); // Time formatting is tested elsewhere
    });

    // eslint-disable-next-line sonarjs/no-duplicate-string
    it('should handle undefined values in data', () => {
      // Arrange
      const mockData = [
        {
          data: {
            A: undefined,
            B: 'n/a',
            C: '', // required by type
            D: undefined,
            F1: 'n/a',
          },
        },
      ];

      // Act
      const result = getFormattedEndPointMetricsData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.rate).toBe('-');
      expect(result.latency).toBe('-');
      expect(result.errorRate).toBe(0);
      expect(result.lastUsed).toBe('-');
    });

    it('should handle empty input array', () => {
      // Act
      const result = getFormattedEndPointMetricsData([]);

      // Assert
      expect(result).toBeDefined();
      expect(result.rate).toBe('-');
      expect(result.latency).toBe('-');
      expect(result.errorRate).toBe(0);
      expect(result.lastUsed).toBe('-');
    });

    it('should handle undefined input', () => {
      // Arrange
      const undefinedInput = undefined as any;

      // Act
      const result = getFormattedEndPointMetricsData(undefinedInput);

      // Assert
      expect(result).toBeDefined();
      expect(result.rate).toBe('-');
      expect(result.latency).toBe('-');
      expect(result.errorRate).toBe(0);
      expect(result.lastUsed).toBe('-');
    });
  });

  describe('getFormattedEndPointStatusCodeData', () => {
    it('should format status code data correctly', () => {
      // Arrange
@@ -578,6 +1005,139 @@ describe('API Monitoring Utils', () => {
    });
  });

  describe('getFormattedDependentServicesData', () => {
    it('should format dependent services data correctly', () => {
      // Arrange
      const mockData = [
        {
          data: {
            // eslint-disable-next-line sonarjs/no-duplicate-string
            'service.name': 'auth-service',
            A: '500', // count
            B: '120000000', // latency in nanoseconds
            C: '15', // rate
            F1: '2.5', // error percentage
          },
        },
        {
          data: {
            'service.name': 'db-service',
            A: '300',
            B: '80000000',
            C: '10',
            F1: '1.2',
          },
        },
      ];

      // Act
      const result = getFormattedDependentServicesData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.length).toBe(2);

      // Check first service
      expect(result[0].key).toBeDefined();
      expect(result[0].serviceData.serviceName).toBe('auth-service');
      expect(result[0].serviceData.count).toBe(500);
      expect(typeof result[0].serviceData.percentage).toBe('number');
      expect(result[0].latency).toBe(120); // Should be converted from ns to ms
      expect(result[0].rate).toBe('15');
      expect(result[0].errorPercentage).toBe('2.5');

      // Check second service
      expect(result[1].serviceData.serviceName).toBe('db-service');
      expect(result[1].serviceData.count).toBe(300);
      expect(result[1].latency).toBe(80);
      expect(result[1].rate).toBe('10');
      expect(result[1].errorPercentage).toBe('1.2');

      // Verify percentage calculation
      const totalCount = 500 + 300;
      expect(result[0].serviceData.percentage).toBeCloseTo(
        (500 / totalCount) * 100,
        2,
      );
      expect(result[1].serviceData.percentage).toBeCloseTo(
        (300 / totalCount) * 100,
        2,
      );
    });

    it('should handle undefined values in data', () => {
      // Arrange
      const mockData = [
        {
          data: {
            'service.name': 'auth-service',
            A: 'n/a',
            B: undefined,
            C: 'n/a',
            F1: undefined,
          },
        },
      ];

      // Act
      const result = getFormattedDependentServicesData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.length).toBe(1);
      expect(result[0].serviceData.serviceName).toBe('auth-service');
      expect(result[0].serviceData.count).toBe('-');
      expect(result[0].serviceData.percentage).toBe(0);
      expect(result[0].latency).toBe('-');
      expect(result[0].rate).toBe('-');
      expect(result[0].errorPercentage).toBe(0);
    });

    it('should handle empty input array', () => {
      // Act
      const result = getFormattedDependentServicesData([]);

      // Assert
      expect(result).toBeDefined();
      expect(result).toEqual([]);
    });

    it('should handle undefined input', () => {
      // Arrange
      const undefinedInput = undefined as any;

      // Act
      const result = getFormattedDependentServicesData(undefinedInput);

      // Assert
      expect(result).toBeDefined();
      expect(result).toEqual([]);
    });

    it('should handle missing service name', () => {
      // Arrange
      const mockData = [
        {
          data: {
            // Missing service.name
            A: '200',
            B: '50000000',
            C: '8',
            F1: '0.5',
          },
        },
      ];

      // Act
      const result = getFormattedDependentServicesData(mockData as any);

      // Assert
      expect(result).toBeDefined();
      expect(result.length).toBe(1);
      expect(result[0].serviceData.serviceName).toBe('-');
    });
  });

  describe('getFormattedEndPointStatusCodeChartData', () => {
    afterEach(() => {
      jest.resetAllMocks();
@@ -1,221 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
 * V5 Migration Tests for All Endpoints Widget (Endpoint Overview)
 *
 * These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
 * - Filter format change: filters.items[] → filter.expression
 * - Aggregation format: aggregateAttribute → aggregations[] array
 * - Domain filter: (net.peer.name OR server.address)
 * - Kind filter: kind_string = 'Client'
 * - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
 * - GroupBy: Both http.url AND url.full with type 'attribute'
 */
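The shape change described in the comment above, as a minimal sketch. The two literals below are illustrative only (fields trimmed, values borrowed from the assertions in this file); they are not the exact payloads produced by getAllEndpointsWidgetData:

// V4-style builder query (old): structured filter items plus a single aggregate attribute.
const v4QuerySketch = {
  aggregateOperator: 'p99',
  aggregateAttribute: { key: 'duration_nano' },
  filters: {
    op: 'AND',
    items: [{ key: { key: 'net.peer.name' }, op: '=', value: 'api.example.com' }],
  },
};

// V5-style builder query (new): one filter expression string plus an aggregations array.
const v5QuerySketch = {
  aggregations: [{ expression: 'p99(duration_nano)' }],
  filter: {
    expression:
      "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client'",
  },
};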
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
  BaseAutocompleteData,
  DataTypes,
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('AllEndpointsWidget - V5 Migration Validation', () => {
  const mockDomainName = 'api.example.com';
  const emptyFilters: IBuilderQuery['filters'] = {
    items: [],
    op: 'AND',
  };
  const emptyGroupBy: BaseAutocompleteData[] = [];

  describe('1. V5 Format Migration - All Four Queries', () => {
    it('all queries use filter.expression format (not filters.items)', () => {
      const widget = getAllEndpointsWidgetData(
        emptyGroupBy,
        mockDomainName,
        emptyFilters,
      );

      const { queryData } = widget.query.builder;

      // All 4 queries must use V5 filter.expression format
      queryData.forEach((query) => {
        expect(query.filter).toBeDefined();
        expect(query.filter?.expression).toBeDefined();
        expect(typeof query.filter?.expression).toBe('string');
        // OLD V4 format should NOT exist
        expect(query).not.toHaveProperty('filters');
      });

      // Verify we have exactly 4 queries
      expect(queryData).toHaveLength(4);
    });

    it('all queries use aggregations array format (not aggregateAttribute)', () => {
      const widget = getAllEndpointsWidgetData(
        emptyGroupBy,
        mockDomainName,
        emptyFilters,
      );

      const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;

      // Query A: count()
      expect(queryA.aggregations).toBeDefined();
      expect(Array.isArray(queryA.aggregations)).toBe(true);
      expect(queryA.aggregations).toEqual([{ expression: 'count()' }]);
      expect(queryA).not.toHaveProperty('aggregateAttribute');

      // Query B: p99(duration_nano)
      expect(queryB.aggregations).toBeDefined();
      expect(Array.isArray(queryB.aggregations)).toBe(true);
      expect(queryB.aggregations).toEqual([{ expression: 'p99(duration_nano)' }]);
      expect(queryB).not.toHaveProperty('aggregateAttribute');

      // Query C: max(timestamp)
      expect(queryC.aggregations).toBeDefined();
      expect(Array.isArray(queryC.aggregations)).toBe(true);
      expect(queryC.aggregations).toEqual([{ expression: 'max(timestamp)' }]);
      expect(queryC).not.toHaveProperty('aggregateAttribute');

      // Query D: count() (disabled, for errors)
      expect(queryD.aggregations).toBeDefined();
      expect(Array.isArray(queryD.aggregations)).toBe(true);
      expect(queryD.aggregations).toEqual([{ expression: 'count()' }]);
      expect(queryD).not.toHaveProperty('aggregateAttribute');
    });

    it('all queries have correct base filter expressions', () => {
      const widget = getAllEndpointsWidgetData(
        emptyGroupBy,
        mockDomainName,
        emptyFilters,
      );

      const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;

      const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;

      // Queries A, B, C have identical base filter
      expect(queryA.filter?.expression).toBe(
        `${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
      );
      expect(queryB.filter?.expression).toBe(
        `${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
      );
      expect(queryC.filter?.expression).toBe(
        `${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
      );

      // Query D has additional has_error filter
      expect(queryD.filter?.expression).toBe(
        `${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
      );
    });
  });

  describe('2. GroupBy Structure', () => {
    it('default groupBy includes both http.url and url.full with type attribute', () => {
      const widget = getAllEndpointsWidgetData(
        emptyGroupBy,
        mockDomainName,
        emptyFilters,
      );

      const { queryData } = widget.query.builder;

      // All queries should have the same default groupBy
      queryData.forEach((query) => {
        expect(query.groupBy).toHaveLength(2);

        // http.url
        expect(query.groupBy).toContainEqual({
          dataType: DataTypes.String,
          isColumn: false,
          isJSON: false,
          key: 'http.url',
          type: 'attribute',
        });

        // url.full
        expect(query.groupBy).toContainEqual({
          dataType: DataTypes.String,
          isColumn: false,
          isJSON: false,
          key: 'url.full',
          type: 'attribute',
        });
      });
    });

    it('custom groupBy is appended after defaults', () => {
      const customGroupBy: BaseAutocompleteData[] = [
        {
          dataType: DataTypes.String,
          key: 'service.name',
          type: 'resource',
        },
        {
          dataType: DataTypes.String,
          key: 'deployment.environment',
          type: 'resource',
        },
      ];

      const widget = getAllEndpointsWidgetData(
        customGroupBy,
        mockDomainName,
        emptyFilters,
      );

      const { queryData } = widget.query.builder;

      // All queries should have defaults + custom groupBy
      queryData.forEach((query) => {
        expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom

        // First two should be defaults (http.url, url.full)
        expect(query.groupBy[0].key).toBe('http.url');
        expect(query.groupBy[1].key).toBe('url.full');

        // Last two should be custom (matching subset of properties)
        expect(query.groupBy[2]).toMatchObject({
          dataType: DataTypes.String,
          key: 'service.name',
          type: 'resource',
        });
        expect(query.groupBy[3]).toMatchObject({
          dataType: DataTypes.String,
          key: 'deployment.environment',
          type: 'resource',
        });
      });
    });
  });

  describe('3. Query-Specific Validations', () => {
    it('query D has has_error filter and is disabled', () => {
      const widget = getAllEndpointsWidgetData(
        emptyGroupBy,
        mockDomainName,
        emptyFilters,
      );

      const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;

      // Query D should be disabled
      expect(queryD.disabled).toBe(true);

      // Queries A, B, C should NOT be disabled
      expect(queryA.disabled).toBe(false);
      expect(queryB.disabled).toBe(false);
      expect(queryC.disabled).toBe(false);

      // Query D should have has_error in filter
      expect(queryD.filter?.expression).toContain('has_error = true');

      // Queries A, B, C should NOT have has_error
      expect(queryA.filter?.expression).not.toContain('has_error');
      expect(queryB.filter?.expression).not.toContain('has_error');
      expect(queryC.filter?.expression).not.toContain('has_error');
    });
  });
});

@@ -0,0 +1,211 @@
import { render, screen } from '@testing-library/react';
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
import { SuccessResponse } from 'types/api';

import EndPointMetrics from '../Explorer/Domains/DomainDetails/components/EndPointMetrics';
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';

// Create a partial mock of the UseQueryResult interface for testing
interface MockQueryResult {
  isLoading: boolean;
  isRefetching: boolean;
  isError: boolean;
  data?: any;
  refetch: () => void;
}

// Mock the utils function
jest.mock('container/ApiMonitoring/utils', () => ({
  getFormattedEndPointMetricsData: jest.fn(),
}));

// Mock the ErrorState component
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
  __esModule: true,
  default: jest.fn().mockImplementation(({ refetch }) => (
    <div data-testid="error-state-mock">
      <button type="button" data-testid="refetch-button" onClick={refetch}>
        Retry
      </button>
    </div>
  )),
}));

// Mock antd components
jest.mock('antd', () => {
  const originalModule = jest.requireActual('antd');
  return {
    ...originalModule,
    Progress: jest
      .fn()
      .mockImplementation(() => <div data-testid="progress-bar-mock" />),
    Skeleton: {
      Button: jest
        .fn()
        .mockImplementation(() => <div data-testid="skeleton-button-mock" />),
    },
    Tooltip: jest
      .fn()
      .mockImplementation(({ children }) => (
        <div data-testid="tooltip-mock">{children}</div>
      )),
    Typography: {
      Text: jest.fn().mockImplementation(({ children, className }) => (
        <div data-testid={`typography-${className}`} className={className}>
          {children}
        </div>
      )),
    },
  };
});

describe('EndPointMetrics', () => {
  // Common metric data to use in tests
  const mockMetricsData = {
    key: 'test-key',
    rate: '42',
    latency: 99,
    errorRate: 5.5,
    lastUsed: '5 minutes ago',
  };

  // Basic props for tests
  const refetchFn = jest.fn();

  beforeEach(() => {
    jest.clearAllMocks();
    (getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(
      mockMetricsData,
    );
  });

  it('renders loading state correctly', () => {
    const mockQuery: MockQueryResult = {
      isLoading: true,
      isRefetching: false,
      isError: false,
      data: undefined,
      refetch: refetchFn,
    };

    render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

    // Verify skeleton loaders are visible
    const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
    expect(skeletonElements.length).toBe(4);

    // Verify labels are visible even during loading
    expect(screen.getByText('Rate')).toBeInTheDocument();
    expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
    expect(screen.getByText('ERROR %')).toBeInTheDocument();
    expect(screen.getByText('LAST USED')).toBeInTheDocument();
  });

  it('renders error state correctly', () => {
    const mockQuery: MockQueryResult = {
      isLoading: false,
      isRefetching: false,
      isError: true,
      data: undefined,
      refetch: refetchFn,
    };

    render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

    // Verify error state is shown
    expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
    expect(ErrorState).toHaveBeenCalledWith(
      { refetch: expect.any(Function) },
      expect.anything(),
    );
  });

  it('renders data correctly when loaded', () => {
    const mockData = {
      payload: {
        data: {
          result: [
            {
              table: {
                rows: [
                  { data: { A: '42', B: '99000000', D: '1609459200000000', F1: '5.5' } },
                ],
              },
            },
          ],
        },
      },
    } as SuccessResponse<any>;

    const mockQuery: MockQueryResult = {
      isLoading: false,
      isRefetching: false,
      isError: false,
      data: mockData,
      refetch: refetchFn,
    };

    render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

    // Verify the utils function was called with the data
    expect(getFormattedEndPointMetricsData).toHaveBeenCalledWith(
      mockData.payload.data.result[0].table.rows,
    );

    // Verify data is displayed
    expect(
      screen.getByText(`${mockMetricsData.rate} ops/sec`),
    ).toBeInTheDocument();
    expect(screen.getByText(`${mockMetricsData.latency}ms`)).toBeInTheDocument();
    expect(screen.getByText(mockMetricsData.lastUsed)).toBeInTheDocument();
    expect(screen.getByTestId('progress-bar-mock')).toBeInTheDocument(); // For error rate
  });

  it('handles refetching state correctly', () => {
    const mockQuery: MockQueryResult = {
      isLoading: false,
      isRefetching: true,
      isError: false,
      data: undefined,
      refetch: refetchFn,
    };

    render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

    // Verify skeleton loaders are visible during refetching
    const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
    expect(skeletonElements.length).toBe(4);
  });

  it('handles null metrics data gracefully', () => {
    // Mock the utils function to return null to simulate missing data
    (getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(null);

    const mockData = {
      payload: {
        data: {
          result: [
            {
              table: {
                rows: [],
              },
            },
          ],
        },
      },
    } as SuccessResponse<any>;

    const mockQuery: MockQueryResult = {
      isLoading: false,
      isRefetching: false,
      isError: false,
      data: mockData,
      refetch: refetchFn,
    };

    render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);

    // Even with null data, the component should render without crashing
    expect(screen.getByText('Rate')).toBeInTheDocument();
  });
});

@@ -1,173 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
 * V5 Migration Tests for Endpoint Dropdown Query
 *
 * These tests validate the migration from V4 to V5 format for the third payload
 * in getEndPointDetailsQueryPayload (endpoint dropdown data):
 * - Filter format change: filters.items[] → filter.expression
 * - Domain handling: (net.peer.name OR server.address)
 * - Kind filter: kind_string = 'Client'
 * - Existence check: (http.url EXISTS OR url.full EXISTS)
 * - Aggregation: count() expression
 * - GroupBy: Both http.url AND url.full with type 'attribute'
 */
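The folding described above, shown as data rather than prose. The input items and the expected expression are copied from the assertions in this file; only the variable names are illustrative:

// Two V4-style filter items, one of them the special-cased http.url filter.
const v4FilterItems = [
  { key: { key: 'service.name' }, op: '=', value: 'user-service' },
  { key: { key: 'http.url' }, op: '=', value: '/api/users' },
];

// Expected V5 expression for domain 'api.example.com' (verbatim from the test below).
const expectedV5Expression =
  "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')";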
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('EndpointDropdown - V5 Migration Validation', () => {
  const mockDomainName = 'api.example.com';
  const mockStartTime = 1000;
  const mockEndTime = 2000;
  const emptyFilters: IBuilderQuery['filters'] = {
    items: [],
    op: 'AND',
  };

  describe('1. V5 Format Migration - Structure and Base Filters', () => {
    it('migrates to V5 format with correct filter expression structure, aggregations, and groupBy', () => {
      const payload = getEndPointDetailsQueryPayload(
        mockDomainName,
        mockStartTime,
        mockEndTime,
        emptyFilters,
      );

      // Third payload is the endpoint dropdown query (index 2)
      const dropdownQuery = payload[2];
      const queryA = dropdownQuery.query.builder.queryData[0];

      // CRITICAL V5 MIGRATION: filter.expression (not filters.items)
      expect(queryA.filter).toBeDefined();
      expect(queryA.filter?.expression).toBeDefined();
      expect(typeof queryA.filter?.expression).toBe('string');
      expect(queryA).not.toHaveProperty('filters');

      // Base filter 1: Domain (net.peer.name OR server.address)
      expect(queryA.filter?.expression).toContain(
        `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
      );

      // Base filter 2: Kind
      expect(queryA.filter?.expression).toContain("kind_string = 'Client'");

      // Base filter 3: Existence check
      expect(queryA.filter?.expression).toContain(
        '(http.url EXISTS OR url.full EXISTS)',
      );

      // V5 Aggregation format: aggregations array (not aggregateAttribute)
      expect(queryA.aggregations).toBeDefined();
      expect(Array.isArray(queryA.aggregations)).toBe(true);
      expect(queryA.aggregations?.[0]).toEqual({
        expression: 'count()',
      });
      expect(queryA).not.toHaveProperty('aggregateAttribute');

      // GroupBy: Both http.url and url.full
      expect(queryA.groupBy).toHaveLength(2);
      expect(queryA.groupBy).toContainEqual({
        key: 'http.url',
        dataType: 'string',
        type: 'attribute',
      });
      expect(queryA.groupBy).toContainEqual({
        key: 'url.full',
        dataType: 'string',
        type: 'attribute',
      });
    });
  });

  describe('2. Custom Filters Integration', () => {
    it('merges custom filters into filter expression with AND logic', () => {
      const customFilters: IBuilderQuery['filters'] = {
        items: [
          {
            id: 'test-1',
            key: {
              key: 'service.name',
              dataType: 'string' as any,
              type: 'resource',
            },
            op: '=',
            value: 'user-service',
          },
          {
            id: 'test-2',
            key: {
              key: 'deployment.environment',
              dataType: 'string' as any,
              type: 'resource',
            },
            op: '=',
            value: 'production',
          },
        ],
        op: 'AND',
      };

      const payload = getEndPointDetailsQueryPayload(
        mockDomainName,
        mockStartTime,
        mockEndTime,
        customFilters,
      );

      const dropdownQuery = payload[2];
      const expression =
        dropdownQuery.query.builder.queryData[0].filter?.expression;

      // Exact filter expression with custom filters merged
      expect(expression).toBe(
        "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
      );
    });
  });

  describe('3. HTTP URL Filter Special Handling', () => {
    it('converts http.url filter to (http.url OR url.full) expression', () => {
      const filtersWithHttpUrl: IBuilderQuery['filters'] = {
        items: [
          {
            id: 'http-url-filter',
            key: {
              key: 'http.url',
              dataType: 'string' as any,
              type: 'tag',
            },
            op: '=',
            value: '/api/users',
          },
          {
            id: 'service-filter',
            key: {
              key: 'service.name',
              dataType: 'string' as any,
              type: 'resource',
            },
            op: '=',
            value: 'user-service',
          },
        ],
        op: 'AND',
      };

      const payload = getEndPointDetailsQueryPayload(
        mockDomainName,
        mockStartTime,
        mockEndTime,
        filtersWithHttpUrl,
      );

      const dropdownQuery = payload[2];
      const expression =
        dropdownQuery.query.builder.queryData[0].filter?.expression;

      // CRITICAL: Exact filter expression with http.url converted to OR logic
      expect(expression).toBe(
        "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
      );
    });
  });
});

@@ -1,173 +0,0 @@
import {
  getLatencyOverTimeWidgetData,
  getRateOverTimeWidgetData,
} from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('MetricOverTime - V5 Migration Validation', () => {
  const mockDomainName = 'api.example.com';
  // eslint-disable-next-line sonarjs/no-duplicate-string
  const mockEndpointName = '/api/users';
  const emptyFilters: IBuilderQuery['filters'] = {
    items: [],
    op: 'AND',
  };

  describe('1. Rate Over Time - V5 Payload Structure', () => {
    it('generates V5 filter expression format (not V3 filters.items)', () => {
      const widget = getRateOverTimeWidgetData(
        mockDomainName,
        mockEndpointName,
        emptyFilters,
      );

      const queryData = widget.query.builder.queryData[0];

      // CRITICAL: Must use V5 format (filter.expression), not V3 format (filters.items)
      expect(queryData.filter).toBeDefined();
      expect(queryData?.filter?.expression).toBeDefined();
      expect(typeof queryData?.filter?.expression).toBe('string');

      // OLD V3 format should NOT exist
      expect(queryData).not.toHaveProperty('filters.items');
    });

    it('uses new domain filter format: (net.peer.name OR server.address)', () => {
      const widget = getRateOverTimeWidgetData(
        mockDomainName,
        mockEndpointName,
        emptyFilters,
      );

      const queryData = widget.query.builder.queryData[0];

      // Verify EXACT new filter format with OR operator
      expect(queryData?.filter?.expression).toContain(
        `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
      );

      // Endpoint name is used in legend, not filter
      expect(queryData.legend).toContain('/api/users');
    });

    it('merges custom filters into filter expression', () => {
      const customFilters: IBuilderQuery['filters'] = {
        items: [
          {
            id: 'test-1',
            key: {
              // eslint-disable-next-line sonarjs/no-duplicate-string
              key: 'service.name',
              dataType: DataTypes.String,
              type: 'resource',
            },
            op: '=',
            // eslint-disable-next-line sonarjs/no-duplicate-string
            value: 'user-service',
          },
          {
            id: 'test-2',
            key: {
              key: 'deployment.environment',
              dataType: DataTypes.String,
              type: 'resource',
            },
            op: '=',
            value: 'production',
          },
        ],
        op: 'AND',
      };

      const widget = getRateOverTimeWidgetData(
        mockDomainName,
        mockEndpointName,
        customFilters,
      );

      const queryData = widget.query.builder.queryData[0];

      // Verify domain filter is present
      expect(queryData?.filter?.expression).toContain(
        `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
      );

      // Verify custom filters are merged into the expression
      expect(queryData?.filter?.expression).toContain('service.name');
      expect(queryData?.filter?.expression).toContain('user-service');
      expect(queryData?.filter?.expression).toContain('deployment.environment');
      expect(queryData?.filter?.expression).toContain('production');
    });
  });

  describe('2. Latency Over Time - V5 Payload Structure', () => {
    it('generates V5 filter expression format (not V3 filters.items)', () => {
      const widget = getLatencyOverTimeWidgetData(
        mockDomainName,
        mockEndpointName,
        emptyFilters,
      );

      const queryData = widget.query.builder.queryData[0];

      // CRITICAL: Must use V5 format (filter.expression), not V3 format (filters.items)
      expect(queryData.filter).toBeDefined();
      expect(queryData?.filter?.expression).toBeDefined();
      expect(typeof queryData?.filter?.expression).toBe('string');

      // OLD V3 format should NOT exist
      expect(queryData).not.toHaveProperty('filters.items');
    });

    it('uses new domain filter format: (net.peer.name OR server.address)', () => {
      const widget = getLatencyOverTimeWidgetData(
        mockDomainName,
        mockEndpointName,
        emptyFilters,
      );

      const queryData = widget.query.builder.queryData[0];

      // Verify EXACT new filter format with OR operator
      expect(queryData.filter).toBeDefined();
      expect(queryData?.filter?.expression).toContain(
        `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
      );

      // Endpoint name is used in legend, not filter
      expect(queryData.legend).toContain('/api/users');
    });

    it('merges custom filters into filter expression', () => {
      const customFilters: IBuilderQuery['filters'] = {
        items: [
          {
            id: 'test-1',
            key: {
              key: 'service.name',
              dataType: DataTypes.String,
              type: 'resource',
            },
            op: '=',
            value: 'user-service',
          },
        ],
        op: 'AND',
      };

      const widget = getLatencyOverTimeWidgetData(
        mockDomainName,
        mockEndpointName,
        customFilters,
      );

      const queryData = widget.query.builder.queryData[0];

      // Verify domain filter is present
      expect(queryData?.filter?.expression).toContain(
        `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
      );
    });
  });
});

@@ -1,237 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
 * V5 Migration Tests for Status Code Bar Chart Queries
 *
 * These tests validate the migration to V5 format for the bar chart payloads
 * in getEndPointDetailsQueryPayload (5th and 6th payloads):
 * - Number of Calls Chart (count aggregation)
 * - Latency Chart (p99 aggregation)
 *
 * V5 Changes:
 * - Filter format change: filters.items[] → filter.expression
 * - Domain filter: (net.peer.name OR server.address)
 * - Kind filter: kind_string = 'Client'
 * - stepInterval: 60 → null
 * - Grouped by response_status_code
 */
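The bar-chart-specific V5 deltas listed above, condensed into one illustrative literal. The field selection is a sketch, not the full payload; the individual values match the expectations asserted in this file:

// What the V5 bar chart queries are expected to carry, per the assertions below.
const v5BarChartQuerySketch = {
  aggregations: [{ expression: 'count()' }], // the latency chart uses 'p99(duration_nano)'
  stepInterval: null, // was 60 before the migration
  groupBy: [{ key: 'response_status_code', dataType: 'string', type: 'span' }],
  filter: {
    expression:
      "(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0') AND kind_string = 'Client'",
  },
};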
import { TraceAggregation } from 'api/v5/v5';
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

describe('StatusCodeBarCharts - V5 Migration Validation', () => {
  const mockDomainName = '0.0.0.0';
  const mockStartTime = 1762573673000;
  const mockEndTime = 1762832873000;
  const emptyFilters: IBuilderQuery['filters'] = {
    items: [],
    op: 'AND',
  };

  describe('1. Number of Calls Chart - V5 Payload Structure', () => {
    it('generates correct V5 payload for count aggregation grouped by status code', () => {
      const payload = getEndPointDetailsQueryPayload(
        mockDomainName,
        mockStartTime,
        mockEndTime,
        emptyFilters,
      );

      // 5th payload (index 4) is the number of calls bar chart
      const callsChartQuery = payload[4];
      const queryA = callsChartQuery.query.builder.queryData[0];

      // V5 format: filter.expression (not filters.items)
      expect(queryA.filter).toBeDefined();
      expect(queryA.filter?.expression).toBeDefined();
      expect(typeof queryA.filter?.expression).toBe('string');
      expect(queryA).not.toHaveProperty('filters.items');

      // Base filter 1: Domain (net.peer.name OR server.address)
      expect(queryA.filter?.expression).toContain(
        `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
      );

      // Base filter 2: Kind
      expect(queryA.filter?.expression).toContain("kind_string = 'Client'");

      // Aggregation: count
      expect(queryA.queryName).toBe('A');
      expect(queryA.aggregateOperator).toBe('count');
      expect(queryA.disabled).toBe(false);

      // Grouped by response_status_code
      expect(queryA.groupBy).toContainEqual(
        expect.objectContaining({
          key: 'response_status_code',
          dataType: 'string',
          type: 'span',
        }),
      );

      // V5 critical: stepInterval should be null
      expect(queryA.stepInterval).toBeNull();

      // Time aggregation
      expect(queryA.timeAggregation).toBe('rate');
    });
  });

  describe('2. Latency Chart - V5 Payload Structure', () => {
    it('generates correct V5 payload for p99 aggregation grouped by status code', () => {
      const payload = getEndPointDetailsQueryPayload(
        mockDomainName,
        mockStartTime,
        mockEndTime,
        emptyFilters,
      );

      // 6th payload (index 5) is the latency bar chart
      const latencyChartQuery = payload[5];
      const queryA = latencyChartQuery.query.builder.queryData[0];

      // V5 format: filter.expression (not filters.items)
      expect(queryA.filter).toBeDefined();
      expect(queryA.filter?.expression).toBeDefined();
      expect(typeof queryA.filter?.expression).toBe('string');
      expect(queryA).not.toHaveProperty('filters.items');

      // Base filter 1: Domain (net.peer.name OR server.address)
      expect(queryA.filter?.expression).toContain(
        `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
      );

      // Base filter 2: Kind
      expect(queryA.filter?.expression).toContain("kind_string = 'Client'");

      // Aggregation: p99 on duration_nano
      expect(queryA.queryName).toBe('A');
      expect(queryA.aggregateOperator).toBe('p99');
      expect(queryA.aggregations?.[0]).toBeDefined();
      expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
        'p99(duration_nano)',
      );
      expect(queryA.disabled).toBe(false);

      // Grouped by response_status_code
      expect(queryA.groupBy).toContainEqual(
        expect.objectContaining({
          key: 'response_status_code',
          dataType: 'string',
          type: 'span',
        }),
      );

      // V5 critical: stepInterval should be null
      expect(queryA.stepInterval).toBeNull();

      // Time aggregation
      expect(queryA.timeAggregation).toBe('p99');
    });
  });

  describe('3. Custom Filters Integration', () => {
    it('merges custom filters into filter expression for both charts', () => {
      const customFilters: IBuilderQuery['filters'] = {
        items: [
          {
            id: 'test-1',
            key: {
              key: 'service.name',
              dataType: 'string' as any,
              type: 'resource',
            },
            op: '=',
            value: 'user-service',
          },
          {
            id: 'test-2',
            key: {
              key: 'deployment.environment',
              dataType: 'string' as any,
              type: 'resource',
            },
            op: '=',
            value: 'production',
          },
        ],
        op: 'AND',
      };

      const payload = getEndPointDetailsQueryPayload(
        mockDomainName,
        mockStartTime,
        mockEndTime,
        customFilters,
      );

      const callsChartQuery = payload[4];
      const latencyChartQuery = payload[5];

      const callsExpression =
        callsChartQuery.query.builder.queryData[0].filter?.expression;
      const latencyExpression =
        latencyChartQuery.query.builder.queryData[0].filter?.expression;

      // Both charts should have the same filter expression
      expect(callsExpression).toBe(latencyExpression);

      // Verify base filters
      expect(callsExpression).toContain('net.peer.name');
      expect(callsExpression).toContain("kind_string = 'Client'");

      // Verify custom filters are merged
      expect(callsExpression).toContain('service.name');
      expect(callsExpression).toContain('user-service');
      expect(callsExpression).toContain('deployment.environment');
      expect(callsExpression).toContain('production');
    });
  });

  describe('4. HTTP URL Filter Handling', () => {
    it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
      const filtersWithHttpUrl: IBuilderQuery['filters'] = {
        items: [
          {
            id: 'http-url-filter',
            key: {
              key: 'http.url',
              dataType: 'string' as any,
              type: 'tag',
            },
            op: '=',
            value: '/api/metrics',
          },
        ],
        op: 'AND',
      };

      const payload = getEndPointDetailsQueryPayload(
        mockDomainName,
        mockStartTime,
        mockEndTime,
        filtersWithHttpUrl,
      );

      const callsChartQuery = payload[4];
      const latencyChartQuery = payload[5];

      const callsExpression =
        callsChartQuery.query.builder.queryData[0].filter?.expression;
      const latencyExpression =
        latencyChartQuery.query.builder.queryData[0].filter?.expression;

      // CRITICAL: http.url converted to OR logic
      expect(callsExpression).toContain(
        "(http.url = '/api/metrics' OR url.full = '/api/metrics')",
      );
      expect(latencyExpression).toContain(
        "(http.url = '/api/metrics' OR url.full = '/api/metrics')",
      );

      // Base filters still present
      expect(callsExpression).toContain('net.peer.name');
      expect(callsExpression).toContain("kind_string = 'Client'");
    });
  });
});

@@ -1,226 +0,0 @@
-/* eslint-disable @typescript-eslint/no-explicit-any */
-/* eslint-disable sonarjs/no-duplicate-string */
-/**
-* V5 Migration Tests for Status Code Table Query
-*
-* These tests validate the migration from V4 to V5 format for the second payload
-* in getEndPointDetailsQueryPayload (status code table data):
-* - Filter format change: filters.items[] → filter.expression
-* - URL handling: Special logic for (http.url OR url.full)
-* - Domain filter: (net.peer.name OR server.address)
-* - Kind filter: kind_string = 'Client'
-* - Kind filter: response_status_code EXISTS
-* - Three queries: A (count), B (p99 latency), C (rate)
-* - All grouped by response_status_code
-*/
-import { TraceAggregation } from 'api/v5/v5';
-import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
-import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
-
-describe('StatusCodeTable - V5 Migration Validation', () => {
-const mockDomainName = 'api.example.com';
-const mockStartTime = 1000;
-const mockEndTime = 2000;
-const emptyFilters: IBuilderQuery['filters'] = {
-items: [],
-op: 'AND',
-};
-
-describe('1. V5 Format Migration with Base Filters', () => {
-it('migrates to V5 format with correct filter expression structure and base filters', () => {
-const payload = getEndPointDetailsQueryPayload(
-mockDomainName,
-mockStartTime,
-mockEndTime,
-emptyFilters,
-);
-
-// Second payload is the status code table query
-const statusCodeQuery = payload[1];
-const queryA = statusCodeQuery.query.builder.queryData[0];
-
-// CRITICAL V5 MIGRATION: filter.expression (not filters.items)
-expect(queryA.filter).toBeDefined();
-expect(queryA.filter?.expression).toBeDefined();
-expect(typeof queryA.filter?.expression).toBe('string');
-expect(queryA).not.toHaveProperty('filters.items');
-
-// Base filter 1: Domain (net.peer.name OR server.address)
-expect(queryA.filter?.expression).toContain(
-`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
-);
-
-// Base filter 2: Kind
-expect(queryA.filter?.expression).toContain("kind_string = 'Client'");
-
-// Base filter 3: response_status_code EXISTS
-expect(queryA.filter?.expression).toContain('response_status_code EXISTS');
-});
-});
-
-describe('2. Three Queries Structure and Consistency', () => {
-it('generates three queries (count, p99, rate) all grouped by response_status_code with identical filters', () => {
-const payload = getEndPointDetailsQueryPayload(
-mockDomainName,
-mockStartTime,
-mockEndTime,
-emptyFilters,
-);
-
-const statusCodeQuery = payload[1];
-const [queryA, queryB, queryC] = statusCodeQuery.query.builder.queryData;
-
-// Query A: Count
-expect(queryA.queryName).toBe('A');
-expect(queryA.aggregateOperator).toBe('count');
-expect(queryA.aggregations?.[0]).toBeDefined();
-expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
-'count(span_id)',
-);
-expect(queryA.disabled).toBe(false);
-
-// Query B: P99 Latency
-expect(queryB.queryName).toBe('B');
-expect(queryB.aggregateOperator).toBe('p99');
-expect((queryB.aggregations?.[0] as TraceAggregation)?.expression).toBe(
-'p99(duration_nano)',
-);
-expect(queryB.disabled).toBe(false);
-
-// Query C: Rate
-expect(queryC.queryName).toBe('C');
-expect(queryC.aggregateOperator).toBe('rate');
-expect(queryC.disabled).toBe(false);
-
-// All group by response_status_code
-[queryA, queryB, queryC].forEach((query) => {
-expect(query.groupBy).toContainEqual(
-expect.objectContaining({
-key: 'response_status_code',
-dataType: 'string',
-type: 'span',
-}),
-);
-});
-
-// CRITICAL: All have identical filter expressions
-expect(queryA.filter?.expression).toBe(queryB.filter?.expression);
-expect(queryB.filter?.expression).toBe(queryC.filter?.expression);
-});
-});
-
-describe('3. Custom Filters Integration', () => {
-it('merges custom filters into filter expression with AND logic', () => {
-const customFilters: IBuilderQuery['filters'] = {
-items: [
-{
-id: 'test-1',
-key: {
-key: 'service.name',
-dataType: 'string' as any,
-type: 'resource',
-},
-op: '=',
-value: 'user-service',
-},
-{
-id: 'test-2',
-key: {
-key: 'deployment.environment',
-dataType: 'string' as any,
-type: 'resource',
-},
-op: '=',
-value: 'production',
-},
-],
-op: 'AND',
-};
-
-const payload = getEndPointDetailsQueryPayload(
-mockDomainName,
-mockStartTime,
-mockEndTime,
-customFilters,
-);
-
-const statusCodeQuery = payload[1];
-const expression =
-statusCodeQuery.query.builder.queryData[0].filter?.expression;
-
-// Base filters present
-expect(expression).toContain('net.peer.name');
-expect(expression).toContain("kind_string = 'Client'");
-expect(expression).toContain('response_status_code EXISTS');
-
-// Custom filters merged
-expect(expression).toContain('service.name');
-expect(expression).toContain('user-service');
-expect(expression).toContain('deployment.environment');
-expect(expression).toContain('production');
-
-// All three queries have the same merged expression
-const queries = statusCodeQuery.query.builder.queryData;
-expect(queries[0].filter?.expression).toBe(queries[1].filter?.expression);
-expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
-});
-});
-
-describe('4. HTTP URL Filter Handling', () => {
-it('converts http.url filter to (http.url OR url.full) expression', () => {
-const filtersWithHttpUrl: IBuilderQuery['filters'] = {
-items: [
-{
-id: 'http-url-filter',
-key: {
-key: 'http.url',
-dataType: 'string' as any,
-type: 'tag',
-},
-op: '=',
-value: '/api/users',
-},
-{
-id: 'service-filter',
-key: {
-key: 'service.name',
-dataType: 'string' as any,
-type: 'resource',
-},
-op: '=',
-value: 'user-service',
-},
-],
-op: 'AND',
-};
-
-const payload = getEndPointDetailsQueryPayload(
-mockDomainName,
-mockStartTime,
-mockEndTime,
-filtersWithHttpUrl,
-);
-
-const statusCodeQuery = payload[1];
-const expression =
-statusCodeQuery.query.builder.queryData[0].filter?.expression;
-
-// CRITICAL: http.url converted to OR logic
-expect(expression).toContain(
-"(http.url = '/api/users' OR url.full = '/api/users')",
-);
-
-// Other filters still present
-expect(expression).toContain('service.name');
-expect(expression).toContain('user-service');
-
-// Base filters present
-expect(expression).toContain('net.peer.name');
-expect(expression).toContain("kind_string = 'Client'");
-expect(expression).toContain('response_status_code EXISTS');
-
-// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
-expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
-});
-});
-});
@@ -1,11 +1,9 @@
-import { BuilderQuery } from 'api/v5/v5';
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';

import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
-import { getTopErrorsQueryPayload } from '../utils';

// Mock the EndPointsDropDown component to avoid issues
jest.mock(
@@ -38,7 +36,6 @@ describe('TopErrors', () => {
const V5_QUERY_RANGE_API_PATH = '*/api/v5/query_range';

const mockProps = {
-// eslint-disable-next-line sonarjs/no-duplicate-string
domainName: 'test-domain',
timeRange: {
startTime: 1000000000,
@@ -308,14 +305,45 @@ describe('TopErrors', () => {
});

it('sends query_range v5 API call with required filters including has_error', async () => {
-// let capturedRequest: any;
+let capturedRequest: any;

-const topErrorsPayload = getTopErrorsQueryPayload(
-'test-domain',
-mockProps.timeRange.startTime,
-mockProps.timeRange.endTime,
-{ items: [], op: 'AND' },
-false,
+// Override the v5 API mock to capture the request
+server.use(
+rest.post(V5_QUERY_RANGE_API_PATH, async (req, res, ctx) => {
+capturedRequest = await req.json();
+return res(
+ctx.status(200),
+ctx.json({
+data: {
+data: {
+results: [
+{
+columns: [
+{
+name: 'http.url',
+fieldDataType: 'string',
+fieldContext: 'attribute',
+},
+{
+name: 'response_status_code',
+fieldDataType: 'string',
+fieldContext: 'span',
+},
+{
+name: 'status_message',
+fieldDataType: 'string',
+fieldContext: 'span',
+},
+{ name: 'count()', fieldDataType: 'int64', fieldContext: '' },
+],
+data: [['/api/test', '500', 'Internal Server Error', 10]],
+},
+],
+},
+},
+}),
+);
+}),
);

// eslint-disable-next-line react/jsx-props-no-spreading
@@ -323,18 +351,20 @@ describe('TopErrors', () => {

// Wait for the API call to be made
await waitFor(() => {
-expect(topErrorsPayload).toBeDefined();
+expect(capturedRequest).toBeDefined();
});

// Extract the filter expression from the captured request
-// getTopErrorsQueryPayload returns a builder_query with TraceBuilderQuery spec
-const builderQuery = topErrorsPayload.compositeQuery.queries[0]
-.spec as BuilderQuery;
-const filterExpression = builderQuery.filter?.expression;
+const filterExpression =
+capturedRequest.compositeQuery.queries[0].spec.filter.expression;

// Verify all required filters are present
+expect(filterExpression).toContain(`kind_string = 'Client'`);
+expect(filterExpression).toContain(`(http.url EXISTS OR url.full EXISTS)`);
expect(filterExpression).toContain(
-`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
+`(net.peer.name = 'test-domain' OR server.address = 'test-domain')`,
);
+expect(filterExpression).toContain(`has_error = true`);
+expect(filterExpression).toContain(`status_message EXISTS`); // toggle is on by default
});
});
File diff suppressed because it is too large
@@ -112,8 +112,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
setShowPaymentFailedWarning,
] = useState<boolean>(false);

-const errorBoundaryRef = useRef<Sentry.ErrorBoundary>(null);

const [showSlowApiWarning, setShowSlowApiWarning] = useState(false);
const [slowApiWarningShown, setSlowApiWarningShown] = useState(false);

@@ -380,13 +378,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
getChangelogByVersionResponse.isSuccess,
]);

-// reset error boundary on route change
-useEffect(() => {
-if (errorBoundaryRef.current) {
-errorBoundaryRef.current.resetErrorBoundary();
-}
-}, [pathname]);

const isToDisplayLayout = isLoggedIn;

const routeKey = useMemo(() => getRouteKey(pathname), [pathname]);
@@ -845,10 +836,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
})}
data-overlayscrollbars-initialize
>
-<Sentry.ErrorBoundary
-fallback={<ErrorBoundaryFallback />}
-ref={errorBoundaryRef}
->
+<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<LayoutContent data-overlayscrollbars-initialize>
<OverlayScrollbar>
<ChildrenContainer>
@@ -17,7 +17,6 @@ function ExplorerOptionWrapper({
isOneChartPerQuery,
splitedQueries,
signalSource,
-handleChangeSelectedView,
}: ExplorerOptionsWrapperProps): JSX.Element {
const [isExplorerOptionHidden, setIsExplorerOptionHidden] = useState(false);

@@ -39,7 +38,6 @@ function ExplorerOptionWrapper({
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
isOneChartPerQuery={isOneChartPerQuery}
splitedQueries={splitedQueries}
-handleChangeSelectedView={handleChangeSelectedView}
/>
);
}
@@ -72,11 +72,10 @@ import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { ViewProps } from 'types/api/saveViews/types';
import { DataSource, StringOperators } from 'types/common/queryBuilder';
import { USER_ROLES } from 'types/roles';
-import { panelTypeToExplorerView } from 'utils/explorerUtils';

import { PreservedViewsTypes } from './constants';
import ExplorerOptionsHideArea from './ExplorerOptionsHideArea';
-import { ChangeViewFunctionType, PreservedViewsInLocalStorage } from './types';
+import { PreservedViewsInLocalStorage } from './types';
import {
DATASOURCE_VS_ROUTES,
generateRGBAFromHex,
@@ -99,7 +98,6 @@ function ExplorerOptions({
setIsExplorerOptionHidden,
isOneChartPerQuery = false,
splitedQueries = [],
-handleChangeSelectedView,
}: ExplorerOptionsProps): JSX.Element {
const [isExport, setIsExport] = useState<boolean>(false);
const [isSaveModalOpen, setIsSaveModalOpen] = useState(false);
@@ -414,22 +412,13 @@ function ExplorerOptions({
if (!currentViewDetails) return;
const { query, name, id, panelType: currentPanelType } = currentViewDetails;

-if (handleChangeSelectedView) {
-handleChangeSelectedView(panelTypeToExplorerView[currentPanelType], {
-query,
-name,
-id,
-});
-} else {
-// to remove this after traces cleanup
-handleExplorerTabChange(currentPanelType, {
-query,
-name,
-id,
-});
-}
+handleExplorerTabChange(currentPanelType, {
+query,
+name,
+id,
+});
},
-[viewsData, handleExplorerTabChange, handleChangeSelectedView],
+[viewsData, handleExplorerTabChange],
);

const updatePreservedViewInLocalStorage = (option: {
@@ -535,10 +524,6 @@ function ExplorerOptions({
return;
}

-if (handleChangeSelectedView) {
-handleChangeSelectedView(panelTypeToExplorerView[PANEL_TYPES.LIST]);
-}

history.replace(DATASOURCE_VS_ROUTES[sourcepage]);
};

@@ -1035,7 +1020,6 @@ export interface ExplorerOptionsProps {
setIsExplorerOptionHidden?: Dispatch<SetStateAction<boolean>>;
isOneChartPerQuery?: boolean;
splitedQueries?: Query[];
-handleChangeSelectedView?: ChangeViewFunctionType;
}

ExplorerOptions.defaultProps = {
@@ -1045,7 +1029,6 @@ ExplorerOptions.defaultProps = {
isOneChartPerQuery: false,
splitedQueries: [],
signalSource: '',
-handleChangeSelectedView: undefined,
};

export default ExplorerOptions;
@@ -2,8 +2,6 @@ import { NotificationInstance } from 'antd/es/notification/interface';
import { AxiosResponse } from 'axios';
import { SaveViewWithNameProps } from 'components/ExplorerCard/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
-import { ICurrentQueryData } from 'hooks/useHandleExplorerTabChange';
-import { ExplorerViews } from 'pages/LogsExplorer/utils';
import { Dispatch, SetStateAction } from 'react';
import { UseMutateAsyncFunction } from 'react-query';
import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery';
@@ -40,8 +38,3 @@ export type PreservedViewType =
export type PreservedViewsInLocalStorage = Partial<
Record<PreservedViewType, { key: string; value: string }>
>;
-
-export type ChangeViewFunctionType = (
-view: ExplorerViews,
-querySearchParameters?: ICurrentQueryData,
-) => void;
@@ -49,29 +49,17 @@ function GridTableComponent({
panelType,
queryRangeRequest,
decimalPrecision,
-hiddenColumns = [],
...props
}: GridTableComponentProps): JSX.Element {
const { t } = useTranslation(['valueGraph']);

// create columns and dataSource in the ui friendly structure
// use the query from the widget here to extract the legend information
-const { columns: allColumns, dataSource: originalDataSource } = useMemo(
+const { columns, dataSource: originalDataSource } = useMemo(
() => createColumnsAndDataSource((data as unknown) as TableData, query),
[query, data],
);

-// Filter out hidden columns from being displayed
-const columns = useMemo(
-() =>
-allColumns.filter(
-(column) =>
-!('dataIndex' in column) ||
-!hiddenColumns.includes(column.dataIndex as string),
-),
-[allColumns, hiddenColumns],
-);
-
const createDataInCorrectFormat = useCallback(
(dataSource: RowData[]): RowData[] =>
dataSource.map((d) => {
@@ -30,7 +30,6 @@ export type GridTableComponentProps = {
contextLinks?: ContextLinksData;
panelType?: PANEL_TYPES;
queryRangeRequest?: QueryRangeRequestV5;
-hiddenColumns?: string[];
} & Pick<LogsExplorerTableProps, 'data'> &
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;
@@ -11,6 +11,10 @@ import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
+import {
+LOG_FIELD_BODY_KEY,
+LOG_FIELD_TIMESTAMP_KEY,
+} from 'lib/logs/flatLogData';
import { useCallback, useEffect, useMemo } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso } from 'react-virtuoso';
@@ -75,11 +79,15 @@ function EntityLogs({
dataType: 'string',
type: '',
name: 'body',
+displayName: 'Body',
+key: LOG_FIELD_BODY_KEY,
},
{
dataType: 'string',
type: '',
name: 'timestamp',
+displayName: 'Timestamp',
+key: LOG_FIELD_TIMESTAMP_KEY,
},
]}
/>
@@ -68,7 +68,7 @@
|
|||||||
.template-list-item {
|
.template-list-item {
|
||||||
display: flex;
|
display: flex;
|
||||||
gap: 8px;
|
gap: 8px;
|
||||||
padding: 8px 12px;
|
padding: 4px 12px;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
height: 32px;
|
height: 32px;
|
||||||
@@ -76,10 +76,8 @@
|
|||||||
|
|
||||||
.template-icon {
|
.template-icon {
|
||||||
display: flex;
|
display: flex;
|
||||||
height: 20px;
|
height: 14px;
|
||||||
width: 20px;
|
width: 14px;
|
||||||
border-radius: 2px;
|
|
||||||
padding: 4px;
|
|
||||||
align-items: center;
|
align-items: center;
|
||||||
justify-content: center;
|
justify-content: center;
|
||||||
}
|
}
|
||||||
@@ -99,17 +97,6 @@
|
|||||||
&.active {
|
&.active {
|
||||||
border-radius: 3px;
|
border-radius: 3px;
|
||||||
background: rgba(171, 189, 255, 0.08);
|
background: rgba(171, 189, 255, 0.08);
|
||||||
position: relative;
|
|
||||||
|
|
||||||
&::before {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
top: 0;
|
|
||||||
bottom: 0;
|
|
||||||
left: 0;
|
|
||||||
width: 2px;
|
|
||||||
background: var(--bg-robin-500);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -172,38 +159,18 @@
|
|||||||
display: flex;
|
display: flex;
|
||||||
justify-content: center;
|
justify-content: center;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
padding: 16px;
|
margin: 24px;
|
||||||
height: calc(100% - 144px);
|
height: calc(100% - 144px);
|
||||||
position: relative;
|
position: relative;
|
||||||
|
|
||||||
&-container {
|
|
||||||
position: relative;
|
|
||||||
width: 100%;
|
|
||||||
max-width: 100%;
|
|
||||||
padding: 48px 24px;
|
|
||||||
border-radius: 4px;
|
|
||||||
border: 1px solid var(--bg-ink-50);
|
|
||||||
background: linear-gradient(98.66deg, #7a97fa 4.42%, #f977ff 96.6%);
|
|
||||||
max-height: 100%;
|
|
||||||
|
|
||||||
&::before {
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
inset: 0;
|
|
||||||
background: url('/public/Images/grains.png');
|
|
||||||
background-size: contain;
|
|
||||||
background-repeat: repeat;
|
|
||||||
opacity: 0.1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
img {
|
img {
|
||||||
position: relative;
|
|
||||||
width: 100%;
|
width: 100%;
|
||||||
max-width: 100%;
|
max-width: 100%;
|
||||||
max-height: 540px;
|
padding: 24px;
|
||||||
|
border: 1px solid var(--bg-ink-50);
|
||||||
|
background: var(--bg-ink-300);
|
||||||
|
max-height: 100%;
|
||||||
object-fit: contain;
|
object-fit: contain;
|
||||||
border-radius: 4px;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,6 @@
|
|||||||
/* eslint-disable @typescript-eslint/explicit-function-return-type */
|
/* eslint-disable @typescript-eslint/explicit-function-return-type */
|
||||||
import './DashboardTemplatesModal.styles.scss';
|
import './DashboardTemplatesModal.styles.scss';
|
||||||
|
|
||||||
import { Color } from '@signozhq/design-tokens';
|
|
||||||
import { Button, Input, Modal, Typography } from 'antd';
|
import { Button, Input, Modal, Typography } from 'antd';
|
||||||
import ApacheIcon from 'assets/CustomIcons/ApacheIcon';
|
import ApacheIcon from 'assets/CustomIcons/ApacheIcon';
|
||||||
import DockerIcon from 'assets/CustomIcons/DockerIcon';
|
import DockerIcon from 'assets/CustomIcons/DockerIcon';
|
||||||
@@ -17,14 +16,7 @@ import NginxIcon from 'assets/CustomIcons/NginxIcon';
|
|||||||
import PostgreSQLIcon from 'assets/CustomIcons/PostgreSQLIcon';
|
import PostgreSQLIcon from 'assets/CustomIcons/PostgreSQLIcon';
|
||||||
import RedisIcon from 'assets/CustomIcons/RedisIcon';
|
import RedisIcon from 'assets/CustomIcons/RedisIcon';
|
||||||
import cx from 'classnames';
|
import cx from 'classnames';
|
||||||
import {
|
import { ConciergeBell, DraftingCompass, Drill, Plus, X } from 'lucide-react';
|
||||||
ConciergeBell,
|
|
||||||
DraftingCompass,
|
|
||||||
Drill,
|
|
||||||
Plus,
|
|
||||||
Search,
|
|
||||||
X,
|
|
||||||
} from 'lucide-react';
|
|
||||||
import { ChangeEvent, useState } from 'react';
|
import { ChangeEvent, useState } from 'react';
|
||||||
import { DashboardTemplate } from 'types/api/dashboard/getAll';
|
import { DashboardTemplate } from 'types/api/dashboard/getAll';
|
||||||
|
|
||||||
@@ -170,9 +162,7 @@ export default function DashboardTemplatesModal({
|
|||||||
<div className="new-dashboard-templates-list">
|
<div className="new-dashboard-templates-list">
|
||||||
<Input
|
<Input
|
||||||
className="new-dashboard-templates-search"
|
className="new-dashboard-templates-search"
|
||||||
placeholder="Search..."
|
placeholder="🔍 Search..."
|
||||||
size="middle"
|
|
||||||
prefix={<Search size={12} color={Color.TEXT_VANILLA_400} />}
|
|
||||||
onChange={handleDashboardTemplateSearch}
|
onChange={handleDashboardTemplateSearch}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
@@ -222,12 +212,10 @@ export default function DashboardTemplatesModal({
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="template-preview-image">
|
<div className="template-preview-image">
|
||||||
<div className="template-preview-image-container">
|
<img
|
||||||
<img
|
src={selectedDashboardTemplate.previewImage}
|
||||||
src={selectedDashboardTemplate.previewImage}
|
alt={`${selectedDashboardTemplate.name}-preview`}
|
||||||
alt={`${selectedDashboardTemplate.name}-preview`}
|
/>
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -7,9 +7,10 @@ import './DashboardList.styles.scss';
|
|||||||
import { Color } from '@signozhq/design-tokens';
|
import { Color } from '@signozhq/design-tokens';
|
||||||
import {
|
import {
|
||||||
Button,
|
Button,
|
||||||
|
Dropdown,
|
||||||
Flex,
|
Flex,
|
||||||
Input,
|
Input,
|
||||||
// MenuProps,
|
MenuProps,
|
||||||
Modal,
|
Modal,
|
||||||
Popover,
|
Popover,
|
||||||
Skeleton,
|
Skeleton,
|
||||||
@@ -46,14 +47,14 @@ import {
|
|||||||
Ellipsis,
|
Ellipsis,
|
||||||
EllipsisVertical,
|
EllipsisVertical,
|
||||||
Expand,
|
Expand,
|
||||||
// ExternalLink,
|
ExternalLink,
|
||||||
FileJson,
|
FileJson,
|
||||||
// Github,
|
Github,
|
||||||
HdmiPort,
|
HdmiPort,
|
||||||
// LayoutGrid,
|
LayoutGrid,
|
||||||
Link2,
|
Link2,
|
||||||
Plus,
|
Plus,
|
||||||
// Radius,
|
Radius,
|
||||||
RotateCw,
|
RotateCw,
|
||||||
Search,
|
Search,
|
||||||
SquareArrowOutUpRight,
|
SquareArrowOutUpRight,
|
||||||
@@ -70,6 +71,7 @@ import {
|
|||||||
Key,
|
Key,
|
||||||
useCallback,
|
useCallback,
|
||||||
useEffect,
|
useEffect,
|
||||||
|
useMemo,
|
||||||
useRef,
|
useRef,
|
||||||
useState,
|
useState,
|
||||||
} from 'react';
|
} from 'react';
|
||||||
@@ -595,61 +597,61 @@ function DashboardsList(): JSX.Element {
|
|||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
// const getCreateDashboardItems = useMemo(() => {
|
const getCreateDashboardItems = useMemo(() => {
|
||||||
// const menuItems: MenuProps['items'] = [
|
const menuItems: MenuProps['items'] = [
|
||||||
// {
|
{
|
||||||
// label: (
|
label: (
|
||||||
// <div
|
<div
|
||||||
// className="create-dashboard-menu-item"
|
className="create-dashboard-menu-item"
|
||||||
// onClick={(): void => onModalHandler(false)}
|
onClick={(): void => onModalHandler(false)}
|
||||||
// >
|
>
|
||||||
// <Radius size={14} /> Import JSON
|
<Radius size={14} /> Import JSON
|
||||||
// </div>
|
</div>
|
||||||
// ),
|
),
|
||||||
// key: '1',
|
key: '1',
|
||||||
// },
|
},
|
||||||
// {
|
{
|
||||||
// label: (
|
label: (
|
||||||
// <a
|
<a
|
||||||
// href="https://signoz.io/docs/dashboards/dashboard-templates/overview/"
|
href="https://signoz.io/docs/dashboards/dashboard-templates/overview/"
|
||||||
// target="_blank"
|
target="_blank"
|
||||||
// rel="noopener noreferrer"
|
rel="noopener noreferrer"
|
||||||
// >
|
>
|
||||||
// <Flex
|
<Flex
|
||||||
// justify="space-between"
|
justify="space-between"
|
||||||
// align="center"
|
align="center"
|
||||||
// style={{ width: '100%' }}
|
style={{ width: '100%' }}
|
||||||
// gap="small"
|
gap="small"
|
||||||
// >
|
>
|
||||||
// <div className="create-dashboard-menu-item">
|
<div className="create-dashboard-menu-item">
|
||||||
// <Github size={14} /> View templates
|
<Github size={14} /> View templates
|
||||||
// </div>
|
</div>
|
||||||
// <ExternalLink size={14} />
|
<ExternalLink size={14} />
|
||||||
// </Flex>
|
</Flex>
|
||||||
// </a>
|
</a>
|
||||||
// ),
|
),
|
||||||
// key: '2',
|
key: '2',
|
||||||
// },
|
},
|
||||||
// ];
|
];
|
||||||
|
|
||||||
// if (createNewDashboard) {
|
if (createNewDashboard) {
|
||||||
// menuItems.unshift({
|
menuItems.unshift({
|
||||||
// label: (
|
label: (
|
||||||
// <div
|
<div
|
||||||
// className="create-dashboard-menu-item"
|
className="create-dashboard-menu-item"
|
||||||
// onClick={(): void => {
|
onClick={(): void => {
|
||||||
// onNewDashboardHandler();
|
onNewDashboardHandler();
|
||||||
// }}
|
}}
|
||||||
// >
|
>
|
||||||
// <LayoutGrid size={14} /> Create dashboard
|
<LayoutGrid size={14} /> Create dashboard
|
||||||
// </div>
|
</div>
|
||||||
// ),
|
),
|
||||||
// key: '0',
|
key: '0',
|
||||||
// });
|
});
|
||||||
// }
|
}
|
||||||
|
|
||||||
// return menuItems;
|
return menuItems;
|
||||||
// }, [createNewDashboard, onNewDashboardHandler]);
|
}, [createNewDashboard, onNewDashboardHandler]);
|
||||||
|
|
||||||
const showPaginationItem = (total: number, range: number[]): JSX.Element => (
|
const showPaginationItem = (total: number, range: number[]): JSX.Element => (
|
||||||
<>
|
<>
|
||||||
@@ -761,16 +763,23 @@ function DashboardsList(): JSX.Element {
|
|||||||
|
|
||||||
{createNewDashboard && (
|
{createNewDashboard && (
|
||||||
<section className="actions">
|
<section className="actions">
|
||||||
<Button
|
<Dropdown
|
||||||
type="text"
|
overlayClassName="new-dashboard-menu"
|
||||||
className="new-dashboard"
|
menu={{ items: getCreateDashboardItems }}
|
||||||
icon={<Plus size={14} />}
|
placement="bottomRight"
|
||||||
onClick={(): void => {
|
trigger={['click']}
|
||||||
logEvent('Dashboard List: New dashboard clicked', {});
|
|
||||||
}}
|
|
||||||
>
|
>
|
||||||
New Dashboard
|
<Button
|
||||||
</Button>
|
type="text"
|
||||||
|
className="new-dashboard"
|
||||||
|
icon={<Plus size={14} />}
|
||||||
|
onClick={(): void => {
|
||||||
|
logEvent('Dashboard List: New dashboard clicked', {});
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
New Dashboard
|
||||||
|
</Button>
|
||||||
|
</Dropdown>
|
||||||
<Button
|
<Button
|
||||||
type="text"
|
type="text"
|
||||||
className="learn-more"
|
className="learn-more"
|
||||||
@@ -798,17 +807,23 @@ function DashboardsList(): JSX.Element {
|
|||||||
onChange={handleSearch}
|
onChange={handleSearch}
|
||||||
/>
|
/>
|
||||||
{createNewDashboard && (
|
{createNewDashboard && (
|
||||||
<Button
|
<Dropdown
|
||||||
type="primary"
|
overlayClassName="new-dashboard-menu"
|
||||||
className="periscope-btn primary btn"
|
menu={{ items: getCreateDashboardItems }}
|
||||||
icon={<Plus size={14} />}
|
placement="bottomRight"
|
||||||
onClick={(): void => {
|
trigger={['click']}
|
||||||
logEvent('Dashboard List: New dashboard clicked', {});
|
|
||||||
setShowNewDashboardTemplatesModal(true);
|
|
||||||
}}
|
|
||||||
>
|
>
|
||||||
New dashboard
|
<Button
|
||||||
</Button>
|
type="primary"
|
||||||
|
className="periscope-btn primary btn"
|
||||||
|
icon={<Plus size={14} />}
|
||||||
|
onClick={(): void => {
|
||||||
|
logEvent('Dashboard List: New dashboard clicked', {});
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
New dashboard
|
||||||
|
</Button>
|
||||||
|
</Dropdown>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ import InfinityTableView from 'container/LogsExplorerList/InfinityTableView';
import { InfinityWrapperStyled } from 'container/LogsExplorerList/styles';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { useOptionsMenu } from 'container/OptionsMenu';
-import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useEventSource } from 'providers/EventSource';
@@ -57,10 +56,7 @@ function LiveLogsList({ logs, isLoading }: LiveLogsListProps): JSX.Element {
[formattedLogs, activeLogId],
);

-const selectedFields = convertKeysToColumnFields([
-...defaultLogsSelectedColumns,
-...options.selectColumns,
-]);
+const selectedFields = convertKeysToColumnFields(options.selectColumns);

const getItemContent = useCallback(
(_: number, log: ILog): JSX.Element => {
@@ -6,6 +6,7 @@ import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQuery
|
|||||||
import { logsQueryRangeEmptyResponse } from 'mocks-server/__mockdata__/logs_query_range';
|
import { logsQueryRangeEmptyResponse } from 'mocks-server/__mockdata__/logs_query_range';
|
||||||
import { server } from 'mocks-server/server';
|
import { server } from 'mocks-server/server';
|
||||||
import { rest } from 'msw';
|
import { rest } from 'msw';
|
||||||
|
import { ExplorerViews } from 'pages/LogsExplorer/utils';
|
||||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||||
import { QueryBuilderContext } from 'providers/QueryBuilder';
|
import { QueryBuilderContext } from 'providers/QueryBuilder';
|
||||||
import { render, screen } from 'tests/test-utils';
|
import { render, screen } from 'tests/test-utils';
|
||||||
@@ -121,12 +122,12 @@ describe('LogsExplorerList - empty states', () => {
|
|||||||
<QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
|
<QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
|
||||||
<PreferenceContextProvider>
|
<PreferenceContextProvider>
|
||||||
<LogsExplorerViews
|
<LogsExplorerViews
|
||||||
|
selectedView={ExplorerViews.LIST}
|
||||||
setIsLoadingQueries={(): void => {}}
|
setIsLoadingQueries={(): void => {}}
|
||||||
listQueryKeyRef={{ current: {} }}
|
listQueryKeyRef={{ current: {} }}
|
||||||
chartQueryKeyRef={{ current: {} }}
|
chartQueryKeyRef={{ current: {} }}
|
||||||
setWarning={(): void => {}}
|
setWarning={(): void => {}}
|
||||||
showLiveLogs={false}
|
showLiveLogs={false}
|
||||||
handleChangeSelectedView={(): void => {}}
|
|
||||||
/>
|
/>
|
||||||
</PreferenceContextProvider>
|
</PreferenceContextProvider>
|
||||||
</QueryBuilderContext.Provider>,
|
</QueryBuilderContext.Provider>,
|
||||||
@@ -186,12 +187,12 @@ describe('LogsExplorerList - empty states', () => {
|
|||||||
<QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
|
<QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
|
||||||
<PreferenceContextProvider>
|
<PreferenceContextProvider>
|
||||||
<LogsExplorerViews
|
<LogsExplorerViews
|
||||||
|
selectedView={ExplorerViews.LIST}
|
||||||
setIsLoadingQueries={(): void => {}}
|
setIsLoadingQueries={(): void => {}}
|
||||||
listQueryKeyRef={{ current: {} }}
|
listQueryKeyRef={{ current: {} }}
|
||||||
chartQueryKeyRef={{ current: {} }}
|
chartQueryKeyRef={{ current: {} }}
|
||||||
setWarning={(): void => {}}
|
setWarning={(): void => {}}
|
||||||
showLiveLogs={false}
|
showLiveLogs={false}
|
||||||
handleChangeSelectedView={(): void => {}}
|
|
||||||
/>
|
/>
|
||||||
</PreferenceContextProvider>
|
</PreferenceContextProvider>
|
||||||
</QueryBuilderContext.Provider>,
|
</QueryBuilderContext.Provider>,
|
||||||
|
|||||||
@@ -13,7 +13,9 @@ export const convertKeysToColumnFields = (
|
|||||||
.filter((item) => !isEmpty(item.name))
|
.filter((item) => !isEmpty(item.name))
|
||||||
.map((item) => ({
|
.map((item) => ({
|
||||||
dataType: item.fieldDataType ?? '',
|
dataType: item.fieldDataType ?? '',
|
||||||
name: item.name,
|
name: item.name ?? '',
|
||||||
|
key: item.key ?? '',
|
||||||
|
displayName: item.displayName ?? '',
|
||||||
type: item.fieldContext ?? '',
|
type: item.fieldContext ?? '',
|
||||||
}));
|
}));
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,210 +0,0 @@
|
|||||||
import {
|
|
||||||
initialQueryBuilderFormValues,
|
|
||||||
OPERATORS,
|
|
||||||
PANEL_TYPES,
|
|
||||||
} from 'constants/queryBuilder';
|
|
||||||
import { getPaginationQueryDataV2 } from 'lib/newQueryBuilder/getPaginationQueryData';
|
|
||||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
|
||||||
import {
|
|
||||||
IBuilderQuery,
|
|
||||||
Query,
|
|
||||||
TagFilter,
|
|
||||||
} from 'types/api/queryBuilder/queryBuilderData';
|
|
||||||
import { Filter } from 'types/api/v5/queryRange';
|
|
||||||
import { LogsAggregatorOperator } from 'types/common/queryBuilder';
|
|
||||||
import { v4 } from 'uuid';
|
|
||||||
|
|
||||||
export const getListQuery = (
|
|
||||||
stagedQuery: Query | null,
|
|
||||||
): IBuilderQuery | null => {
|
|
||||||
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
|
|
||||||
|
|
||||||
return stagedQuery.builder.queryData[0] ?? null;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getFrequencyChartData = (
|
|
||||||
stagedQuery: Query | null,
|
|
||||||
activeLogId: string | null,
|
|
||||||
): Query | null => {
|
|
||||||
if (!stagedQuery) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
const baseFirstQuery = getListQuery(stagedQuery);
|
|
||||||
|
|
||||||
if (!baseFirstQuery) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
let updatedFilterExpression = baseFirstQuery.filter?.expression || '';
|
|
||||||
if (activeLogId) {
|
|
||||||
updatedFilterExpression = `${updatedFilterExpression} id <= '${activeLogId}'`.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
const modifiedQueryData: IBuilderQuery = {
|
|
||||||
...baseFirstQuery,
|
|
||||||
disabled: false,
|
|
||||||
aggregateOperator: LogsAggregatorOperator.COUNT,
|
|
||||||
filter: {
|
|
||||||
...baseFirstQuery.filter,
|
|
||||||
expression: updatedFilterExpression || '',
|
|
||||||
},
|
|
||||||
...(activeLogId && {
|
|
||||||
filters: {
|
|
||||||
...baseFirstQuery.filters,
|
|
||||||
items: [
|
|
||||||
...(baseFirstQuery?.filters?.items || []),
|
|
||||||
{
|
|
||||||
id: v4(),
|
|
||||||
key: {
|
|
||||||
key: 'id',
|
|
||||||
type: '',
|
|
||||||
dataType: DataTypes.String,
|
|
||||||
},
|
|
||||||
op: OPERATORS['<='],
|
|
||||||
value: activeLogId,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
op: 'AND',
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
groupBy: [
|
|
||||||
{
|
|
||||||
key: 'severity_text',
|
|
||||||
dataType: DataTypes.String,
|
|
||||||
type: '',
|
|
||||||
id: 'severity_text--string----true',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
legend: '{{severity_text}}',
|
|
||||||
orderBy: [],
|
|
||||||
having: {
|
|
||||||
expression: '',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
const modifiedQuery: Query = {
|
|
||||||
...stagedQuery,
|
|
||||||
builder: {
|
|
||||||
...stagedQuery.builder,
|
|
||||||
queryData: [modifiedQueryData], // single query data required for list chart
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
return modifiedQuery;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getQueryByPanelType = (
|
|
||||||
query: Query | null,
|
|
||||||
selectedPanelType: PANEL_TYPES,
|
|
||||||
params: {
|
|
||||||
page?: number;
|
|
||||||
pageSize?: number;
|
|
||||||
filters?: TagFilter;
|
|
||||||
filter?: Filter;
|
|
||||||
activeLogId?: string | null;
|
|
||||||
orderBy?: string;
|
|
||||||
},
|
|
||||||
): Query | null => {
|
|
||||||
if (!query) return null;
|
|
||||||
|
|
||||||
let queryData: IBuilderQuery[] = query.builder.queryData.map((item) => ({
|
|
||||||
...item,
|
|
||||||
}));
|
|
||||||
|
|
||||||
if (selectedPanelType === PANEL_TYPES.LIST) {
|
|
||||||
const { activeLogId = null, orderBy = 'timestamp:desc' } = params;
|
|
||||||
|
|
||||||
const paginateData = getPaginationQueryDataV2({
|
|
||||||
page: params.page ?? 1,
|
|
||||||
pageSize: params.pageSize ?? 10,
|
|
||||||
});
|
|
||||||
|
|
||||||
let updatedFilters = params.filters;
|
|
||||||
let updatedFilterExpression = params.filter?.expression || '';
|
|
||||||
if (activeLogId) {
|
|
||||||
updatedFilters = {
|
|
||||||
...params.filters,
|
|
||||||
items: [
|
|
||||||
...(params.filters?.items || []),
|
|
||||||
{
|
|
||||||
id: v4(),
|
|
||||||
key: {
|
|
||||||
key: 'id',
|
|
||||||
type: '',
|
|
||||||
dataType: DataTypes.String,
|
|
||||||
},
|
|
||||||
op: OPERATORS['<='],
|
|
||||||
value: activeLogId,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
op: 'AND',
|
|
||||||
};
|
|
||||||
updatedFilterExpression = `${updatedFilterExpression} id <= '${activeLogId}'`.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create orderBy array based on orderDirection
|
|
||||||
const [columnName, order] = orderBy.split(':');
|
|
||||||
|
|
||||||
const newOrderBy = [
|
|
||||||
{ columnName: columnName || 'timestamp', order: order || 'desc' },
|
|
||||||
{ columnName: 'id', order: order || 'desc' },
|
|
||||||
];
|
|
||||||
|
|
||||||
queryData = [
|
|
||||||
{
|
|
||||||
...(getListQuery(query) || initialQueryBuilderFormValues),
|
|
||||||
...paginateData,
|
|
||||||
...(updatedFilters ? { filters: updatedFilters } : {}),
|
|
||||||
filter: { expression: updatedFilterExpression || '' },
|
|
||||||
groupBy: [],
|
|
||||||
having: {
|
|
||||||
expression: '',
|
|
||||||
},
|
|
||||||
orderBy: newOrderBy,
|
|
||||||
disabled: false,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
}
|
|
||||||
|
|
||||||
const data: Query = {
|
|
||||||
...query,
|
|
||||||
builder: {
|
|
||||||
...query.builder,
|
|
||||||
queryData,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
return data;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getExportQueryData = (
|
|
||||||
query: Query | null,
|
|
||||||
panelType: PANEL_TYPES,
|
|
||||||
): Query | null => {
|
|
||||||
if (!query) return null;
|
|
||||||
|
|
||||||
if (panelType === PANEL_TYPES.LIST) {
|
|
||||||
const listQuery = getListQuery(query);
|
|
||||||
if (!listQuery) return null;
|
|
||||||
|
|
||||||
return {
|
|
||||||
...query,
|
|
||||||
builder: {
|
|
||||||
...query.builder,
|
|
||||||
queryData: [
|
|
||||||
{
|
|
||||||
...listQuery,
|
|
||||||
orderBy: [
|
|
||||||
{
|
|
||||||
columnName: 'timestamp',
|
|
||||||
order: 'desc',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
limit: null,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return query;
|
|
||||||
};
|
|
||||||
@@ -11,29 +11,29 @@ import { QueryParams } from 'constants/query';
|
|||||||
import {
|
import {
|
||||||
initialFilters,
|
initialFilters,
|
||||||
initialQueriesMap,
|
initialQueriesMap,
|
||||||
|
initialQueryBuilderFormValues,
|
||||||
|
OPERATORS,
|
||||||
PANEL_TYPES,
|
PANEL_TYPES,
|
||||||
} from 'constants/queryBuilder';
|
} from 'constants/queryBuilder';
|
||||||
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
|
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
|
||||||
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
|
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
|
||||||
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
|
|
||||||
import GoToTop from 'container/GoToTop';
|
import GoToTop from 'container/GoToTop';
|
||||||
|
import {} from 'container/LiveLogs/constants';
|
||||||
import LogsExplorerChart from 'container/LogsExplorerChart';
|
import LogsExplorerChart from 'container/LogsExplorerChart';
|
||||||
import LogsExplorerList from 'container/LogsExplorerList';
|
import LogsExplorerList from 'container/LogsExplorerList';
|
||||||
import LogsExplorerTable from 'container/LogsExplorerTable';
|
import LogsExplorerTable from 'container/LogsExplorerTable';
|
||||||
import {
|
|
||||||
getExportQueryData,
|
|
||||||
getFrequencyChartData,
|
|
||||||
getListQuery,
|
|
||||||
getQueryByPanelType,
|
|
||||||
} from 'container/LogsExplorerViews/explorerUtils';
|
|
||||||
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
|
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
|
||||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||||
import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange';
|
import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange';
|
||||||
|
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
|
||||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||||
|
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
|
||||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||||
import useUrlQueryData from 'hooks/useUrlQueryData';
|
import useUrlQueryData from 'hooks/useUrlQueryData';
|
||||||
import { isEmpty, isUndefined } from 'lodash-es';
|
import { getPaginationQueryDataV2 } from 'lib/newQueryBuilder/getPaginationQueryData';
|
||||||
|
import { cloneDeep, defaultTo, isEmpty, isUndefined, set } from 'lodash-es';
|
||||||
import LiveLogs from 'pages/LiveLogs';
|
import LiveLogs from 'pages/LiveLogs';
|
||||||
|
import { ExplorerViews } from 'pages/LogsExplorer/utils';
|
||||||
import {
|
import {
|
||||||
Dispatch,
|
Dispatch,
|
||||||
memo,
|
memo,
|
||||||
@@ -52,10 +52,15 @@ import { Warning } from 'types/api';
|
|||||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
import { Dashboard } from 'types/api/dashboard/getAll';
import APIError from 'types/api/error';
import { ILog } from 'types/api/logs/log';
- import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData';
+ import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
+ import {
+ IBuilderQuery,
+ Query,
+ TagFilter,
+ } from 'types/api/queryBuilder/queryBuilderData';
import { Filter } from 'types/api/v5/queryRange';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
- import { DataSource } from 'types/common/queryBuilder';
+ import { DataSource, LogsAggregatorOperator } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink';
import { v4 } from 'uuid';

@@ -63,13 +68,14 @@ import { v4 } from 'uuid';
import LogsActionsContainer from './LogsActionsContainer';

function LogsExplorerViewsContainer({
+ selectedView,
setIsLoadingQueries,
listQueryKeyRef,
chartQueryKeyRef,
setWarning,
showLiveLogs,
- handleChangeSelectedView,
}: {
+ selectedView: ExplorerViews;
setIsLoadingQueries: React.Dispatch<React.SetStateAction<boolean>>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
listQueryKeyRef: MutableRefObject<any>;

@@ -77,14 +83,19 @@ function LogsExplorerViewsContainer({
chartQueryKeyRef: MutableRefObject<any>;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
showLiveLogs: boolean;
- handleChangeSelectedView: ChangeViewFunctionType;
}): JSX.Element {
const { safeNavigate } = useSafeNavigate();
const dispatch = useDispatch();

- const [showFrequencyChart, setShowFrequencyChart] = useState(
- () => getFromLocalstorage(LOCALSTORAGE.SHOW_FREQUENCY_CHART) === 'true',
- );
+ const [showFrequencyChart, setShowFrequencyChart] = useState(false);
+ useEffect(() => {
+ const frequencyChart = getFromLocalstorage(LOCALSTORAGE.SHOW_FREQUENCY_CHART);
+ setShowFrequencyChart(frequencyChart === 'true');
+ }, []);
+
+ // this is to respect the panel type present in the URL rather than defaulting it to list always.
+ const panelTypes = useGetPanelTypesQueryParam(PANEL_TYPES.LIST);

const { activeLogId } = useCopyLogLink();

@@ -106,9 +117,14 @@ function LogsExplorerViewsContainer({
stagedQuery,
panelType,
updateAllQueriesOperators,
+ handleSetConfig,
} = useQueryBuilder();

- const selectedPanelType = panelType || PANEL_TYPES.LIST;
+ const [selectedPanelType, setSelectedPanelType] = useState<PANEL_TYPES>(
+ panelType || PANEL_TYPES.LIST,
+ );
+
+ const { handleExplorerTabChange } = useHandleExplorerTabChange();

// State
const [page, setPage] = useState<number>(1);

@@ -119,9 +135,27 @@ function LogsExplorerViewsContainer({

const [orderBy, setOrderBy] = useState<string>('timestamp:desc');

- const listQuery = useMemo(() => getListQuery(stagedQuery) || null, [
- stagedQuery,
- ]);
+ const listQuery = useMemo(() => {
+ if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
+ return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
+ }, [stagedQuery]);
+
+ const isMultipleQueries = useMemo(
+ () =>
+ currentQuery?.builder?.queryData?.length > 1 ||
+ currentQuery?.builder?.queryFormulas?.length > 0,
+ [currentQuery],
+ );
+
+ const isGroupByExist = useMemo(() => {
+ const groupByCount: number = currentQuery?.builder?.queryData?.reduce<number>(
+ (acc, query) => acc + query.groupBy.length,
+ 0,
+ );
+
+ return groupByCount > 0;
+ }, [currentQuery]);

const isLimit: boolean = useMemo(() => {
if (!listQuery) return false;

@@ -131,9 +165,66 @@ function LogsExplorerViewsContainer({
}, [logs.length, listQuery]);

useEffect(() => {
- const modifiedQuery = getFrequencyChartData(stagedQuery, activeLogId);
+ if (!stagedQuery || !listQuery) {
+ setListChartQuery(null);
+ return;
+ }
+
+ let updatedFilterExpression = listQuery.filter?.expression || '';
+ if (activeLogId) {
+ updatedFilterExpression = `${updatedFilterExpression} id <= '${activeLogId}'`.trim();
+ }
+
+ const modifiedQueryData: IBuilderQuery = {
+ ...listQuery,
+ aggregateOperator: LogsAggregatorOperator.COUNT,
+ groupBy: [
+ {
+ key: 'severity_text',
+ dataType: DataTypes.String,
+ type: '',
+ id: 'severity_text--string----true',
+ },
+ ],
+ legend: '{{severity_text}}',
+ filter: {
+ ...listQuery?.filter,
+ expression: updatedFilterExpression || '',
+ },
+ ...(activeLogId && {
+ filters: {
+ ...listQuery?.filters,
+ items: [
+ ...(listQuery?.filters?.items || []),
+ {
+ id: v4(),
+ key: {
+ key: 'id',
+ type: '',
+ dataType: DataTypes.String,
+ },
+ op: OPERATORS['<='],
+ value: activeLogId,
+ },
+ ],
+ op: 'AND',
+ },
+ }),
+ };
+
+ const modifiedQuery: Query = {
+ ...stagedQuery,
+ builder: {
+ ...stagedQuery.builder,
+ queryData: stagedQuery.builder.queryData.map((item) => ({
+ ...item,
+ ...modifiedQueryData,
+ })),
+ },
+ };

setListChartQuery(modifiedQuery);
- }, [stagedQuery, activeLogId]);
+ }, [stagedQuery, listQuery, activeLogId]);

const exportDefaultQuery = useMemo(
() =>

@@ -155,9 +246,7 @@ function LogsExplorerViewsContainer({
ENTITY_VERSION_V5,
{
enabled:
- showFrequencyChart &&
- !!listChartQuery &&
- selectedPanelType === PANEL_TYPES.LIST,
+ showFrequencyChart && !!listChartQuery && panelType === PANEL_TYPES.LIST,
},
{},
undefined,

@@ -175,7 +264,7 @@ function LogsExplorerViewsContainer({
error,
} = useGetExplorerQueryRange(
requestData,
- selectedPanelType,
+ panelType,
ENTITY_VERSION_V5,
{
keepPreviousData: true,

@@ -207,13 +296,77 @@ function LogsExplorerViewsContainer({
filters: TagFilter;
filter: Filter;
},
- ): Query | null =>
- getQueryByPanelType(query, selectedPanelType, {
- ...params,
- activeLogId,
- orderBy,
- }),
- [activeLogId, orderBy, selectedPanelType],
+ ): Query | null => {
+ if (!query) return null;
+
+ const paginateData = getPaginationQueryDataV2({
+ page: params.page,
+ pageSize: params.pageSize,
+ });
+
+ // Add filter for activeLogId if present
+ let updatedFilters = params.filters;
+ let updatedFilterExpression = params.filter?.expression || '';
+ if (activeLogId) {
+ updatedFilters = {
+ ...params.filters,
+ items: [
+ ...(params.filters?.items || []),
+ {
+ id: v4(),
+ key: {
+ key: 'id',
+ type: '',
+ dataType: DataTypes.String,
+ },
+ op: OPERATORS['<='],
+ value: activeLogId,
+ },
+ ],
+ op: 'AND',
+ };
+ updatedFilterExpression = `${updatedFilterExpression} id <= '${activeLogId}'`.trim();
+ }
+
+ // Create orderBy array based on orderDirection
+ const [columnName, order] = orderBy.split(':');
+
+ const newOrderBy = [
+ { columnName: columnName || 'timestamp', order: order || 'desc' },
+ { columnName: 'id', order: order || 'desc' },
+ ];
+
+ const queryData: IBuilderQuery[] =
+ query.builder.queryData.length > 1
+ ? query.builder.queryData.map((item) => ({
+ ...item,
+ ...(selectedView !== ExplorerViews.LIST ? { order: [] } : {}),
+ }))
+ : [
+ {
+ ...(listQuery || initialQueryBuilderFormValues),
+ ...paginateData,
+ ...(updatedFilters ? { filters: updatedFilters } : {}),
+ filter: {
+ expression: updatedFilterExpression || '',
+ },
+ ...(selectedView === ExplorerViews.LIST
+ ? { order: newOrderBy, orderBy: newOrderBy }
+ : { order: [] }),
+ },
+ ];
+
+ const data: Query = {
+ ...query,
+ builder: {
+ ...query.builder,
+ queryData,
+ },
+ };
+
+ return data;
+ },
+ [activeLogId, orderBy, listQuery, selectedView],
);

useEffect(() => {

@@ -259,7 +412,7 @@ function LogsExplorerViewsContainer({
if (!logEventCalledRef.current && !isUndefined(data?.payload)) {
const currentData = data?.payload?.data?.newResult?.data?.result || [];
logEvent('Logs Explorer: Page visited', {
- panelType: selectedPanelType,
+ panelType,
isEmpty: !currentData?.[0]?.list,
});
logEventCalledRef.current = true;

@@ -267,24 +420,31 @@ function LogsExplorerViewsContainer({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [data?.payload]);

+ const getUpdatedQueryForExport = useCallback((): Query => {
+ const updatedQuery = cloneDeep(currentQuery);
+
+ set(updatedQuery, 'builder.queryData[0].pageSize', 10);
+
+ return updatedQuery;
+ }, [currentQuery]);
+
const handleExport = useCallback(
(dashboard: Dashboard | null, isNewDashboard?: boolean): void => {
- if (!dashboard || !selectedPanelType) return;
+ if (!dashboard || !panelType) return;

- const panelTypeParam = AVAILABLE_EXPORT_PANEL_TYPES.includes(
- selectedPanelType,
- )
- ? selectedPanelType
+ const panelTypeParam = AVAILABLE_EXPORT_PANEL_TYPES.includes(panelType)
+ ? panelType
: PANEL_TYPES.TIME_SERIES;

const widgetId = v4();

- const query = getExportQueryData(requestData, selectedPanelType);
- if (!query) return;
+ const query =
+ panelType === PANEL_TYPES.LIST
+ ? getUpdatedQueryForExport()
+ : exportDefaultQuery;

logEvent('Logs Explorer: Add to dashboard successful', {
- panelType: selectedPanelType,
+ panelType,
isNewDashboard,
dashboardName: dashboard?.data?.title,
});

@@ -298,9 +458,36 @@ function LogsExplorerViewsContainer({

safeNavigate(dashboardEditView);
},
- [safeNavigate, requestData, selectedPanelType],
+ [getUpdatedQueryForExport, exportDefaultQuery, safeNavigate, panelType],
);

+ useEffect(() => {
+ const shouldChangeView = isMultipleQueries || isGroupByExist;
+
+ if (selectedPanelType === PANEL_TYPES.LIST && shouldChangeView) {
+ handleExplorerTabChange(PANEL_TYPES.TIME_SERIES);
+ setSelectedPanelType(PANEL_TYPES.TIME_SERIES);
+ }
+
+ if (panelType) {
+ setSelectedPanelType(panelType);
+ }
+ }, [
+ isMultipleQueries,
+ isGroupByExist,
+ selectedPanelType,
+ selectedView,
+ handleExplorerTabChange,
+ panelType,
+ ]);
+
+ useEffect(() => {
+ if (selectedView && selectedView === ExplorerViews.LIST && handleSetConfig) {
+ handleSetConfig(defaultTo(panelTypes, PANEL_TYPES.LIST), DataSource.LOGS);
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [handleSetConfig, panelTypes]);
+
useEffect(() => {
const currentData = data?.payload?.data?.newResult?.data?.result || [];
if (currentData.length > 0 && currentData[0].list) {

@@ -359,17 +546,19 @@ function LogsExplorerViewsContainer({
pageSize,
minTime,
activeLogId,
- selectedPanelType,
+ panelType,
+ selectedView,
dispatch,
selectedTime,
maxTime,
orderBy,
+ selectedPanelType,
]);

const chartData = useMemo(() => {
if (!stagedQuery) return [];

- if (selectedPanelType === PANEL_TYPES.LIST) {
+ if (panelType === PANEL_TYPES.LIST) {
if (listChartData && listChartData.payload.data?.result.length > 0) {
return listChartData.payload.data.result;
}

@@ -389,7 +578,7 @@ function LogsExplorerViewsContainer({
const firstPayloadQueryArray = firstPayloadQuery ? [firstPayloadQuery] : [];

return isGroupByExist ? data.payload.data.result : firstPayloadQueryArray;
- }, [stagedQuery, selectedPanelType, data, listChartData, listQuery]);
+ }, [stagedQuery, panelType, data, listChartData, listQuery]);

useEffect(() => {
if (

@@ -450,7 +639,7 @@ function LogsExplorerViewsContainer({
className="logs-frequency-chart"
isLoading={isFetchingListChartData || isLoadingListChartData}
data={chartData}
- isLogsExplorerViews={selectedPanelType === PANEL_TYPES.LIST}
+ isLogsExplorerViews={panelType === PANEL_TYPES.LIST}
/>
</div>
)}

@@ -506,7 +695,6 @@ function LogsExplorerViewsContainer({
query={exportDefaultQuery}
onExport={handleExport}
sourcepage={DataSource.LOGS}
- handleChangeSelectedView={handleChangeSelectedView}
/>
</div>
);
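For reference, a minimal stand-alone sketch of the frequency-chart query shape built in the hunk above: a COUNT aggregation grouped by severity_text, pinned to logs at or before the shared log when an active log id is present. Only the field values come from the change; the helper name and trimmed types here are illustrative assumptions.

// Illustrative only: mirrors the shape assembled in the frequency-chart effect above.
type FrequencyChartSketch = {
	aggregateOperator: 'count';
	groupBy: Array<{ key: string; dataType: string; type: string; id: string }>;
	legend: string;
	filter: { expression: string };
};

function sketchFrequencyChartQuery(
	baseExpression: string,
	activeLogId?: string,
): FrequencyChartSketch {
	// When a log is shared/active, constrain the chart to logs at or before it.
	const expression = activeLogId
		? `${baseExpression} id <= '${activeLogId}'`.trim()
		: baseExpression;

	return {
		aggregateOperator: 'count',
		groupBy: [
			{
				key: 'severity_text',
				dataType: 'string',
				type: '',
				id: 'severity_text--string----true',
			},
		],
		legend: '{{severity_text}}',
		filter: { expression },
	};
}

// Example: sketchFrequencyChartQuery("service.name = 'frontend'", 'log-id-1')
// yields a count-by-severity query whose filter also carries "id <= 'log-id-1'".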
@@ -5,12 +5,12 @@ import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQuery
import { logsQueryRangeSuccessResponse } from 'mocks-server/__mockdata__/logs_query_range';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
+ import { ExplorerViews } from 'pages/LogsExplorer/utils';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { VirtuosoMockContext } from 'react-virtuoso';
import { fireEvent, render, RenderResult, waitFor } from 'tests/test-utils';
import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
- import { LogsAggregatorOperator } from 'types/common/queryBuilder';

import LogsExplorerViews from '..';
import {

@@ -152,12 +152,12 @@ const renderer = (): RenderResult =>
>
<PreferenceContextProvider>
<LogsExplorerViews
+ selectedView={ExplorerViews.LIST}
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
setWarning={(): void => {}}
showLiveLogs={false}
- handleChangeSelectedView={(): void => {}}
/>
</PreferenceContextProvider>
</VirtuosoMockContext.Provider>,

@@ -218,12 +218,12 @@ describe('LogsExplorerViews -', () => {
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue}>
<PreferenceContextProvider>
<LogsExplorerViews
+ selectedView={ExplorerViews.LIST}
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
setWarning={(): void => {}}
showLiveLogs={false}
- handleChangeSelectedView={(): void => {}}
/>
</PreferenceContextProvider>
</QueryBuilderContext.Provider>,

@@ -295,12 +295,12 @@ describe('LogsExplorerViews -', () => {
<QueryBuilderContext.Provider value={customContext as any}>
<PreferenceContextProvider>
<LogsExplorerViews
+ selectedView={ExplorerViews.LIST}
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
setWarning={(): void => {}}
showLiveLogs={false}
- handleChangeSelectedView={(): void => {}}
/>
</PreferenceContextProvider>
</QueryBuilderContext.Provider>,

@@ -323,120 +323,4 @@ describe('LogsExplorerViews -', () => {
}
});
});
-
- describe('Queries by View', () => {
- it('builds Frequency Chart query with COUNT and severity_text grouping and activeLogId bound', async () => {
- // Enable frequency chart via localstorage and provide activeLogId
- (useCopyLogLink as jest.Mock).mockReturnValue({
- activeLogId: ACTIVE_LOG_ID,
- });
- // Ensure default mock return exists
- (useGetExplorerQueryRange as jest.Mock).mockReturnValue({
- data: { payload: logsQueryRangeSuccessNewFormatResponse },
- });
-
- // Render with LIST panel type so the frequency chart hook runs with TIME_SERIES
- render(
- <VirtuosoMockContext.Provider
- value={{ viewportHeight: 300, itemHeight: 100 }}
- >
- <PreferenceContextProvider>
- <QueryBuilderContext.Provider
- value={
- { ...mockQueryBuilderContextValue, panelType: PANEL_TYPES.LIST } as any
- }
- >
- <LogsExplorerViews
- setIsLoadingQueries={(): void => {}}
- listQueryKeyRef={{ current: {} }}
- chartQueryKeyRef={{ current: {} }}
- setWarning={(): void => {}}
- showLiveLogs={false}
- handleChangeSelectedView={(): void => {}}
- />
- </QueryBuilderContext.Provider>
- </PreferenceContextProvider>
- </VirtuosoMockContext.Provider>,
- );
-
- await waitFor(() => {
- const chartCall = (useGetExplorerQueryRange as jest.Mock).mock.calls.find(
- (call) => call[1] === PANEL_TYPES.TIME_SERIES && call[0],
- );
- expect(chartCall).toBeDefined();
- if (chartCall) {
- const frequencyQuery = chartCall[0];
- const first = frequencyQuery.builder.queryData[0];
- // Panel type used for chart fetch
- expect(chartCall[1]).toBe(PANEL_TYPES.TIME_SERIES);
- // Transformations
- expect(first.aggregateOperator).toBe(LogsAggregatorOperator.COUNT);
- expect(first.groupBy?.[0]?.key).toBe('severity_text');
- expect(first.legend).toBe('{{severity_text}}');
- expect(Array.isArray(first.orderBy) && first.orderBy.length === 0).toBe(
- true,
- );
- expect(first.having?.expression).toBe('');
- // activeLogId constraints
- expect(first.filter?.expression).toContain(`id <= '${ACTIVE_LOG_ID}'`);
- expect(
- first.filters?.items?.some(
- (it: any) =>
- it.key?.key === 'id' && it.op === '<=' && it.value === ACTIVE_LOG_ID,
- ),
- ).toBe(true);
- }
- });
- });
-
- it('builds List View query with orderBy and clears groupBy/having', async () => {
- (useCopyLogLink as jest.Mock).mockReturnValue({ activeLogId: undefined });
- (useGetExplorerQueryRange as jest.Mock).mockReturnValue({
- data: { payload: logsQueryRangeSuccessNewFormatResponse },
- });
-
- render(
- <VirtuosoMockContext.Provider
- value={{ viewportHeight: 300, itemHeight: 100 }}
- >
- <PreferenceContextProvider>
- <QueryBuilderContext.Provider
- value={
- { ...mockQueryBuilderContextValue, panelType: PANEL_TYPES.LIST } as any
- }
- >
- <LogsExplorerViews
- setIsLoadingQueries={(): void => {}}
- listQueryKeyRef={{ current: {} }}
- chartQueryKeyRef={{ current: {} }}
- setWarning={(): void => {}}
- showLiveLogs={false}
- handleChangeSelectedView={(): void => {}}
- />
- </QueryBuilderContext.Provider>
- </PreferenceContextProvider>
- </VirtuosoMockContext.Provider>,
- );
-
- await waitFor(() => {
- const listCall = (useGetExplorerQueryRange as jest.Mock).mock.calls.find(
- (call) => call[1] === PANEL_TYPES.LIST && call[0],
- );
- expect(listCall).toBeDefined();
- if (listCall) {
- const listQueryArg = listCall[0];
- const first = listQueryArg.builder.queryData[0];
- expect(first.groupBy?.length ?? 0).toBe(0);
- expect(first.having?.expression).toBe('');
- // Default orderBy should be timestamp desc, then id desc
- expect(first.orderBy).toEqual([
- { columnName: 'timestamp', order: 'desc' },
- { columnName: 'id', order: 'desc' },
- ]);
- // Ensure the query is enabled for fetch
- expect(first.disabled).toBe(false);
- }
- });
- });
- });
});
@@ -1,6 +1,10 @@
import { ColumnsType } from 'antd/es/table';
import { Typography } from 'antd/lib';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
+ import {
+ LOG_FIELD_BODY_KEY,
+ LOG_FIELD_TIMESTAMP_KEY,
+ } from 'lib/logs/flatLogData';
// import Typography from 'antd/es/typography/Typography';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { ReactNode } from 'react';

@@ -18,15 +22,15 @@ export const getLogPanelColumnsList = (

const columns: ColumnsType<RowData> =
selectedLogFields?.map((field: IField) => {
- const { name } = field;
+ const { name, key, displayName } = field;

return {
- title: name,
- dataIndex: name,
- key: name,
- width: name === 'body' ? 350 : 100,
+ title: displayName,
+ dataIndex: key,
+ key,
+ width: key === LOG_FIELD_BODY_KEY ? 350 : 100,
render: (value: ReactNode): JSX.Element => {
- if (name === 'timestamp') {
+ if (key === LOG_FIELD_TIMESTAMP_KEY) {
return (
<Typography.Text>
{formatTimezoneAdjustedTimestamp(value as string)}

@@ -34,7 +38,7 @@ export const getLogPanelColumnsList = (
);
}

- if (name === 'body') {
+ if (key === LOG_FIELD_BODY_KEY) {
return (
<Typography.Paragraph ellipsis={{ rows: 1 }} data-testid={name}>
{value}
@@ -115,25 +115,19 @@ describe('TopOperation API Integration', () => {

server.use(
rest.post(
- 'http://localhost/api/v2/service/top_operations',
+ 'http://localhost/api/v1/service/top_operations',
async (req, res, ctx) => {
const body = await req.json();
apiCalls.push({ endpoint: TOP_OPERATIONS_ENDPOINT, body });
- return res(
- ctx.status(200),
- ctx.json({ status: 'success', data: mockTopOperationsData }),
- );
+ return res(ctx.status(200), ctx.json(mockTopOperationsData));
},
),
rest.post(
- 'http://localhost/api/v2/service/entry_point_operations',
+ 'http://localhost/api/v1/service/entry_point_operations',
async (req, res, ctx) => {
const body = await req.json();
apiCalls.push({ endpoint: ENTRY_POINT_OPERATIONS_ENDPOINT, body });
- return res(
- ctx.status(200),
- ctx.json({ status: 'success', data: mockEntryPointData }),
- );
+ return res(ctx.status(200), ctx.json({ data: mockEntryPointData }));
},
),
);

@@ -168,7 +162,6 @@ describe('TopOperation API Integration', () => {
end: `${defaultApiCallExpectation.end}`,
service: defaultApiCallExpectation.service,
tags: defaultApiCallExpectation.selectedTags,
- limit: 5000,
});
});

@@ -202,7 +195,6 @@ describe('TopOperation API Integration', () => {
end: `${defaultApiCallExpectation.end}`,
service: defaultApiCallExpectation.service,
tags: defaultApiCallExpectation.selectedTags,
- limit: 5000,
});
});
@@ -558,8 +558,10 @@ export const getDefaultWidgetData = (
decimalPrecision: PrecisionOptionsEnum.TWO, // default decimal precision
selectedLogFields: defaultLogsSelectedColumns.map((field) => ({
...field,
+ key: field.key ?? '',
type: field.fieldContext ?? '',
dataType: field.fieldDataType ?? '',
+ displayName: field.displayName || field.name,
})),
selectedTracesFields: defaultTraceSelectedColumns,
});
@@ -42,10 +42,12 @@ function AddColumnField({ config }: AddColumnFieldProps): JSX.Element | null {
</SearchIconWrapper>
</Input.Group>

- {config.value?.map(({ name }) => (
- <AddColumnItem direction="horizontal" key={name}>
- <Typography>{name}</Typography>
- <DeleteOutlinedIcon onClick={(): void => config.onRemove(name)} />
+ {config.value?.map((column) => (
+ <AddColumnItem direction="horizontal" key={column.key}>
+ <Typography>{column.displayName || column.name}</Typography>
+ <DeleteOutlinedIcon
+ onClick={(): void => config.onRemove(column.key || column.name)}
+ />
</AddColumnItem>
))}
</AddColumnWrapper>
@@ -1,4 +1,8 @@
import { TelemetryFieldKey } from 'api/v5/v5';
+ import {
+ LOG_FIELD_BODY_KEY,
+ LOG_FIELD_TIMESTAMP_KEY,
+ } from 'lib/logs/flatLogData';

import { FontSize, OptionsQuery } from './types';

@@ -16,15 +20,19 @@ export const defaultLogsSelectedColumns: TelemetryFieldKey[] = [
name: 'timestamp',
signal: 'logs',
fieldContext: 'log',
- fieldDataType: '',
+ fieldDataType: 'string',
isIndexed: false,
+ key: LOG_FIELD_TIMESTAMP_KEY,
+ displayName: 'Timestamp',
},
{
name: 'body',
signal: 'logs',
fieldContext: 'log',
- fieldDataType: '',
+ fieldDataType: 'string',
isIndexed: false,
+ key: LOG_FIELD_BODY_KEY,
+ displayName: 'Body',
},
];

@@ -34,29 +42,47 @@ export const defaultTraceSelectedColumns: TelemetryFieldKey[] = [
signal: 'traces',
fieldContext: 'resource',
fieldDataType: 'string',
+ key: 'resource.service.name:string',
+ displayName: 'Service Name',
},
{
name: 'name',
signal: 'traces',
fieldContext: 'span',
fieldDataType: 'string',
+ key: 'span.name:string',
+ displayName: 'Span Name',
},
{
name: 'duration_nano',
signal: 'traces',
fieldContext: 'span',
- fieldDataType: '',
+ fieldDataType: 'number',
+ key: 'span.duration_nano:number',
+ displayName: 'Duration',
},
{
name: 'http_method',
signal: 'traces',
fieldContext: 'span',
- fieldDataType: '',
+ fieldDataType: 'string',
+ key: 'span.http_method:string',
+ displayName: 'HTTP Method',
},
{
name: 'response_status_code',
signal: 'traces',
fieldContext: 'span',
- fieldDataType: '',
+ fieldDataType: 'string',
+ key: 'span.response_status_code:string',
+ displayName: 'Status Code',
+ },
+ {
+ name: 'timestamp',
+ signal: 'traces',
+ fieldContext: 'span',
+ fieldDataType: 'string',
+ key: 'span.timestamp:string',
+ displayName: 'Timestamp',
},
];
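The explicit key strings above all follow one convention, `${fieldContext}.${name}:${fieldDataType}`. A small hedged sketch of that derivation (the helper is hypothetical; only the format itself is taken from the constants in the hunk above):

// Hypothetical helper: derives a column key in the fieldContext.name:fieldDataType shape.
interface ColumnFieldSketch {
	name: string;
	fieldContext: string;
	fieldDataType: string;
}

const toColumnKey = ({
	name,
	fieldContext,
	fieldDataType,
}: ColumnFieldSketch): string => `${fieldContext}.${name}:${fieldDataType}`;

// toColumnKey({ name: 'duration_nano', fieldContext: 'span', fieldDataType: 'number' })
// -> 'span.duration_nano:number', matching the trace column keys above.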
@@ -16,11 +16,6 @@ import {
QueryKeyRequestProps,
QueryKeySuggestionsResponseProps,
} from 'types/api/querySuggestions/types';
- import {
- FieldContext,
- FieldDataType,
- SignalType,
- } from 'types/api/v5/queryRange';
import { DataSource } from 'types/common/queryBuilder';

import {

@@ -35,7 +30,11 @@ import {
OptionsMenuConfig,
OptionsQuery,
} from './types';
- import { getOptionsFromKeys } from './utils';
+ import {
+ createTelemetryFieldKey,
+ getOptionsFromKeys,
+ resolveColumnConflicts,
+ } from './utils';

interface UseOptionsMenuProps {
storageKey?: string;

@@ -104,28 +103,19 @@ const useOptionsMenu = ({
return [];
}

- const attributesData = initialAttributesResult?.reduce(
- (acc: TelemetryFieldKey[], attributeResponse): TelemetryFieldKey[] => {
- const suggestions =
- Object.values(attributeResponse?.data?.data?.data?.keys || {}).flat() ||
- [];
-
- const mappedSuggestions: TelemetryFieldKey[] = suggestions.map(
- (suggestion) => ({
- name: suggestion.name,
- signal: suggestion.signal as SignalType,
- fieldDataType: suggestion.fieldDataType as FieldDataType,
- fieldContext: suggestion.fieldContext as FieldContext,
- }),
- );
-
- return [...acc, ...mappedSuggestions];
- },
- [],
- );
+ // Collect all suggestions from all API responses first
+ const allSuggestions =
+ initialAttributesResult?.flatMap((attributeResponse) =>
+ Object.values(attributeResponse?.data?.data?.data?.keys || {})
+ .flat()
+ .map((suggestion) => createTelemetryFieldKey(suggestion)),
+ ) || [];
+
+ // Resolve conflicts and deduplicate once at the end for better performance
+ const attributesData = resolveColumnConflicts(allSuggestions);

let initialSelected: TelemetryFieldKey[] = (initialOptions?.selectColumns
- ?.map((column) => attributesData.find(({ name }) => name === column))
+ ?.map((column) => attributesData.find(({ key }) => key === column))
.filter((e) => !!e) || []) as TelemetryFieldKey[];

if (dataSource === DataSource.TRACES) {

@@ -133,13 +123,15 @@ const useOptionsMenu = ({
?.map((col) => {
if (col && Object.keys(AllTraceFilterKeyValue).includes(col?.name)) {
const metaData = defaultTraceSelectedColumns.find(
- (coln) => coln.name === col.name,
+ (coln) => coln.key === col.key,
);

- return {
- ...metaData,
- name: metaData?.name || '',
- };
+ if (metaData) {
+ return {
+ ...metaData,
+ name: metaData.name,
+ };
+ }
}
return col;
})

@@ -187,40 +179,23 @@ const useOptionsMenu = ({
const searchedAttributesDataList = Object.values(
searchedAttributesDataV5?.data.data.keys || {},
).flat();

if (searchedAttributesDataList.length) {
- if (dataSource === DataSource.LOGS) {
- const logsSelectedColumns: TelemetryFieldKey[] = defaultLogsSelectedColumns.map(
- (e) => ({
- ...e,
- name: e.name,
- signal: e.signal as SignalType,
- fieldContext: e.fieldContext as FieldContext,
- fieldDataType: e.fieldDataType as FieldDataType,
- }),
- );
- return [
- ...logsSelectedColumns,
- ...searchedAttributesDataList
- .filter((attribute) => attribute.name !== 'body')
- // eslint-disable-next-line sonarjs/no-identical-functions
- .map((e) => ({
- ...e,
- name: e.name,
- signal: e.signal as SignalType,
- fieldContext: e.fieldContext as FieldContext,
- fieldDataType: e.fieldDataType as FieldDataType,
- })),
- ];
- }
- // eslint-disable-next-line sonarjs/no-identical-functions
- return searchedAttributesDataList.map((e) => ({
- ...e,
- name: e.name,
- signal: e.signal as SignalType,
- fieldContext: e.fieldContext as FieldContext,
- fieldDataType: e.fieldDataType as FieldDataType,
- }));
+ // Map all attributes with proper key and displayName
+ const mappedAttributes = searchedAttributesDataList.map((e) =>
+ createTelemetryFieldKey(e),
+ );
+
+ // Combine with default columns and resolve conflicts
+ const allColumns =
+ dataSource === DataSource.LOGS
+ ? [...defaultLogsSelectedColumns, ...mappedAttributes]
+ : mappedAttributes;
+
+ // Resolve conflicts with deduplication
+ return resolveColumnConflicts(allColumns);
}

if (dataSource === DataSource.TRACES) {
return defaultTraceSelectedColumns.map((e) => ({
...e,

@@ -234,19 +209,10 @@ const useOptionsMenu = ({
const initialOptionsQuery: OptionsQuery = useMemo(() => {
let defaultColumns: TelemetryFieldKey[] = defaultOptionsQuery.selectColumns;
if (dataSource === DataSource.TRACES) {
- defaultColumns = defaultTraceSelectedColumns.map((e) => ({
- ...e,
- name: e.name,
- }));
+ defaultColumns = defaultTraceSelectedColumns;
} else if (dataSource === DataSource.LOGS) {
// eslint-disable-next-line sonarjs/no-identical-functions
- defaultColumns = defaultLogsSelectedColumns.map((e) => ({
- ...e,
- name: e.name,
- signal: e.signal as SignalType,
- fieldContext: e.fieldContext as FieldContext,
- fieldDataType: e.fieldDataType as FieldDataType,
- }));
+ defaultColumns = defaultLogsSelectedColumns;
}

const finalSelectColumns = initialOptions?.selectColumns

@@ -261,7 +227,7 @@ const useOptionsMenu = ({
}, [dataSource, initialOptions, initialSelectedColumns]);

const selectedColumnKeys = useMemo(
- () => preferences?.columns?.map(({ name }) => name) || [],
+ () => preferences?.columns?.map(({ key }) => key) || [],
[preferences?.columns],
);

@@ -290,7 +256,7 @@ const useOptionsMenu = ({
const column = [
...searchedAttributeKeys,
...(preferences?.columns || []),
- ].find(({ name }) => name === key);
+ ].find((column) => column.key === key);

if (!column) return acc;
return [...acc, column];

@@ -319,7 +285,7 @@ const useOptionsMenu = ({
const handleRemoveSelectedColumn = useCallback(
(columnKey: string) => {
const newSelectedColumns = preferences?.columns?.filter(
- ({ name }) => name !== columnKey,
+ (column) => column.key !== columnKey,
);

if (!newSelectedColumns?.length && dataSource !== DataSource.LOGS) {
@@ -1,13 +1,163 @@
import { SelectProps } from 'antd';
import { TelemetryFieldKey } from 'api/v5/v5';
+ import {
+ FieldContext,
+ FieldDataType,
+ SignalType,
+ } from 'types/api/v5/queryRange';
+
+ /**
+ * Display name mapping for log fieldContext columns
+ * Provides user-friendly names for standard log fields
+ */
+ const LOG_FIELD_DISPLAY_NAMES: Record<string, string> = {
+ body: 'Body',
+ severity_number: 'Severity Number',
+ severity_text: 'Severity Text',
+ span_id: 'Span ID',
+ trace_flags: 'Trace Flags',
+ trace_id: 'Trace ID',
+ scope_name: 'Scope Name',
+ scope_version: 'Scope Version',
+ };
+
+ /**
+ * Helper function to create a TelemetryFieldKey with properly formatted key and displayName
+ * Ensures consistent key format: fieldContext.name:fieldDataType
+ * Uses display name map for log fieldContext columns
+ * @param suggestion - The raw suggestion data from the API
+ * @returns A TelemetryFieldKey with key and displayName fields properly set
+ */
+ export function createTelemetryFieldKey(suggestion: any): TelemetryFieldKey {
+ let displayName = suggestion.displayName || suggestion.name;
+
+ // Use mapped display name for log fieldContext columns
+ // We need to check the fieldContext to avoid overriding non-log fields coming from attributes
+ if (
+ (suggestion.fieldContext === 'log' || suggestion.fieldContext === 'scope') &&
+ LOG_FIELD_DISPLAY_NAMES[suggestion.name]
+ ) {
+ displayName = LOG_FIELD_DISPLAY_NAMES[suggestion.name];
+ }
+
+ return {
+ name: suggestion.name,
+ displayName,
+ key: `${suggestion.fieldContext}.${suggestion.name}:${suggestion.fieldDataType}`,
+ signal: suggestion.signal as SignalType,
+ fieldDataType: suggestion.fieldDataType as FieldDataType,
+ fieldContext: suggestion.fieldContext as FieldContext,
+ };
+ }
+
+ /**
+ * Generates a suffix for a column display name based on conflicts
+ * @param column - The column to generate suffix for
+ * @param hasContextConflict - Whether there's a conflict in fieldContext
+ * @param hasDataTypeConflict - Whether there's a conflict in fieldDataType
+ * @returns The suffix string to append to the column name
+ */
+ function generateColumnSuffix(
+ column: TelemetryFieldKey,
+ hasContextConflict: boolean,
+ hasDataTypeConflict: boolean,
+ ): string {
+ if (hasContextConflict && column.fieldContext) {
+ return ` (${column.fieldContext})`;
+ }
+ if (!hasContextConflict && hasDataTypeConflict && column.fieldDataType) {
+ return ` (${column.fieldDataType})`;
+ }
+ return '';
+ }
+
+ /**
+ * Updates display names for conflicting columns in the columnsByKey map
+ * @param columns - Array of columns with the same name
+ * @param columnsByKey - Map to update with new display names
+ */
+ function updateConflictingDisplayNames(
+ columns: TelemetryFieldKey[],
+ columnsByKey: Map<string, TelemetryFieldKey>,
+ ): void {
+ const contexts = new Set(columns.map((c) => c.fieldContext));
+ const dataTypes = new Set(columns.map((c) => c.fieldDataType));
+ const hasContextConflict = contexts.size > 1;
+ const hasDataTypeConflict = dataTypes.size > 1;
+
+ if (!hasContextConflict && !hasDataTypeConflict) {
+ return;
+ }
+
+ columns.forEach((column) => {
+ // Skip if already has a custom displayName (not just the name)
+ if (column.displayName && column.displayName !== column.name) {
+ return;
+ }
+
+ const suffix = generateColumnSuffix(
+ column,
+ hasContextConflict,
+ hasDataTypeConflict,
+ );
+
+ if (suffix) {
+ columnsByKey.set(column.key || column.name, {
+ ...column,
+ displayName: `${column.name}${suffix}`,
+ });
+ }
+ });
+ }
+
+ /**
+ * Processes a list of TelemetryFieldKeys and updates displayName for conflicting columns
+ * Adds suffix with fieldContext and/or fieldDataType when columns have the same name
+ * but different context or data type.
+ * Note: 'log' & 'scope' fieldContext suffix is hidden as it's the default context for logs.
+ * Also deduplicates columns with the same key.
+ * @param suggestions - Array of TelemetryFieldKey objects to process
+ * @returns Array with updated displayNames for conflicting columns and no duplicates
+ */
+ export function resolveColumnConflicts(
+ suggestions: TelemetryFieldKey[],
+ ): TelemetryFieldKey[] {
+ // Use Map for O(1) deduplication by key
+ const columnsByKey = new Map<string, TelemetryFieldKey>();
+ // Track columns by name to detect conflicts
+ const columnsByName = new Map<string, TelemetryFieldKey[]>();
+
+ // First pass: deduplicate by key and group by name
+ suggestions.forEach((suggestion) => {
+ // Skip duplicates (same key)
+ if (columnsByKey.has(suggestion.key || suggestion.name)) {
+ return;
+ }
+
+ columnsByKey.set(suggestion.key || suggestion.name, suggestion);
+
+ // Group by name for conflict detection
+ const existing = columnsByName.get(suggestion.name) || [];
+ columnsByName.set(suggestion.name, [...existing, suggestion]);
+ });
+
+ // Second pass: resolve conflicts for columns with same name
+ columnsByName.forEach((columns) => {
+ if (columns.length > 1) {
+ updateConflictingDisplayNames(columns, columnsByKey);
+ }
+ });
+
+ return Array.from(columnsByKey.values());
+ }
+
export const getOptionsFromKeys = (
keys: TelemetryFieldKey[],
selectedKeys: (string | undefined)[],
): SelectProps['options'] => {
- const options = keys.map(({ name }) => ({
- label: name,
- value: name,
+ const options = keys.map(({ key, displayName, name }) => ({
+ label: displayName || name,
+ value: key,
}));

return options.filter(
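As a rough usage sketch of the conflict handling added above (stand-alone and simplified; the real functions are the ones in the hunk, everything here is an assumption for illustration): columns that share a name but differ in fieldContext end up with a context suffix in their display name.

// Simplified, stand-alone illustration of the same idea as resolveColumnConflicts.
interface FieldSketch {
	name: string;
	key: string;
	fieldContext: string;
	displayName?: string;
}

function labelConflicts(fields: FieldSketch[]): FieldSketch[] {
	// Group fields by name so we can detect same-name columns from different contexts.
	const byName = new Map<string, FieldSketch[]>();
	fields.forEach((f) => byName.set(f.name, [...(byName.get(f.name) || []), f]));

	return fields.map((f) => {
		const contexts = new Set((byName.get(f.name) || []).map((s) => s.fieldContext));
		// Only suffix when the same name appears under more than one context.
		return contexts.size > 1
			? { ...f, displayName: `${f.name} (${f.fieldContext})` }
			: f;
	});
}

// labelConflicts([
//   { name: 'status', key: 'log.status:string', fieldContext: 'log' },
//   { name: 'status', key: 'attribute.status:string', fieldContext: 'attribute' },
// ]) -> displayName 'status (log)' and 'status (attribute)'.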
@@ -41,7 +41,6 @@ function TablePanelWrapper({
panelType={widget.panelTypes}
queryRangeRequest={queryRangeRequest}
decimalPrecision={widget.decimalPrecision}
- hiddenColumns={widget.hiddenColumns}
// eslint-disable-next-line react/jsx-props-no-spreading
{...GRID_TABLE_CONFIG}
/>
@@ -17,7 +17,7 @@ export type QueryTableProps = Omit<
query: Query;
renderActionCell?: (record: RowData) => ReactNode;
modifyColumns?: (columns: ColumnsType<RowData>) => ColumnsType<RowData>;
- renderColumnCell?: Record<string, (...args: any[]) => ReactNode>;
+ renderColumnCell?: Record<string, (record: RowData) => ReactNode>;
downloadOption?: DownloadOptions;
columns?: ColumnsType<RowData>;
dataSource?: RowData[];
@@ -57,7 +57,7 @@ function ResourceAttributesFilter(): JSX.Element | null {
query={query}
onChange={handleChangeTagFilters}
operatorConfigKey={OperatorConfigKeys.EXCEPTIONS}
- hideSpanScopeSelector
+ hideSpanScopeSelector={false}
/>
</div>
);
@@ -20,6 +20,10 @@ import { FontSize } from 'container/OptionsMenu/types';
import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import createQueryParams from 'lib/createQueryParams';
+ import {
+ LOG_FIELD_BODY_KEY,
+ LOG_FIELD_TIMESTAMP_KEY,
+ } from 'lib/logs/flatLogData';
import { Compass } from 'lucide-react';
import { useCallback, useMemo } from 'react';
import { Virtuoso } from 'react-virtuoso';

@@ -182,12 +186,16 @@ function SpanLogs({
{
dataType: 'string',
type: '',
+ displayName: 'Body',
name: 'body',
+ key: LOG_FIELD_BODY_KEY,
},
{
dataType: 'string',
type: '',
name: 'timestamp',
+ key: LOG_FIELD_TIMESTAMP_KEY,
+ displayName: 'Timestamp',
},
]}
/>
@@ -245,81 +245,5 @@ describe('useQueryBuilderOperations - Empty Aggregate Attribute Type', () => {
}),
);
});
-
- it('should reset operators when going from gauge -> empty -> gauge', () => {
- // Start with a gauge metric
- const gaugeQuery: IBuilderQuery = {
- ...defaultMockQuery,
- aggregateAttribute: {
- key: 'original_gauge',
- dataType: DataTypes.Float64,
- type: ATTRIBUTE_TYPES.GAUGE,
- } as BaseAutocompleteData,
- aggregations: [
- {
- timeAggregation: MetricAggregateOperator.COUNT_DISTINCT,
- metricName: 'original_gauge',
- temporality: '',
- spaceAggregation: '',
- },
- ],
- };
- const { result, rerender } = renderHook(
- ({ query }) =>
- useQueryOperations({
- query,
- index: 0,
- entityVersion: ENTITY_VERSION_V5,
- }),
- {
- initialProps: { query: gaugeQuery },
- },
- );
-
- // Re-render with empty attribute
- const emptyAttribute: BaseAutocompleteData = {
- key: '',
- dataType: DataTypes.Float64,
- type: '',
- };
- const emptyQuery: IBuilderQuery = {
- ...defaultMockQuery,
- aggregateAttribute: emptyAttribute,
- aggregations: [
- {
- timeAggregation: MetricAggregateOperator.COUNT,
- metricName: '',
- temporality: '',
- spaceAggregation: MetricAggregateOperator.SUM,
- },
- ],
- };
- rerender({ query: emptyQuery });
-
- // Change to a new gauge metric
- const newGaugeAttribute: BaseAutocompleteData = {
- key: 'new_gauge',
- dataType: DataTypes.Float64,
- type: ATTRIBUTE_TYPES.GAUGE,
- };
- act(() => {
- result.current.handleChangeAggregatorAttribute(newGaugeAttribute);
- });
-
- expect(mockHandleSetQueryData).toHaveBeenLastCalledWith(
- 0,
- expect.objectContaining({
- aggregateAttribute: newGaugeAttribute,
- aggregations: [
- {
- timeAggregation: MetricAggregateOperator.AVG,
- metricName: 'new_gauge',
- temporality: '',
- spaceAggregation: '',
- },
- ],
- }),
- );
- });
});
});
@@ -89,8 +89,6 @@ export const useQueryOperations: UseQueryOperations = ({
name: metricName,
type: metricType,
});
- } else {
- setPreviousMetricInfo(null);
}
}
}, [query]);

@@ -297,6 +295,7 @@ export const useQueryOperations: UseQueryOperations = ({

if (!isEditMode) {
// Get current metric info
+ const currentMetricName = newQuery.aggregateAttribute?.key || '';
const currentMetricType = newQuery.aggregateAttribute?.type || '';

const prevMetricType = previousMetricInfo?.type

@@ -379,6 +378,14 @@ export const useQueryOperations: UseQueryOperations = ({
];
}
}
+
+ // Update the tracked metric info for next comparison only if we have valid data
+ if (currentMetricName && currentMetricType) {
+ setPreviousMetricInfo({
+ name: currentMetricName,
+ type: currentMetricType,
+ });
+ }
}
}
@@ -9,12 +9,6 @@ import { DataSource } from 'types/common/queryBuilder';
import { useGetSearchQueryParam } from './queryBuilder/useGetSearchQueryParam';
import { useQueryBuilder } from './queryBuilder/useQueryBuilder';

- export interface ICurrentQueryData {
- name: string;
- id: string;
- query: Query;
- }
-
export const useHandleExplorerTabChange = (): {
handleExplorerTabChange: (
type: string,

@@ -93,3 +87,9 @@ export const useHandleExplorerTabChange = (): {

return { handleExplorerTabChange };
};
+
+ interface ICurrentQueryData {
+ name: string;
+ id: string;
+ query: Query;
+ }
@@ -1,17 +1,61 @@
 import { defaultTo } from 'lodash-es';
 import { ILog } from 'types/api/logs/log';

+// Exported constants for top-level field mappings
+export const LOG_FIELD_ID_KEY = 'id';
+export const LOG_FIELD_TIMESTAMP_KEY = 'log.timestamp:string';
+export const LOG_FIELD_BODY_KEY = 'log.body:string';
+export const LOG_FIELD_SPAN_ID_KEY = 'log.span_id:string';
+export const LOG_FIELD_TRACE_ID_KEY = 'log.trace_id:string';
+export const LOG_FIELD_TRACE_FLAGS_KEY = 'log.trace_flags:number';
+export const LOG_FIELD_SEVERITY_TEXT_KEY = 'log.severity_text:string';
+export const LOG_FIELD_SEVERITY_NUMBER_KEY = 'log.severity_number:number';
+export const LOG_FIELD_SCOPE_NAME_KEY = 'scope.scope_name:string';
+export const LOG_FIELD_SCOPE_VERSION_KEY = 'scope.scope_version:string';
+
 export function FlatLogData(log: ILog): Record<string, string> {
   const flattenLogObject: Record<string, string> = {};

-  Object.keys(log).forEach((key: string): void => {
-    if (typeof log[key as never] !== 'object') {
-      flattenLogObject[key] = log[key as never];
-    } else {
-      Object.keys(defaultTo(log[key as never], {})).forEach((childKey) => {
-        flattenLogObject[childKey] = log[key as never][childKey];
+  // Map of field names to their contexts and data types
+  const fieldMappings: Record<string, { context: string; datatype: string }> = {
+    resources_string: { context: 'resource', datatype: 'string' },
+    scope_string: { context: 'scope', datatype: 'string' },
+    attributes_string: { context: 'attribute', datatype: 'string' },
+    attributes_number: { context: 'attribute', datatype: 'number' },
+    attributes_bool: { context: 'attribute', datatype: 'bool' },
+  };
+
+  // Flatten specific fields with context and datatype
+  Object.entries(fieldMappings).forEach(([fieldKey, { context, datatype }]) => {
+    const fieldData = log[fieldKey as keyof ILog];
+    if (fieldData && typeof fieldData === 'object') {
+      Object.entries(defaultTo(fieldData, {})).forEach(([key, value]) => {
+        const flatKey = `${context}.${key}:${datatype}`;
+        flattenLogObject[flatKey] = String(value);
       });
     }
   });
+
+  // Add top-level fields
+  const topLevelFieldsToContextMapping: Record<string, string> = {
+    id: LOG_FIELD_ID_KEY,
+    timestamp: LOG_FIELD_TIMESTAMP_KEY,
+    body: LOG_FIELD_BODY_KEY,
+    span_id: LOG_FIELD_SPAN_ID_KEY,
+    trace_id: LOG_FIELD_TRACE_ID_KEY,
+    trace_flags: LOG_FIELD_TRACE_FLAGS_KEY,
+    severity_text: LOG_FIELD_SEVERITY_TEXT_KEY,
+    severity_number: LOG_FIELD_SEVERITY_NUMBER_KEY,
+    scope_name: LOG_FIELD_SCOPE_NAME_KEY,
+    scope_version: LOG_FIELD_SCOPE_VERSION_KEY,
+  };
+
+  Object.entries(topLevelFieldsToContextMapping).forEach(([field, key]) => {
+    const value = log[field as keyof ILog];
+    if (value !== undefined && value !== null) {
+      flattenLogObject[key] = String(value);
+    }
+  });
+
   return flattenLogObject;
 }

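Editor's note: a small illustration of what the FlatLogData rewrite above produces. The raw log below is a hypothetical, partial example (not the full ILog shape); the expected output follows the field mappings and top-level key constants introduced in the hunk:

```typescript
// Hypothetical, partial log record (illustrative only).
const rawLog = {
  id: 'log-1',
  body: 'payment failed',
  severity_text: 'ERROR',
  resources_string: { 'service.name': 'payment' },
  attributes_string: { 'http.method': 'POST' },
};

// What FlatLogData(rawLog) would be expected to return under the new mapping:
const expectedFlat = {
  'resource.service.name:string': 'payment',
  'attribute.http.method:string': 'POST',
  id: 'log-1', // LOG_FIELD_ID_KEY
  'log.body:string': 'payment failed', // LOG_FIELD_BODY_KEY
  'log.severity_text:string': 'ERROR', // LOG_FIELD_SEVERITY_TEXT_KEY
};
```
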
@@ -26,11 +26,8 @@ export const handlers = [
     res(ctx.status(200), ctx.json(queryRangeSuccessResponse)),
   ),

-  rest.post('http://localhost/api/v2/services', (req, res, ctx) =>
-    res(
-      ctx.status(200),
-      ctx.json({ status: 'success', data: serviceSuccessResponse }),
-    ),
+  rest.post('http://localhost/api/v1/services', (req, res, ctx) =>
+    res(ctx.status(200), ctx.json(serviceSuccessResponse)),
   ),

   rest.post(

@@ -10,7 +10,8 @@ import QuickFilters from 'components/QuickFilters/QuickFilters';
 import { QuickFiltersSource, SignalType } from 'components/QuickFilters/types';
 import WarningPopover from 'components/WarningPopover/WarningPopover';
 import { LOCALSTORAGE } from 'constants/localStorage';
-import { PANEL_TYPES } from 'constants/queryBuilder';
+import { QueryParams } from 'constants/query';
+import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
 import LogExplorerQuerySection from 'container/LogExplorerQuerySection';
 import LogsExplorerViewsContainer from 'container/LogsExplorerViews';
 import {

@@ -24,33 +25,34 @@ import RightToolbarActions from 'container/QueryBuilder/components/ToolbarAction
 import Toolbar from 'container/Toolbar/Toolbar';
 import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
 import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
-import {
-  ICurrentQueryData,
-  useHandleExplorerTabChange,
-} from 'hooks/useHandleExplorerTabChange';
+import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
+import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
 import useUrlQueryData from 'hooks/useUrlQueryData';
-import { defaultTo, isEmpty, isEqual, isNull } from 'lodash-es';
+import { isEmpty, isEqual, isNull } from 'lodash-es';
 import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
 import { EventSourceProvider } from 'providers/EventSource';
 import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
+import { useSearchParams } from 'react-router-dom-v5-compat';
 import { Warning } from 'types/api';
+import { Query } from 'types/api/queryBuilder/queryBuilderData';
 import { DataSource } from 'types/common/queryBuilder';
 import {
-  explorerViewToPanelType,
-  panelTypeToExplorerView,
+  getExplorerViewForPanelType,
+  getExplorerViewFromUrl,
 } from 'utils/explorerUtils';

 import { ExplorerViews } from './utils';

 function LogsExplorer(): JSX.Element {
+  const [searchParams] = useSearchParams();
   const [showLiveLogs, setShowLiveLogs] = useState<boolean>(false);

   // Get panel type from URL
   const panelTypesFromUrl = useGetPanelTypesQueryParam(PANEL_TYPES.LIST);

-  const [selectedView, setSelectedView] = useState<ExplorerViews>(
-    () => panelTypeToExplorerView[panelTypesFromUrl],
+  const [selectedView, setSelectedView] = useState<ExplorerViews>(() =>
+    getExplorerViewFromUrl(searchParams, panelTypesFromUrl),
   );
   const { logs } = usePreferenceContext();
   const { preferences } = logs;

@@ -65,7 +67,30 @@ function LogsExplorer(): JSX.Element {
     return true;
   });

-  const { handleRunQuery, handleSetConfig } = useQueryBuilder();
+  // Update selected view when panel type from URL changes
+  useEffect(() => {
+    if (panelTypesFromUrl) {
+      const newView = getExplorerViewForPanelType(panelTypesFromUrl);
+      if (newView && newView !== selectedView) {
+        setSelectedView(newView);
+      }
+    }
+  }, [panelTypesFromUrl, selectedView]);
+
+  // Update URL when selectedView changes (without triggering re-renders)
+  useEffect(() => {
+    const url = new URL(window.location.href);
+    url.searchParams.set(QueryParams.selectedExplorerView, selectedView);
+    window.history.replaceState({}, '', url.toString());
+  }, [selectedView]);
+
+  const {
+    handleRunQuery,
+    handleSetConfig,
+    updateAllQueriesOperators,
+    currentQuery,
+    updateQueriesData,
+  } = useQueryBuilder();

   const { handleExplorerTabChange } = useHandleExplorerTabChange();

@@ -77,12 +102,49 @@ function LogsExplorer(): JSX.Element {

   const [warning, setWarning] = useState<Warning | undefined>(undefined);

+  const [shouldReset, setShouldReset] = useState(false);
+
+  const [defaultQuery, setDefaultQuery] = useState<Query>(() =>
+    updateAllQueriesOperators(
+      initialQueriesMap.logs,
+      PANEL_TYPES.LIST,
+      DataSource.LOGS,
+    ),
+  );
+
   const handleChangeSelectedView = useCallback(
-    (view: ExplorerViews, querySearchParameters?: ICurrentQueryData): void => {
-      handleSetConfig(
-        defaultTo(explorerViewToPanelType[view], PANEL_TYPES.LIST),
-        DataSource.LOGS,
-      );
+    (view: ExplorerViews): void => {
+      if (selectedView === ExplorerViews.LIST) {
+        handleSetConfig(PANEL_TYPES.LIST, DataSource.LOGS);
+      }
+      if (view === ExplorerViews.LIST) {
+        if (
+          selectedView !== ExplorerViews.LIST &&
+          currentQuery?.builder?.queryData?.[0]
+        ) {
+          const filterToRetain = currentQuery.builder.queryData[0].filter;
+
+          const newDefaultQuery = updateAllQueriesOperators(
+            initialQueriesMap.logs,
+            PANEL_TYPES.LIST,
+            DataSource.LOGS,
+          );
+
+          const newListQuery = updateQueriesData(
+            newDefaultQuery,
+            'queryData',
+            (item, index) => {
+              if (index === 0) {
+                return { ...item, filter: filterToRetain };
+              }
+              return item;
+            },
+          );
+          setDefaultQuery(newListQuery);
+        }
+        setShouldReset(true);
+      }

       setSelectedView(view);

@@ -91,13 +153,38 @@ function LogsExplorer(): JSX.Element {
       }

       handleExplorerTabChange(
-        explorerViewToPanelType[view],
-        querySearchParameters,
+        view === ExplorerViews.TIMESERIES ? PANEL_TYPES.TIME_SERIES : view,
       );
     },
-    [handleSetConfig, handleExplorerTabChange, setSelectedView],
+    [
+      handleSetConfig,
+      handleExplorerTabChange,
+      selectedView,
+      currentQuery,
+      updateAllQueriesOperators,
+      updateQueriesData,
+      setSelectedView,
+    ],
   );

+  useShareBuilderUrl({
+    defaultValue: defaultQuery,
+    forceReset: shouldReset,
+  });
+
+  useEffect(() => {
+    if (shouldReset) {
+      setShouldReset(false);
+      setDefaultQuery(
+        updateAllQueriesOperators(
+          initialQueriesMap.logs,
+          PANEL_TYPES.LIST,
+          DataSource.LOGS,
+        ),
+      );
+    }
+  }, [shouldReset, updateAllQueriesOperators]);
+
   const handleFilterVisibilityChange = (): void => {
     setLocalStorageApi(
       LOCALSTORAGE.SHOW_LOGS_QUICK_FILTERS,

@@ -312,12 +399,12 @@ function LogsExplorer(): JSX.Element {
           </div>
           <div className="logs-explorer-views">
             <LogsExplorerViewsContainer
+              selectedView={selectedView}
               listQueryKeyRef={listQueryKeyRef}
               chartQueryKeyRef={chartQueryKeyRef}
               setIsLoadingQueries={setIsLoadingQueries}
               setWarning={setWarning}
               showLiveLogs={showLiveLogs}
-              handleChangeSelectedView={handleChangeSelectedView}
             />
           </div>
         </div>

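Editor's note: the second effect added to LogsExplorer above writes the selected view into the URL without going through the router. A minimal standalone sketch of that pattern; the literal 'selectedExplorerView' mirrors QueryParams.selectedExplorerView from the diff, and the function name is hypothetical:

```typescript
// Write the current explorer view into the query string without triggering
// a router navigation (and therefore without a re-render).
function writeSelectedViewToUrl(selectedView: string): void {
  const url = new URL(window.location.href);
  url.searchParams.set('selectedExplorerView', selectedView);
  window.history.replaceState({}, '', url.toString());
}

// Example: writeSelectedViewToUrl('list');
```
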
@@ -19,6 +19,8 @@ function MetricsApplication(): JSX.Element {
     servicename: string;
   }>();

+  const servicename = decodeURIComponent(encodedServiceName);
+
   const activeKey = useMetricsApplicationTabKey();

   const urlQuery = useUrlQuery();

@@ -44,7 +46,7 @@ function MetricsApplication(): JSX.Element {

   const onTabChange = (tab: string): void => {
     urlQuery.set(QueryParams.tab, tab);
-    safeNavigate(`/services/${encodedServiceName}?${urlQuery.toString()}`);
+    safeNavigate(`/services/${servicename}?${urlQuery.toString()}`);
   };

   return (

@@ -15,7 +15,7 @@ import { FilterSelect } from 'components/CeleryOverview/CeleryOverviewConfigOpti
 import { QueryParams } from 'constants/query';
 import { initialQueriesMap } from 'constants/queryBuilder';
 import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
-import { ChevronDown, PencilLine } from 'lucide-react';
+import { ChevronDown, HardHat, PencilLine } from 'lucide-react';
 import { LatencyPointers } from 'pages/TracesFunnelDetails/constants';
 import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext';
 import { useAppContext } from 'providers/App/App';

@@ -194,6 +194,7 @@ function FunnelStep({
             }
             hasPopupContainer={false}
             placeholder="Search for filters..."
+            suffixIcon={<HardHat size={12} color="var(--bg-vanilla-400)" />}
             rootClassName="traces-funnel-where-filter"
           />
         </Form.Item>

@@ -1,17 +1,12 @@
-:root {
-  --bg-vanilla-100-rgb: 255, 255, 255;
-}
 .funnel-table {
   border-radius: 3px;
   border: 1px solid var(--bg-slate-500);
-  table {
-    background: linear-gradient(
-        0deg,
-        rgba(171, 189, 255, 0.01) 0%,
-        rgba(171, 189, 255, 0.01) 100%
-      ),
-      #0b0c0e;
-  }
+  background: linear-gradient(
+      0deg,
+      rgba(171, 189, 255, 0.01) 0%,
+      rgba(171, 189, 255, 0.01) 100%
+    ),
+    #0b0c0e;

   &__header {
     padding: 12px 14px 12px;

@@ -102,7 +97,7 @@
   }

   .table-row-dark {
-    background: rgba(var(--bg-vanilla-100-rgb), 0.01);
+    background: var(--bg-ink-300);
   }

   .trace-id-cell {

@@ -16,6 +16,6 @@ export const topTracesTableColumns = [
     title: 'STEP TRANSITION DURATION',
     dataIndex: 'duration_ms',
     key: 'duration_ms',
-    render: (value: string): string => getYAxisFormattedValue(`${value}`, 'ms'),
+    render: (value: string): string => getYAxisFormattedValue(value, 'ms'),
   },
 ];

@@ -136,7 +136,6 @@ export interface Widgets extends IBaseWidget {
   query: Query;
   renderColumnCell?: QueryTableProps['renderColumnCell'];
   customColTitles?: Record<string, string>;
-  hiddenColumns?: string[];
 }

 export interface PromQLWidgets extends IBaseWidget {

@@ -1,5 +1,7 @@
 export interface IField {
   name: string;
+  displayName: string;
+  key: string;
   type: string;
   dataType: string;
 }

@@ -128,7 +128,11 @@ export interface VariableItem {

 export interface TelemetryFieldKey {
   name: string;
+  displayName?: string;
+  // display name can change dynamically depending on if there's a conflicting field with the same name and is only meant for UI display
   key?: string;
+  // key is a unique identifier generated for each field, used for comparisons and selections
+  // key = fieldContext.name:fieldDataType
   description?: string;
   unit?: string;
   signal?: SignalType;

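Editor's note: the comments added to TelemetryFieldKey above describe the key as fieldContext.name:fieldDataType. A hedged sketch of a helper that builds such a key; the helper name is hypothetical and not part of this change:

```typescript
// key = fieldContext.name:fieldDataType, e.g. 'resource.service.name:string'
function buildTelemetryFieldKey(
  fieldContext: string,
  name: string,
  fieldDataType: string,
): string {
  return `${fieldContext}.${name}:${fieldDataType}`;
}

// buildTelemetryFieldKey('resource', 'service.name', 'string') === 'resource.service.name:string'
```
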
@@ -26,16 +26,7 @@ describe('extractQueryPairs', () => {
       valuesPosition: [],
       hasNegation: true,
       isMultiValue: false,
-      position: {
-        keyStart: 0,
-        keyEnd: 5,
-        negationEnd: 9,
-        negationStart: 7,
-        operatorEnd: 16,
-        operatorStart: 11,
-        valueEnd: undefined,
-        valueStart: undefined,
-      },
+      position: expect.any(Object),
       isComplete: false,
     },
     {

@@ -46,16 +37,7 @@ describe('extractQueryPairs', () => {
       valuesPosition: [],
       hasNegation: true,
       isMultiValue: false,
-      position: {
-        keyEnd: 25,
-        keyStart: 22,
-        negationEnd: 29,
-        negationStart: 27,
-        operatorEnd: 34,
-        operatorStart: 31,
-        valueEnd: 42,
-        valueStart: 36,
-      },
+      position: expect.any(Object),
       isComplete: true,
     },
   ]);

@@ -72,11 +54,12 @@ describe('extractQueryPairs', () => {
       isComplete: true,
       value: expect.stringMatching(/^\(.*\)$/),
       valueList: ['1', '2', '3'],
-      valuesPosition: [
-        { start: 7, end: 7 },
-        { start: 10, end: 10 },
-        { start: 13, end: 13 },
-      ],
+      valuesPosition: expect.arrayContaining([
+        expect.objectContaining({
+          start: expect.any(Number),
+          end: expect.any(Number),
+        }),
+      ]),
     }),
   ]);
 });

@@ -92,31 +75,6 @@ describe('extractQueryPairs', () => {
       isComplete: true,
       value: expect.stringMatching(/^\[.*\]$/),
       valueList: ["'a'", "'b'", "'c'"],
-      valuesPosition: [
-        { start: 11, end: 13 },
-        { start: 18, end: 20 },
-        { start: 25, end: 27 },
-      ],
-    }),
-  ]);
-});
-
-test('should extract correct query pairs when the query has space at the start of the value', () => {
-  const input = " label IN [ 'a' , 'b' , 'c' ]";
-  const result = extractQueryPairs(input);
-  expect(result).toEqual([
-    expect.objectContaining({
-      key: 'label',
-      operator: 'IN',
-      isMultiValue: true,
-      isComplete: true,
-      value: expect.stringMatching(/^\[.*\]$/),
-      valueList: ["'a'", "'b'", "'c'"],
-      valuesPosition: [
-        { start: 13, end: 15 },
-        { start: 20, end: 22 },
-        { start: 27, end: 29 },
-      ],
     }),
   ]);
 });

@@ -15,13 +15,6 @@ export const panelTypeToExplorerView: Record<PANEL_TYPES, ExplorerViews> = {
   [PANEL_TYPES.EMPTY_WIDGET]: ExplorerViews.LIST,
 };

-export const explorerViewToPanelType = {
-  [ExplorerViews.LIST]: PANEL_TYPES.LIST,
-  [ExplorerViews.TIMESERIES]: PANEL_TYPES.TIME_SERIES,
-  [ExplorerViews.TRACE]: PANEL_TYPES.TRACE,
-  [ExplorerViews.TABLE]: PANEL_TYPES.TABLE,
-} as Record<ExplorerViews, PANEL_TYPES>;
-
 /**
  * Get the explorer view based on panel type from URL or saved view
  * @param searchParams - URL search parameters

@@ -13832,6 +13832,11 @@ ora@^5.4.1:
     strip-ansi "^6.0.0"
     wcwidth "^1.0.1"

+os-tmpdir@~1.0.2:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
+  integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==
+
 outvariant@^1.2.1, outvariant@^1.4.0:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.0.tgz#e742e4bda77692da3eca698ef5bfac62d9fba06e"

@@ -17320,10 +17325,12 @@ tinycolor2@1, tinycolor2@1.6.0, tinycolor2@^1.6.0:
   resolved "https://registry.yarnpkg.com/tinycolor2/-/tinycolor2-1.6.0.tgz#f98007460169b0263b97072c5ae92484ce02d09e"
   integrity sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==

-tmp@0.2.4, tmp@^0.0.33:
-  version "0.2.4"
-  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.4.tgz#c6db987a2ccc97f812f17137b36af2b6521b0d13"
-  integrity sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==
+tmp@^0.0.33:
+  version "0.0.33"
+  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
+  integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==
+  dependencies:
+    os-tmpdir "~1.0.2"

 tmpl@1.0.5:
   version "1.0.5"

go.mod (2 changes)
@@ -9,11 +9,11 @@ require (
   github.com/DATA-DOG/go-sqlmock v1.5.2
   github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
   github.com/SigNoz/signoz-otel-collector v0.129.4
+  github.com/allegro/bigcache/v3 v3.1.0
   github.com/antlr4-go/antlr/v4 v4.13.1
   github.com/antonmedv/expr v1.15.3
   github.com/cespare/xxhash/v2 v2.3.0
   github.com/coreos/go-oidc/v3 v3.14.1
-  github.com/dgraph-io/ristretto/v2 v2.3.0
   github.com/dustin/go-humanize v1.0.1
   github.com/go-co-op/gocron v1.30.1
   github.com/go-openapi/runtime v0.28.0

go.sum (6 changes)
@@ -118,6 +118,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
 github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
 github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b h1:mimo19zliBX/vSQ6PWWSL9lK8qwHozUj03+zLoEB8O0=
 github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4vN3H1qMT+8wDmzjAcDONcgo2/SZ/TyfdUOFs=
+github.com/allegro/bigcache/v3 v3.1.0 h1:H2Vp8VOvxcrB91o86fUSVJFqeuz8kpyyB02eH3bSzwk=
+github.com/allegro/bigcache/v3 v3.1.0/go.mod h1:aPyh7jEvrog9zAwx5N7+JUQX5dZTSGpxF1LAR4dr35I=
 github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
 github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
 github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=

@@ -209,10 +211,6 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/dennwc/varint v1.0.0 h1:kGNFFSSw8ToIy3obO/kKr8U9GZYUAxQEVuix4zfDWzE=
 github.com/dennwc/varint v1.0.0/go.mod h1:hnItb35rvZvJrbTALZtY/iQfDs48JKRG1RPpgziApxA=
-github.com/dgraph-io/ristretto/v2 v2.3.0 h1:qTQ38m7oIyd4GAed/QkUZyPFNMnvVWyazGXRwvOt5zk=
-github.com/dgraph-io/ristretto/v2 v2.3.0/go.mod h1:gpoRV3VzrEY1a9dWAYV6T1U7YzfgttXdd/ZzL1s9OZM=
-github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
-github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
 github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
 github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
 github.com/digitalocean/godo v1.144.0 h1:rDCsmpwcDe5egFQ3Ae45HTde685/GzX037mWRMPufW0=

@@ -18,8 +18,6 @@ type AuthZ interface {
   // CheckWithTupleCreation takes upon the responsibility for generating the tuples alongside everything Check does.
   CheckWithTupleCreation(context.Context, authtypes.Claims, valuer.UUID, authtypes.Relation, authtypes.Relation, authtypes.Typeable, []authtypes.Selector) error

-  CheckWithTupleCreationWithoutClaims(context.Context, valuer.UUID, authtypes.Relation, authtypes.Relation, authtypes.Typeable, []authtypes.Selector) error
-
   // Batch Check returns error when the upstream authorization server is unavailable or for all the tuples of subject (s) doesn't have relation (r) on object (o).
   BatchCheck(context.Context, []*openfgav1.TupleKey) error

@@ -2,7 +2,6 @@ package openfgaauthz

 import (
   "context"
-  "strconv"
   "sync"

   authz "github.com/SigNoz/signoz/pkg/authz"

@@ -95,153 +94,6 @@ func (provider *provider) Stop(ctx context.Context) error {
   return nil
 }

-func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
-  storeID, modelID := provider.getStoreIDandModelID()
-  checkResponse, err := provider.openfgaServer.Check(
-    ctx,
-    &openfgav1.CheckRequest{
-      StoreId: storeID,
-      AuthorizationModelId: modelID,
-      TupleKey: &openfgav1.CheckRequestTupleKey{
-        User: tupleReq.User,
-        Relation: tupleReq.Relation,
-        Object: tupleReq.Object,
-      },
-    })
-  if err != nil {
-    return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
-  }
-
-  if !checkResponse.Allowed {
-    return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
-  }
-
-  return nil
-}
-
-func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
-  storeID, modelID := provider.getStoreIDandModelID()
-  batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
-  for idx, tuple := range tupleReq {
-    batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
-      TupleKey: &openfgav1.CheckRequestTupleKey{
-        User: tuple.User,
-        Relation: tuple.Relation,
-        Object: tuple.Object,
-      },
-      // the batch check response is map[string] keyed by correlationID.
-      CorrelationId: strconv.Itoa(idx),
-    })
-  }
-
-  checkResponse, err := provider.openfgaServer.BatchCheck(
-    ctx,
-    &openfgav1.BatchCheckRequest{
-      StoreId: storeID,
-      AuthorizationModelId: modelID,
-      Checks: batchCheckItems,
-    })
-  if err != nil {
-    return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
-  }
-
-  for _, checkResponse := range checkResponse.Result {
-    if checkResponse.GetAllowed() {
-      return nil
-    }
-  }
-
-  return errors.New(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "")
-
-}
-
-func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, translation authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector) error {
-  subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
-  if err != nil {
-    return err
-  }
-
-  tuples, err := authtypes.TypeableOrganization.Tuples(subject, translation, []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeOrganization, orgID.StringValue())}, orgID)
-  if err != nil {
-    return err
-  }
-
-  err = provider.BatchCheck(ctx, tuples)
-  if err != nil {
-    return err
-  }
-
-  return nil
-}
-
-func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, translation authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector) error {
-  subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
-  if err != nil {
-    return err
-  }
-
-  tuples, err := authtypes.TypeableOrganization.Tuples(subject, translation, []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeOrganization, orgID.StringValue())}, orgID)
-  if err != nil {
-    return err
-  }
-
-  err = provider.BatchCheck(ctx, tuples)
-  if err != nil {
-    return err
-  }
-
-  return nil
-}
-
-func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
-  storeID, modelID := provider.getStoreIDandModelID()
-  deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
-  for idx, tuple := range deletions {
-    deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
-  }
-
-  _, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
-    StoreId: storeID,
-    AuthorizationModelId: modelID,
-    Writes: func() *openfgav1.WriteRequestWrites {
-      if len(additions) == 0 {
-        return nil
-      }
-      return &openfgav1.WriteRequestWrites{
-        TupleKeys: additions,
-        OnDuplicate: "ignore",
-      }
-    }(),
-    Deletes: func() *openfgav1.WriteRequestDeletes {
-      if len(deletionTuplesWithoutCondition) == 0 {
-        return nil
-      }
-      return &openfgav1.WriteRequestDeletes{
-        TupleKeys: deletionTuplesWithoutCondition,
-        OnMissing: "ignore",
-      }
-    }(),
-  })
-
-  return err
-}
-
-func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
-  storeID, modelID := provider.getStoreIDandModelID()
-  response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
-    StoreId: storeID,
-    AuthorizationModelId: modelID,
-    User: subject,
-    Relation: relation.StringValue(),
-    Type: typeable.Type().StringValue(),
-  })
-  if err != nil {
-    return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
-  }
-
-  return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
-}
-
 func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
   stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
   if err != nil {

@@ -324,12 +176,112 @@ func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, a

 }

-func (provider *provider) getStoreIDandModelID() (string, string) {
-  provider.mtx.RLock()
-  defer provider.mtx.RUnlock()
+func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
+  checkResponse, err := provider.openfgaServer.Check(
+    ctx,
+    &openfgav1.CheckRequest{
+      StoreId: provider.storeID,
+      AuthorizationModelId: provider.modelID,
+      TupleKey: &openfgav1.CheckRequestTupleKey{
+        User: tupleReq.User,
+        Relation: tupleReq.Relation,
+        Object: tupleReq.Object,
+      },
+    })
+  if err != nil {
+    return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
+  }

-  storeID := provider.storeID
-  modelID := provider.modelID
+  if !checkResponse.Allowed {
+    return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
+  }

-  return storeID, modelID
+  return nil
+}
+
+func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
+  batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
+  for _, tuple := range tupleReq {
+    batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
+      TupleKey: &openfgav1.CheckRequestTupleKey{
+        User: tuple.User,
+        Relation: tuple.Relation,
+        Object: tuple.Object,
+      },
+    })
+  }
+
+  checkResponse, err := provider.openfgaServer.BatchCheck(
+    ctx,
+    &openfgav1.BatchCheckRequest{
+      StoreId: provider.storeID,
+      AuthorizationModelId: provider.modelID,
+      Checks: batchCheckItems,
+    })
+  if err != nil {
+    return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
+  }
+
+  for _, checkResponse := range checkResponse.Result {
+    if checkResponse.GetAllowed() {
+      return nil
+    }
+  }
+
+  return errors.New(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "")
+
+}
+
+func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, translation authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector) error {
+  subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
+  if err != nil {
+    return err
+  }
+
+  tuples, err := authtypes.TypeableOrganization.Tuples(subject, translation, []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeOrganization, orgID.StringValue())}, orgID)
+  if err != nil {
+    return err
+  }
+
+  err = provider.BatchCheck(ctx, tuples)
+  if err != nil {
+    return err
+  }
+
+  return nil
+}
+
+func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
+  deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
+  for idx, tuple := range deletions {
+    deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
+  }
+
+  _, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
+    StoreId: provider.storeID,
+    AuthorizationModelId: provider.modelID,
+    Writes: &openfgav1.WriteRequestWrites{
+      TupleKeys: additions,
+    },
+    Deletes: &openfgav1.WriteRequestDeletes{
+      TupleKeys: deletionTuplesWithoutCondition,
+    },
+  })
+
+  return err
+}
+
+func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
+  response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
+    StoreId: provider.storeID,
+    AuthorizationModelId: provider.modelID,
+    User: subject,
+    Relation: relation.StringValue(),
+    Type: typeable.Type().StringValue(),
+  })
+  if err != nil {
+    return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
+  }
+
+  return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
 }

Some files were not shown because too many files have changed in this diff.