Compare commits
90 Commits
platform-p
...
enh/dashbo
| Author | SHA1 | Date |
|---|---|---|
|  | 17dec71695 |  |
|  | c7c2d2a7ef |  |
|  | 0cfb809605 |  |
|  | 6a378ed7b4 |  |
|  | 8e41847523 |  |
|  | 779df62093 |  |
|  | 3763794531 |  |
|  | e9fa68e1f3 |  |
|  | 7bd3e1c453 |  |
|  | a48455b2b3 |  |
|  | fbb66f14ba |  |
|  | 54b67d9cfd |  |
|  | 1a193015a7 |  |
|  | 245179cbf7 |  |
|  | dbb6b333c8 |  |
|  | 56f8e53d88 |  |
|  | 2f4e371dac |  |
|  | db75ec56bc |  |
|  | 02755a6527 |  |
|  | 9f089e0784 |  |
|  | fb9a7ad3cd |  |
|  | ad631d70b6 |  |
|  | c44efeab33 |  |
|  | e9743fa7ac |  |
|  | b7ece08d3e |  |
|  | e5f4f5cc72 |  |
|  | 4437630127 |  |
|  | 89639b239e |  |
|  | 785ae9f0bd |  |
|  | 8752022cef |  |
|  | c7e4a9c45d |  |
|  | bf92c92204 |  |
|  | bd63633be7 |  |
|  | 1158e1199b |  |
|  | 0a60c49314 |  |
|  | c25e3beb81 |  |
|  | c9e0f2b9ca |  |
|  | 6d831849c1 |  |
|  | 83eeb46f99 |  |
|  | 287558dc9d |  |
|  | 83aad793c2 |  |
|  | 3eff689c85 |  |
|  | f5bcd65e2e |  |
|  | e7772d93af |  |
|  | bbf987ebd7 |  |
|  | 105c3a3b8c |  |
|  | c1a4a5b8db |  |
|  | c9591f4341 |  |
|  | fd216fdee1 |  |
|  | f5bf4293a1 |  |
|  | 155a44a25d |  |
|  | 4b21c9d5f9 |  |
|  | 5ef0a18867 |  |
|  | c8266d1aec |  |
|  | adfd16ce1b |  |
|  | 6db74a5585 |  |
|  | f8e0db0085 |  |
|  | 01e0b36d62 |  |
|  | e90bb016f7 |  |
|  | bdecbfb7f5 |  |
|  | 3dced2b082 |  |
|  | 1285666087 |  |
|  | 1655397eaa |  |
|  | 718360a966 |  |
|  | 2f5995b071 |  |
|  | a061c9de0f |  |
|  | 7b1ca9a1a6 |  |
|  | 0d1131e99f |  |
|  | 44d1d0f994 |  |
|  | bdce97a727 |  |
|  | 5f8cfbe474 |  |
|  | 55c2f98768 |  |
|  | 624bb5cc62 |  |
|  | 95f8fa1566 |  |
|  | fa97e63912 |  |
|  | c8419c1f82 |  |
|  | e05ede3978 |  |
|  | 437d0d1345 |  |
|  | 64e379c413 |  |
|  | d05d394f57 |  |
|  | b4e5085a5a |  |
|  | 88f7502a15 |  |
|  | b0442761ac |  |
|  | d539ca9bab |  |
|  | c8194e9abb |  |
|  | c919102fee |  |
|  | 765370752c |  |
|  | db5c102f14 |  |
|  | 8a0c5bc3c8 |  |
|  | a28ccffd01 |  |
@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
container_name: schema-migrator-async
command:
- async
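
The hunks above only bump image tags (schema-migrator v0.129.7 → v0.129.11, and the same pattern repeats for the other services later in this compare). On an existing deployment, applying a tag-only bump is typically just a pull-and-recreate; a minimal sketch, assuming you run the stack from the directory containing the updated compose file:

```sh
# Fetch the new image tags and recreate only the containers whose image changed.
docker compose pull
docker compose up -d
```
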
2 .github/CODEOWNERS vendored
@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.

/frontend/ @SigNoz/frontend @YounixM
/frontend/ @YounixM @aks07
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv

7 .github/workflows/build-community.yaml vendored
@@ -3,8 +3,8 @@ name: build-community
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
- "v[0-9]+.[0-9]+.[0-9]+"
- "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"

defaults:
run:
@@ -69,14 +69,13 @@ jobs:
GO_BUILD_CONTEXT: ./cmd/community
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-ldflags='-s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=community
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./cmd/community/Dockerfile.multi-arch
DOCKER_MANIFEST: true
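
For context on the `-ldflags` change in this workflow: the `-X` flags stamp version metadata into package-level variables at build time, and dropping `-linkmode external -extldflags "-static"` means the binary is no longer forced through the external linker into a static executable. A minimal local sketch of the same idea, using the variable path from the workflow above (the version value is illustrative):

```sh
# Stamp a version into pkg/version at build time (illustrative value).
go build -tags timetzdata \
  -ldflags "-s -w -X github.com/SigNoz/signoz/pkg/version.version=v0.0.0-dev" \
  ./cmd/community
```
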
6 .github/workflows/build-enterprise.yaml vendored
@@ -84,7 +84,7 @@ jobs:
JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
JS_INPUT_ARTIFACT_PATH: frontend/.env
JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
JS_OUTPUT_ARTIFACT_PATH: frontend/build
JS_OUTPUT_ARTIFACT_PATH: frontend/build
DOCKER_BUILD: false
DOCKER_MANIFEST: false
go-build:
@@ -99,7 +99,7 @@ jobs:
GO_BUILD_CONTEXT: ./cmd/enterprise
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-ldflags='-s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
@@ -107,10 +107,8 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
DOCKER_MANIFEST: true
6 .github/workflows/build-staging.yaml vendored
@@ -98,7 +98,7 @@ jobs:
GO_BUILD_CONTEXT: ./cmd/enterprise
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-ldflags='-s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
@@ -106,10 +106,8 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
DOCKER_MANIFEST: true
@@ -125,4 +123,4 @@ jobs:
GITHUB_SILENT: true
GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
GITHUB_EVENT_NAME: releaser
GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
GITHUB_EVENT_PAYLOAD: '{"deployment": "${{ needs.prepare.outputs.deployment }}", "signoz_version": "${{ needs.prepare.outputs.version }}"}'
1 .github/workflows/integrationci.yaml vendored
@@ -17,6 +17,7 @@ jobs:
- bootstrap
- passwordauthn
- callbackauthn
- cloudintegrations
- querier
- ttl
sqlstore-provider:
@@ -1,39 +1,63 @@
version: "2"
linters:
default: standard
default: none
enable:
- bodyclose
- depguard
- errcheck
- forbidigo
- govet
- iface
- ineffassign
- misspell
- nilnil
- sloglint
- depguard
- iface
- unparam
- forbidigo

linters-settings:
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
msg-style: lowercased
key-naming-case: snake
depguard:
rules:
nozap:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
noerrors:
deny:
- pkg: "errors"
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
iface:
enable:
- identical
issues:
exclude-dirs:
- "pkg/query-service"
- "ee/query-service"
- "scripts/"
- unused
settings:
depguard:
rules:
noerrors:
deny:
- pkg: errors
desc: Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead.
nozap:
deny:
- pkg: go.uber.org/zap
desc: Do not use zap logger. Use slog instead.
forbidigo:
forbid:
- pattern: fmt.Errorf
- pattern: ^(fmt\.Print.*|print|println)$
iface:
enable:
- identical
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
key-naming-case: snake
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- pkg/query-service
- ee/query-service
- scripts/
- tmp/
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$
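
The rewritten lint config above carries the same conventions into the golangci-lint v2 layout: `forbidigo` bans `fmt.Errorf` and `fmt.Print*`, `depguard` bans the standard `errors` package and `zap` in favour of `github.com/SigNoz/signoz/pkg/errors` and `slog`, and `sloglint` requires contextual, key-value, static, lowercase log messages. A minimal sketch of code that satisfies those rules, reusing the calls that appear elsewhere in this diff (`errors.WrapInternalf`, `logger.ErrorContext`); treat it as illustrative rather than the canonical helper set:

```go
package example

import (
	"context"
	"log/slog"

	"github.com/SigNoz/signoz/pkg/errors"
)

// loadConfig wraps failures with the in-repo errors package instead of fmt.Errorf,
// and logs with slog in the context-aware, key-value style that sloglint enforces.
func loadConfig(ctx context.Context, logger *slog.Logger, path string) error {
	if err := parse(path); err != nil {
		// wrap instead of fmt.Errorf, as forbidigo/depguard require
		wrapped := errors.WrapInternalf(err, errors.CodeInternal, "invalid config")
		logger.ErrorContext(ctx, "failed to load config", "path", path, "error", wrapped)
		return wrapped
	}
	return nil
}

func parse(string) error { return nil } // placeholder so the sketch is self-contained
```
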
24 Makefile
@@ -84,10 +84,9 @@ go-run-enterprise: ## Runs the enterprise go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
--config ./conf/prometheus.yml \
--cluster cluster
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go

.PHONY: go-test
go-test: ## Runs go unit tests
@@ -102,10 +101,9 @@ go-run-community: ## Runs the community go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
--config ./conf/prometheus.yml \
--cluster cluster
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server

.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -114,9 +112,9 @@ $(GO_BUILD_ARCHS_COMMUNITY): go-build-community-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)-community"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
fi


@@ -127,9 +125,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi

.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
@@ -139,9 +137,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
@mkdir -p $(TARGET_DIR)/$(OS)-$*
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
@if [ $* = "arm64" ]; then \
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
else \
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
fi

##############################################################
@@ -208,4 +206,4 @@ py-lint: ## Run lint for integration tests

.PHONY: py-test
py-test: ## Runs integration tests
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
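
The Makefile and workflow changes above both drop `-linkmode external -extldflags '-static'`, so the resulting binaries are no longer forced through the external linker into fully static executables. A quick, assumption-light way to see the effect of a change like this on a Linux build (the path below is illustrative and depends on your `TARGET_DIR`, OS and architecture):

```sh
# Inspect the produced binary: a static build reports "statically linked",
# while a dynamic one lists its shared-library dependencies.
file target/linux-amd64/signoz
ldd target/linux-amd64/signoz || true   # prints "not a dynamic executable" for static builds
```
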
@@ -12,12 +12,6 @@ builds:
- id: signoz
binary: bin/signoz
main: ./cmd/community
env:
- CGO_ENABLED=1
- >-
{{- if eq .Os "linux" }}
{{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
{{- end }}
goos:
- linux
- darwin
@@ -36,8 +30,6 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
- >-
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
mod_timestamp: "{{ .CommitTimestamp }}"
tags:
- timetzdata
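
This GoReleaser change mirrors the Makefile and workflow edits: the CGO/cross-compiler env block and the Linux-only static `ldflags` template are removed from the community build definition. If you want to sanity-check a config like this locally, a snapshot build is the usual route (flags assume a reasonably recent GoReleaser; older versions use `--rm-dist` instead of `--clean`):

```sh
# Build all configured targets locally without publishing anything.
goreleaser build --snapshot --clean
```
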
@@ -5,9 +5,12 @@ import (
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/cmd"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
|
||||
@@ -76,6 +79,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
|
||||
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
|
||||
},
|
||||
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
|
||||
@@ -12,12 +12,6 @@ builds:
|
||||
- id: signoz
|
||||
binary: bin/signoz
|
||||
main: ./cmd/enterprise
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- >-
|
||||
{{- if eq .Os "linux" }}
|
||||
{{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
|
||||
{{- end }}
|
||||
goos:
|
||||
- linux
|
||||
- darwin
|
||||
@@ -37,11 +31,8 @@ builds:
|
||||
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
|
||||
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
|
||||
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
|
||||
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
|
||||
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
|
||||
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
|
||||
- >-
|
||||
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
|
||||
mod_timestamp: "{{ .CommitTimestamp }}"
|
||||
tags:
|
||||
- timetzdata
|
||||
|
||||
@@ -1,18 +1,10 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/cmd"
|
||||
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
|
||||
)
|
||||
|
||||
func main() {
|
||||
@@ -22,21 +14,5 @@ func main() {
|
||||
// register a list of commands to the root command
|
||||
registerServer(cmd.RootCmd, logger)
|
||||
|
||||
// TODO(grandwizard28): DRY this code
|
||||
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
|
||||
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
|
||||
logger.ErrorContext(context.TODO(), "failed to add postgressqlstore factory", "error", err)
|
||||
panic(err)
|
||||
}
|
||||
|
||||
cmd.RegisterSQL(cmd.RootCmd, logger, func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] {
|
||||
existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore)
|
||||
if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return existingFactories
|
||||
}, sqlstoreFactories)
|
||||
|
||||
cmd.Execute(logger)
|
||||
}
|
||||
|
||||
@@ -8,6 +8,8 @@ import (
|
||||
"github.com/SigNoz/signoz/cmd"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
|
||||
@@ -17,6 +19,7 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
@@ -105,6 +108,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
|
||||
return authNs, nil
|
||||
},
|
||||
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
|
||||
155
cmd/sql.go
155
cmd/sql.go
@@ -1,155 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlmigration"
|
||||
"github.com/SigNoz/signoz/pkg/sqlmigrator"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// TODO(grandwizard28): DRY this code
|
||||
func RegisterSQL(parentCmd *cobra.Command, logger *slog.Logger, sqlSchemaProviderFactories func(sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]], sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]]) {
|
||||
sqlCmd := &cobra.Command{
|
||||
Use: "sql",
|
||||
Short: "Run commands to interact with the SQL",
|
||||
}
|
||||
|
||||
migrateCmd := &cobra.Command{
|
||||
Use: "migrate",
|
||||
Short: "Run migrations for the SQL database",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
config, err := NewSigNozConfig(ctx, logger, signoz.DeprecatedFlags{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Initialize instrumentation
|
||||
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Get the provider settings from instrumentation
|
||||
providerSettings := instrumentation.ToProviderSettings()
|
||||
|
||||
// Initialize sqlstore from the available sqlstore provider factories
|
||||
sqlstore, err := factory.NewProviderFromNamedMap(
|
||||
ctx,
|
||||
providerSettings,
|
||||
config.SQLStore,
|
||||
sqlstoreProviderFactories,
|
||||
config.SQLStore.Provider,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Initialize sqlschema from the available sqlschema provider factories
|
||||
sqlschema, err := factory.NewProviderFromNamedMap(
|
||||
ctx,
|
||||
providerSettings,
|
||||
config.SQLSchema,
|
||||
sqlSchemaProviderFactories(sqlstore),
|
||||
config.SQLStore.Provider,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Run migrations on the sqlstore
|
||||
sqlmigrations, err := sqlmigration.New(
|
||||
ctx,
|
||||
providerSettings,
|
||||
config.SQLMigration,
|
||||
signoz.NewSQLMigrationProviderFactories(sqlstore, sqlschema),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = sqlmigrator.New(ctx, providerSettings, sqlstore, sqlmigrations, config.SQLMigrator).Migrate(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
rollbackCmd := &cobra.Command{
|
||||
Use: "rollback",
|
||||
Short: "Rollback the last migration",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
config, err := NewSigNozConfig(ctx, logger, signoz.DeprecatedFlags{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Initialize instrumentation
|
||||
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Get the provider settings from instrumentation
|
||||
providerSettings := instrumentation.ToProviderSettings()
|
||||
|
||||
// Initialize sqlstore from the available sqlstore provider factories
|
||||
sqlstore, err := factory.NewProviderFromNamedMap(
|
||||
ctx,
|
||||
providerSettings,
|
||||
config.SQLStore,
|
||||
sqlstoreProviderFactories,
|
||||
config.SQLStore.Provider,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Initialize sqlschema from the available sqlschema provider factories
|
||||
sqlschema, err := factory.NewProviderFromNamedMap(
|
||||
ctx,
|
||||
providerSettings,
|
||||
config.SQLSchema,
|
||||
sqlSchemaProviderFactories(sqlstore),
|
||||
config.SQLStore.Provider,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Run migrations on the sqlstore
|
||||
sqlmigrations, err := sqlmigration.New(
|
||||
ctx,
|
||||
providerSettings,
|
||||
config.SQLMigration,
|
||||
signoz.NewSQLMigrationProviderFactories(sqlstore, sqlschema),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = sqlmigrator.New(ctx, providerSettings, sqlstore, sqlmigrations, config.SQLMigrator).Rollback(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
sqlCmd.AddCommand(migrateCmd)
|
||||
sqlCmd.AddCommand(rollbackCmd)
|
||||
parentCmd.AddCommand(sqlCmd)
|
||||
}
|
||||
@@ -1,5 +1,5 @@
##################### SigNoz Configuration Example #####################
#
#
# Do not modify this file
#

@@ -58,7 +58,7 @@ cache:
# The port on which the Redis server is running. Default is usually 6379.
port: 6379
# The password for authenticating with the Redis server, if required.
password:
password:
# The Redis database number to use
db: 0

@@ -71,6 +71,10 @@ sqlstore:
sqlite:
# The path to the SQLite database file.
path: /var/lib/signoz/signoz.db
# Mode is the mode to use for the sqlite database.
mode: delete
# BusyTimeout is the timeout for the sqlite database to wait for a lock.
busy_timeout: 10s

##################### APIServer #####################
apiserver:
@@ -238,7 +242,6 @@ statsreporter:
# Whether to collect identities and traits (emails).
identities: true


##################### Gateway (License only) #####################
gateway:
# The URL of the gateway's api.

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.102.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.11
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
deploy:
restart_policy:
condition: on-failure

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.102.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.11
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.11
deploy:
restart_policy:
condition: on-failure

@@ -1,3 +1,10 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -21,6 +28,10 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -66,6 +77,11 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -77,16 +93,20 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
exporters: [clickhousetraces, signozmeter]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [signozclickhousemetrics, signozmeter]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]

@@ -179,7 +179,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.97.0}
|
||||
image: signoz/signoz:${VERSION:-v0.102.0}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -213,7 +213,7 @@ services:
|
||||
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -239,7 +239,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -250,7 +250,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -111,7 +111,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.97.0}
|
||||
image: signoz/signoz:${VERSION:-v0.102.0}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -144,7 +144,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -166,7 +166,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -178,7 +178,7 @@ services:
|
||||
restart: on-failure
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -1,3 +1,10 @@
|
||||
connectors:
|
||||
signozmeter:
|
||||
metrics_flush_interval: 1h
|
||||
dimensions:
|
||||
- name: service.name
|
||||
- name: deployment.environment
|
||||
- name: host.name
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
@@ -21,6 +28,10 @@ processors:
|
||||
send_batch_size: 10000
|
||||
send_batch_max_size: 11000
|
||||
timeout: 10s
|
||||
batch/meter:
|
||||
send_batch_max_size: 25000
|
||||
send_batch_size: 20000
|
||||
timeout: 1s
|
||||
resourcedetection:
|
||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
||||
detectors: [env, system]
|
||||
@@ -66,6 +77,11 @@ exporters:
|
||||
dsn: tcp://clickhouse:9000/signoz_logs
|
||||
timeout: 10s
|
||||
use_new_schema: true
|
||||
signozclickhousemeter:
|
||||
dsn: tcp://clickhouse:9000/signoz_meter
|
||||
timeout: 45s
|
||||
sending_queue:
|
||||
enabled: false
|
||||
service:
|
||||
telemetry:
|
||||
logs:
|
||||
@@ -77,16 +93,20 @@ service:
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [signozspanmetrics/delta, batch]
|
||||
exporters: [clickhousetraces]
|
||||
exporters: [clickhousetraces, signozmeter]
|
||||
metrics:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [signozclickhousemetrics]
|
||||
exporters: [signozclickhousemetrics, signozmeter]
|
||||
metrics/prometheus:
|
||||
receivers: [prometheus]
|
||||
processors: [batch]
|
||||
exporters: [signozclickhousemetrics]
|
||||
exporters: [signozclickhousemetrics, signozmeter]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [clickhouselogsexporter]
|
||||
exporters: [clickhouselogsexporter, signozmeter]
|
||||
metrics/meter:
|
||||
receivers: [signozmeter]
|
||||
processors: [batch/meter]
|
||||
exporters: [signozclickhousemeter]
|
||||
|
||||
@@ -13,8 +13,6 @@ Before diving in, make sure you have these tools installed:
- Download from [go.dev/dl](https://go.dev/dl/)
- Check [go.mod](../../go.mod#L3) for the minimum version

- **GCC** - Required for CGO dependencies
- Download from [gcc.gnu.org](https://gcc.gnu.org/)

- **Node** - Powers our frontend
- Download from [nodejs.org](https://nodejs.org)

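
The hunk above removes GCC from the contributor prerequisites, which matches the removal of the `CC=aarch64-linux-gnu-gcc` cross-compilation settings from the Makefile and GoReleaser configs earlier in this compare. To confirm your local toolchain against the remaining prerequisites (minimum versions are whatever go.mod and the frontend declare, not pinned here):

```sh
go version
node --version
```
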
@@ -78,12 +78,13 @@ All tables follow a consistent primary key pattern using a `id` column (referenc

## How to write migrations?

For schema migrations, use the [SQLMigration](/pkg/sqlmigration/sqlmigration.go) interface. The migrations are split into multiple packages based on the starting number of the series of the migration. For example, migrations with starting number `100` are in the `s100sqlmigration` package (read as series 100 sql migrations), migrations with starting number `200` are in the `s200sqlmigration` package, and so on. When creating migrations, adhere to these guidelines:
For schema migrations, use the [SQLMigration](/pkg/sqlmigration/sqlmigration.go) interface and write the migration in the same package. When creating migrations, adhere to these guidelines:

- Use the [SQLSchema](/pkg/sqlschema/sqlschema.go) interface to write migrations. SQLSchema is responsible for generating idempotent SQL statements to alter the database schema. For instance, if you want to add a column to the `users` table, you can use the `AddColumn` method to add the column. If the column already exists, the method will return no SQL statements.
- Do not implement **`ON CASCADE` foreign key constraints**. Deletion operations should be handled explicitly in application logic rather than delegated to the database.
- Do not **import types from the types package** in the `sqlmigration` package. Instead, define the required types within the migration package itself. This practice ensures migration stability as the core types evolve over time.
- Do not implement **`Down` migrations**. As the codebase matures, we may introduce this capability, but for now, the `Down` function should remain empty.
- Always write **idempotent** migrations. This means that if the migration is run multiple times, it should not cause an error (see the sketch after this list).
- A migration which is **dependent on the underlying dialect** (sqlite, postgres, etc) should be written as part of the [SQLDialect](/pkg/sqlstore/sqlstore.go) interface. The implementation needs to go in the dialect specific package of the respective database.
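
To make the idempotency guideline concrete: a migration should detect work it has already done and skip it. The sketch below is generic Go using `database/sql` and SQLite syntax, not the repository's `SQLMigration`/`SQLSchema` interfaces (whose exact signatures are not shown in this diff); it only illustrates the check-before-alter pattern those interfaces generate for you.

```go
package main

import (
	"context"
	"database/sql"
)

// addEmailColumn adds users.email only if it is not already there, so running
// the migration twice is harmless (the second run becomes a no-op).
func addEmailColumn(ctx context.Context, db *sql.DB) error {
	var count int
	// pragma_table_info is SQLite-specific; Postgres would consult information_schema.columns.
	err := db.QueryRowContext(ctx,
		`SELECT COUNT(*) FROM pragma_table_info('users') WHERE name = 'email'`).Scan(&count)
	if err != nil {
		return err
	}
	if count > 0 {
		return nil // column already exists; nothing to do
	}
	_, err = db.ExecContext(ctx, `ALTER TABLE users ADD COLUMN email TEXT`)
	return err
}
```
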

## What should I remember?

@@ -103,9 +103,19 @@ Remember to replace the region and ingestion key with proper values as obtained

Both SigNoz and the OTel demo app [the frontend-proxy service, to be accurate] share a common port allocation at 8080. To prevent port allocation conflicts, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run the docker compose command.

Also, both SigNoz and the OTel Demo App have the same `PROMETHEUS_PORT` configured; by default both of them try to start at `9090`, which may cause either of them to fail depending upon which one acquires the port first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.

```sh
ENVOY_PORT=8081 docker compose up -d
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
```

Alternatively, we can modify these values using the `.env` file too, which reduces the command to just:

```sh
docker compose up -d
```
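
If you take the `.env` route mentioned above, the two overrides simply live in the OTel demo's `.env` file (the values mirror the command-line example; the exact file is the demo's standard compose `.env`):

```sh
# .env overrides for running the OTel demo alongside SigNoz
ENVOY_PORT=8081
PROMETHEUS_PORT=9091
```
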

This spins up multiple microservices with OpenTelemetry instrumentation enabled. You can verify this by running:

```sh
docker compose ps -a
```

@@ -48,7 +48,26 @@ func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey)
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -15,18 +15,18 @@ type anonymous
|
||||
|
||||
type role
|
||||
relations
|
||||
define assignee: [user]
|
||||
define assignee: [user, anonymous]
|
||||
|
||||
define read: [user, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
define delete: [user, role#assignee]
|
||||
|
||||
type resources
|
||||
type metaresources
|
||||
relations
|
||||
define create: [user, role#assignee]
|
||||
define list: [user, role#assignee]
|
||||
|
||||
type resource
|
||||
type metaresource
|
||||
relations
|
||||
define read: [user, anonymous, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
@@ -35,6 +35,6 @@ type resource
|
||||
define block: [user, role#assignee]
|
||||
|
||||
|
||||
type telemetry
|
||||
type telemetryresource
|
||||
relations
|
||||
define read: [user, anonymous, role#assignee]
|
||||
define read: [user, role#assignee]
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
package licensing
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
)
|
||||
|
||||
@@ -18,7 +18,7 @@ func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
|
||||
once.Do(func() {
|
||||
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
|
||||
if err := config.Validate(); err != nil {
|
||||
panic(fmt.Errorf("invalid licensing config: %w", err))
|
||||
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid licensing config"))
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@@ -20,6 +20,10 @@ import (
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -99,6 +103,39 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)
|
||||
|
||||
// dashboards
|
||||
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.CreatePublic)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.GetPublic)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.UpdatePublic)).Methods(http.MethodPut)
|
||||
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.DeletePublic)).Methods(http.MethodDelete)
|
||||
|
||||
// public access for dashboards
|
||||
router.HandleFunc("/api/v1/public/dashboards/{id}", am.CheckWithoutClaims(
|
||||
ah.Signoz.Handlers.Dashboard.GetPublicData,
|
||||
authtypes.RelationRead, authtypes.RelationRead,
|
||||
dashboardtypes.TypeableMetaResourcePublicDashboard,
|
||||
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
|
||||
id, err := valuer.NewUUID(mux.Vars(req)["id"])
|
||||
if err != nil {
|
||||
return nil, valuer.UUID{}, err
|
||||
}
|
||||
|
||||
return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
|
||||
})).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/public/dashboards/{id}/widgets/{index}/query_range", am.CheckWithoutClaims(
|
||||
ah.Signoz.Handlers.Dashboard.GetPublicWidgetQueryRange,
|
||||
authtypes.RelationRead, authtypes.RelationRead,
|
||||
dashboardtypes.TypeableMetaResourcePublicDashboard,
|
||||
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
|
||||
id, err := valuer.NewUUID(mux.Vars(req)["id"])
|
||||
if err != nil {
|
||||
return nil, valuer.UUID{}, err
|
||||
}
|
||||
|
||||
return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
|
||||
})).Methods(http.MethodGet)
|
||||
|
||||
// v3
|
||||
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)
|
||||
|
||||
@@ -10,7 +10,6 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
@@ -77,7 +76,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
||||
return
|
||||
}
|
||||
|
||||
ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
|
||||
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
|
||||
if apiErr != nil {
|
||||
RespondError(w, basemodel.WrapApiError(
|
||||
apiErr, "couldn't deduce ingestion url and signoz api url",
|
||||
@@ -186,48 +185,37 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||
return cloudIntegrationUser, nil
|
||||
}
|
||||
|
||||
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
|
||||
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
|
||||
string, string, *basemodel.ApiError,
|
||||
) {
|
||||
url := fmt.Sprintf(
|
||||
"%s%s",
|
||||
strings.TrimSuffix(constants.ZeusURL, "/"),
|
||||
"/v2/deployments/me",
|
||||
)
|
||||
|
||||
// TODO: remove this struct from here
|
||||
type deploymentResponse struct {
|
||||
Status string `json:"status"`
|
||||
Error string `json:"error"`
|
||||
Data struct {
|
||||
Name string `json:"name"`
|
||||
|
||||
ClusterInfo struct {
|
||||
Region struct {
|
||||
DNS string `json:"dns"`
|
||||
} `json:"region"`
|
||||
} `json:"cluster"`
|
||||
} `json:"data"`
|
||||
Name string `json:"name"`
|
||||
ClusterInfo struct {
|
||||
Region struct {
|
||||
DNS string `json:"dns"`
|
||||
} `json:"region"`
|
||||
} `json:"cluster"`
|
||||
}
|
||||
|
||||
resp, apiErr := requestAndParseResponse[deploymentResponse](
|
||||
ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
return "", "", basemodel.WrapApiError(
|
||||
apiErr, "couldn't query for deployment info",
|
||||
)
|
||||
}
|
||||
|
||||
if resp.Status != "success" {
|
||||
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
|
||||
if err != nil {
|
||||
return "", "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't query for deployment info: status: %s, error: %s",
|
||||
resp.Status, resp.Error,
|
||||
"couldn't query for deployment info: error: %w", err,
|
||||
))
|
||||
}
|
||||
|
||||
regionDns := resp.Data.ClusterInfo.Region.DNS
|
||||
deploymentName := resp.Data.Name
|
||||
resp := new(deploymentResponse)
|
||||
|
||||
err = json.Unmarshal(respBytes, resp)
|
||||
if err != nil {
|
||||
return "", "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't unmarshal deployment info response: error: %w", err,
|
||||
))
|
||||
}
|
||||
|
||||
regionDns := resp.ClusterInfo.Region.DNS
|
||||
deploymentName := resp.Name
|
||||
|
||||
if len(regionDns) < 1 || len(deploymentName) < 1 {
|
||||
// Fail early if actual response structure and expectation here ever diverge
|
||||
|
||||
@@ -192,7 +192,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
|
||||
|
||||
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
|
||||
r := baseapp.NewRouter()
|
||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
|
||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
|
||||
|
||||
r.Use(otelmux.Middleware(
|
||||
"apiserver",
|
||||
|
||||
@@ -10,9 +10,6 @@ var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
|
||||
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
|
||||
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
|
||||
|
||||
// this is set via build time variable
|
||||
var ZeusURL = "https://api.signoz.cloud"
|
||||
|
||||
func GetOrDefaultEnv(key string, fallback string) string {
|
||||
v := os.Getenv(key)
|
||||
if len(v) == 0 {
|
||||
|
||||
@@ -29,7 +29,7 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
|
||||
case "BOOL", "BOOLEAN":
|
||||
return sqlschema.DataTypeBoolean
|
||||
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
|
||||
return sqlschema.DataTypeVarchar
|
||||
return sqlschema.DataTypeText
|
||||
}
|
||||
|
||||
return formatter.Formatter.DataTypeOf(dataType)
|
||||
|
||||
@@ -2,19 +2,14 @@ package postgressqlschema
|
||||
|
||||
import (
|
||||
"context"
|
||||
"regexp"
|
||||
"database/sql"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
var (
|
||||
columnDefaultValuePattern = regexp.MustCompile(`^(.*?)(?:::.*)?$`)
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
settings factory.ScopedProviderSettings
|
||||
fmter sqlschema.SQLFormatter
|
||||
@@ -37,9 +32,9 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
|
||||
fmter: fmter,
|
||||
settings: settings,
|
||||
operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
|
||||
SCreateAndDropConstraint: true,
|
||||
SAlterTableAddAndDropColumnIfNotExistsAndExists: true,
|
||||
SAlterTableAlterColumnSetAndDrop: true,
|
||||
DropConstraint: true,
|
||||
ColumnIfNotExistsExists: true,
|
||||
AlterColumnSetNotNull: true,
|
||||
}),
|
||||
}, nil
|
||||
}
|
||||
@@ -53,50 +48,45 @@ func (provider *provider) Operator() sqlschema.SQLOperator {
|
||||
}
|
||||
|
||||
func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
|
||||
rows, err := provider.
|
||||
columns := []struct {
|
||||
ColumnName string `bun:"column_name"`
|
||||
Nullable bool `bun:"nullable"`
|
||||
SQLDataType string `bun:"udt_name"`
|
||||
DefaultVal *string `bun:"column_default"`
|
||||
}{}
|
||||
|
||||
err := provider.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
QueryContext(ctx, `
|
||||
NewRaw(`
|
||||
SELECT
|
||||
c.column_name,
|
||||
c.is_nullable = 'YES',
|
||||
c.is_nullable = 'YES' as nullable,
|
||||
c.udt_name,
|
||||
c.column_default
|
||||
FROM
|
||||
information_schema.columns AS c
|
||||
WHERE
|
||||
c.table_name = ?`, string(tableName))
|
||||
c.table_name = ?`, string(tableName)).
|
||||
Scan(ctx, &columns)
|
||||
if err != nil {
|
||||
return nil, nil, provider.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "table (%s) not found", tableName)
|
||||
return nil, nil, err
|
||||
}
|
||||
if len(columns) == 0 {
|
||||
return nil, nil, sql.ErrNoRows
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := rows.Close(); err != nil {
|
||||
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
|
||||
}
|
||||
}()
|
||||
|
||||
columns := make([]*sqlschema.Column, 0)
|
||||
for rows.Next() {
|
||||
var (
|
||||
name string
|
||||
sqlDataType string
|
||||
nullable bool
|
||||
defaultVal *string
|
||||
)
|
||||
if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
sqlschemaColumns := make([]*sqlschema.Column, 0)
|
||||
for _, column := range columns {
|
||||
columnDefault := ""
|
||||
if defaultVal != nil {
|
||||
columnDefault = columnDefaultValuePattern.ReplaceAllString(*defaultVal, "$1")
|
||||
if column.DefaultVal != nil {
|
||||
columnDefault = *column.DefaultVal
|
||||
}
|
||||
|
||||
columns = append(columns, &sqlschema.Column{
|
||||
Name: sqlschema.ColumnName(name),
|
||||
Nullable: nullable,
|
||||
DataType: provider.fmter.DataTypeOf(sqlDataType),
|
||||
sqlschemaColumns = append(sqlschemaColumns, &sqlschema.Column{
|
||||
Name: sqlschema.ColumnName(column.ColumnName),
|
||||
Nullable: column.Nullable,
|
||||
DataType: provider.fmter.DataTypeOf(column.SQLDataType),
|
||||
Default: columnDefault,
|
||||
})
|
||||
}
|
||||
@@ -214,7 +204,7 @@ WHERE
|
||||
|
||||
return &sqlschema.Table{
|
||||
Name: tableName,
|
||||
Columns: columns,
|
||||
Columns: sqlschemaColumns,
|
||||
PrimaryKeyConstraint: primaryKeyConstraint,
|
||||
ForeignKeyConstraints: foreignKeyConstraints,
|
||||
}, uniqueConstraints, nil
|
||||
@@ -230,8 +220,7 @@ SELECT
|
||||
ci.relname AS index_name,
|
||||
i.indisunique AS unique,
|
||||
i.indisprimary AS primary,
|
||||
a.attname AS column_name,
|
||||
array_position(i.indkey, a.attnum) AS column_position
|
||||
a.attname AS column_name
|
||||
FROM
|
||||
pg_index i
|
||||
LEFT JOIN pg_class ct ON ct.oid = i.indrelid
|
||||
@@ -242,10 +231,9 @@ WHERE
|
||||
a.attnum = ANY(i.indkey)
|
||||
AND con.oid IS NULL
|
||||
AND ct.relkind = 'r'
|
||||
AND ct.relname = ?
|
||||
ORDER BY index_name, column_position`, string(name))
|
||||
AND ct.relname = ?`, string(name))
|
||||
if err != nil {
|
||||
return nil, provider.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "no indices for table (%s) found", name)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
@@ -262,11 +250,9 @@ ORDER BY index_name, column_position`, string(name))
|
||||
unique bool
|
||||
primary bool
|
||||
columnName string
|
||||
// starts from 0 and is unused in this function, this is to ensure that the column names are in the correct order
|
||||
columnPosition int
|
||||
)
|
||||
|
||||
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName, &columnPosition); err != nil {
|
||||
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -283,12 +269,8 @@ ORDER BY index_name, column_position`, string(name))
|
||||
}
|
||||
|
||||
indices := make([]sqlschema.Index, 0)
|
||||
for indexName, index := range uniqueIndicesMap {
|
||||
if index.Name() == indexName {
|
||||
indices = append(indices, index)
|
||||
} else {
|
||||
indices = append(indices, index.Named(indexName))
|
||||
}
|
||||
for _, index := range uniqueIndicesMap {
|
||||
indices = append(indices, index)
|
||||
}
|
||||
|
||||
return indices, nil
|
||||
|
||||
456
ee/sqlstore/postgressqlstore/dialect.go
Normal file
456
ee/sqlstore/postgressqlstore/dialect.go
Normal file
@@ -0,0 +1,456 @@
package postgressqlstore

import (
	"context"
	"fmt"
	"reflect"
	"slices"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/uptrace/bun"
)

var (
	Identity = "id"
	Integer = "bigint"
	Text = "text"
)

var (
	Org = "org"
	User = "user"
	UserNoCascade = "user_no_cascade"
	FactorPassword = "factor_password"
	CloudIntegration = "cloud_integration"
	AgentConfigVersion = "agent_config_version"
)

var (
	OrgReference = `("org_id") REFERENCES "organizations" ("id")`
	UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
	UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
	FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
	CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
	AgentConfigVersionReference = `("version_id") REFERENCES "agent_config_version" ("id")`
)

type dialect struct{}

func (dialect *dialect) IntToTimestamp(ctx context.Context, bun bun.IDB, table string, column string) error {
	columnType, err := dialect.GetColumnType(ctx, bun, table, column)
	if err != nil {
		return err
	}

	// bigint for postgres and INTEGER for sqlite
	if columnType != "bigint" {
		return nil
	}

	// if the columns is integer then do this
	if _, err := bun.
		ExecContext(ctx, `ALTER TABLE `+table+` RENAME COLUMN `+column+` TO `+column+`_old`); err != nil {
		return err
	}

	// add new timestamp column
	if _, err := bun.
		NewAddColumn().
		Table(table).
		ColumnExpr(column + " TIMESTAMP").
		Exec(ctx); err != nil {
		return err
	}

	if _, err := bun.
		NewUpdate().
		Table(table).
		Set(column + " = to_timestamp(cast(" + column + "_old as INTEGER))").
		Where("1=1").
		Exec(ctx); err != nil {
		return err
	}

	// drop old column
	if _, err := bun.
		NewDropColumn().
		Table(table).
		Column(column + "_old").
		Exec(ctx); err != nil {
		return err
	}

	return nil
}

func (dialect *dialect) IntToBoolean(ctx context.Context, bun bun.IDB, table string, column string) error {
	columnExists, err := dialect.ColumnExists(ctx, bun, table, column)
	if err != nil {
		return err
	}
	if !columnExists {
		return nil
	}

	columnType, err := dialect.GetColumnType(ctx, bun, table, column)
	if err != nil {
		return err
	}

	if columnType != "bigint" {
		return nil
	}

	if _, err := bun.
		ExecContext(ctx, `ALTER TABLE `+table+` RENAME COLUMN `+column+` TO `+column+`_old`); err != nil {
		return err
	}

	// add new boolean column
	if _, err := bun.
		NewAddColumn().
		Table(table).
		ColumnExpr(column + " BOOLEAN").
		Exec(ctx); err != nil {
		return err
	}

	// copy data from old column to new column, converting from int to boolean
	if _, err := bun.NewUpdate().
		Table(table).
		Set(column + " = CASE WHEN " + column + "_old = 1 THEN true ELSE false END").
		Where("1=1").
		Exec(ctx); err != nil {
		return err
	}

	// drop old column
	if _, err := bun.NewDropColumn().Table(table).Column(column + "_old").Exec(ctx); err != nil {
		return err
	}

	return nil
}

func (dialect *dialect) GetColumnType(ctx context.Context, bun bun.IDB, table string, column string) (string, error) {
	var columnType string

	err := bun.NewSelect().
		ColumnExpr("data_type").
		TableExpr("information_schema.columns").
		Where("table_name = ?", table).
		Where("column_name = ?", column).
		Scan(ctx, &columnType)
	if err != nil {
		return "", err
	}

	return columnType, nil
}

func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table string, column string) (bool, error) {
	var count int
	err := bun.NewSelect().
		ColumnExpr("COUNT(*)").
		TableExpr("information_schema.columns").
		Where("table_name = ?", table).
		Where("column_name = ?", column).
		Scan(ctx, &count)

	if err != nil {
		return false, err
	}

	return count > 0, nil
}

func (dialect *dialect) AddColumn(ctx context.Context, bun bun.IDB, table string, column string, columnExpr string) error {
	exists, err := dialect.ColumnExists(ctx, bun, table, column)
	if err != nil {
		return err
	}
	if !exists {
		_, err = bun.
			NewAddColumn().
			Table(table).
			ColumnExpr(column + " " + columnExpr).
			Exec(ctx)
		if err != nil {
			return err
		}

	}

	return nil
}

func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
	oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName)
	if err != nil {
		return false, err
	}

	newColumnExists, err := dialect.ColumnExists(ctx, bun, table, newColumnName)
	if err != nil {
		return false, err
	}

	if newColumnExists {
		return true, nil
	}

	if !oldColumnExists {
		return false, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "old column: %s doesn't exist", oldColumnName)
	}

	_, err = bun.
		ExecContext(ctx, "ALTER TABLE "+table+" RENAME COLUMN "+oldColumnName+" TO "+newColumnName)
	if err != nil {
		return false, err
	}
	return true, nil
}

func (dialect *dialect) DropColumn(ctx context.Context, bun bun.IDB, table string, column string) error {
	exists, err := dialect.ColumnExists(ctx, bun, table, column)
	if err != nil {
		return err
	}
	if exists {
		_, err = bun.
			NewDropColumn().
			Table(table).
			Column(column).
			Exec(ctx)
		if err != nil {
			return err
		}

	}

	return nil
}

func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) {

	count := 0
	err := bun.
		NewSelect().
		ColumnExpr("count(*)").
		Table("pg_catalog.pg_tables").
		Where("tablename = ?", bun.Dialect().Tables().Get(reflect.TypeOf(table)).Name).
		Scan(ctx, &count)

	if err != nil {
		return false, err
	}

	if count == 0 {
		return false, nil
	}

	return true, nil
}

func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, references []string, cb func(context.Context) error) error {
	if len(references) == 0 {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
	}
	exists, err := dialect.TableExists(ctx, bun, newModel)
	if err != nil {
		return err
	}
	if exists {
		return nil
	}

	var fkReferences []string
	for _, reference := range references {
		if reference == Org && !slices.Contains(fkReferences, OrgReference) {
			fkReferences = append(fkReferences, OrgReference)
		} else if reference == User && !slices.Contains(fkReferences, UserReference) {
			fkReferences = append(fkReferences, UserReference)
		} else if reference == UserNoCascade && !slices.Contains(fkReferences, UserReferenceNoCascade) {
			fkReferences = append(fkReferences, UserReferenceNoCascade)
		} else if reference == FactorPassword && !slices.Contains(fkReferences, FactorPasswordReference) {
			fkReferences = append(fkReferences, FactorPasswordReference)
		} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
			fkReferences = append(fkReferences, CloudIntegrationReference)
		} else if reference == AgentConfigVersion && !slices.Contains(fkReferences, AgentConfigVersionReference) {
			fkReferences = append(fkReferences, AgentConfigVersionReference)
		}
	}

	createTable := bun.
		NewCreateTable().
		IfNotExists().
		Model(newModel)

	for _, fk := range fkReferences {
		createTable = createTable.ForeignKey(fk)
	}

	_, err = createTable.Exec(ctx)
	if err != nil {
		return err
	}

	err = cb(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		NewDropTable().
		IfExists().
		Model(oldModel).
		Exec(ctx)
	if err != nil {
		return err
	}

	return nil
}

func (dialect *dialect) AddNotNullDefaultToColumn(ctx context.Context, bun bun.IDB, table string, column, columnType, defaultValue string) error {
	query := fmt.Sprintf("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s, ALTER COLUMN %s SET NOT NULL", table, column, defaultValue, column)
	if _, err := bun.ExecContext(ctx, query); err != nil {
		return err
	}
	return nil
}

func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
	if reference == "" {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
	}
	oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
	newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

	columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
	if err != nil {
		return err
	}
	if columnType == Text {
		return nil
	}

	fkReference := ""
	if reference == Org {
		fkReference = OrgReference
	} else if reference == User {
		fkReference = UserReference
	}

	_, err = bun.
		NewCreateTable().
		IfNotExists().
		Model(newModel).
		ForeignKey(fkReference).
		Exec(ctx)

	if err != nil {
		return err
	}

	err = cb(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		NewDropTable().
		IfExists().
		Model(oldModel).
		Exec(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
	if err != nil {
		return err
	}

	return nil
}

func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
	if reference == "" {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
	}
	oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
	newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

	identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
	if err != nil {
		return err
	}
	if identityExists {
		return nil
	}

	fkReference := ""
	if reference == Org {
		fkReference = OrgReference
	} else if reference == User {
		fkReference = UserReference
	}

	_, err = bun.
		NewCreateTable().
		IfNotExists().
		Model(newModel).
		ForeignKey(fkReference).
		Exec(ctx)

	if err != nil {
		return err
	}

	err = cb(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		NewDropTable().
		IfExists().
		Model(oldModel).
		Exec(ctx)
	if err != nil {
		return err
	}

	_, err = bun.
		ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
	if err != nil {
		return err
	}

	return nil
}

func (dialect *dialect) DropColumnWithForeignKeyConstraint(ctx context.Context, bunIDB bun.IDB, model interface{}, column string) error {
	existingTable := bunIDB.Dialect().Tables().Get(reflect.TypeOf(model))
	columnExists, err := dialect.ColumnExists(ctx, bunIDB, existingTable.Name, column)
	if err != nil {
		return err
	}

	if !columnExists {
		return nil
	}

	_, err = bunIDB.
		NewDropColumn().
		Model(model).
		Column(column).
		Exec(ctx)
	if err != nil {
		return err
	}

	return nil
}
153
ee/sqlstore/postgressqlstore/formatter.go
Normal file
153
ee/sqlstore/postgressqlstore/formatter.go
Normal file
@@ -0,0 +1,153 @@
package postgressqlstore

import (
	"strings"

	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun/schema"
)

type formatter struct {
	bunf schema.Formatter
}

func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
	return &formatter{bunf: schema.NewFormatter(dialect)}
}

func (f *formatter) JSONExtractString(column, path string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgres(path)...)
	return sql
}

func (f *formatter) JSONType(column, path string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_typeof("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONIsArray(column, path string) []byte {
	var sql []byte
	sql = append(sql, f.JSONType(column, path)...)
	sql = append(sql, " = "...)
	sql = schema.Append(f.bunf, sql, "array")
	return sql
}

func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, []byte(alias)
}

func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements_text("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), "::text"...)
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_each("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), ".key"...)
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_agg("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_build_array("...)
	for idx, value := range values {
		if idx > 0 {
			sql = append(sql, ", "...)
		}
		sql = schema.Append(f.bunf, sql, value)
	}
	sql = append(sql, ')')
	return sql
}

func (f *formatter) TextToJsonColumn(column string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, "::jsonb"...)
	return sql
}

func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
	return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}

func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
	path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")

	if path == "" {
		return nil
	}

	parts := strings.Split(path, ".")

	var validParts []string
	for _, part := range parts {
		if part != "" {
			validParts = append(validParts, part)
		}
	}

	if len(validParts) == 0 {
		return nil
	}

	var result []byte

	for idx, part := range validParts {
		if idx == len(validParts)-1 {
			if asText {
				result = append(result, "->>"...)
			} else {
				result = append(result, "->"...)
			}
			result = schema.Append(f.bunf, result, part)
			return result
		}

		result = append(result, "->"...)
		result = schema.Append(f.bunf, result, part)
	}

	return result
}

func (f *formatter) LowerExpression(expression string) []byte {
	var sql []byte
	sql = append(sql, "lower("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}
500
ee/sqlstore/postgressqlstore/formatter_test.go
Normal file
500
ee/sqlstore/postgressqlstore/formatter_test.go
Normal file
@@ -0,0 +1,500 @@
|
||||
package postgressqlstore
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/uptrace/bun/dialect/pgdialect"
|
||||
)
|
||||
|
||||
func TestJSONExtractString(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `"data"->>'field'`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.name",
|
||||
expected: `"metadata"->'user'->>'name'`,
|
||||
},
|
||||
{
|
||||
name: "deeply nested path",
|
||||
column: "json_col",
|
||||
path: "$.level1.level2.level3",
|
||||
expected: `"json_col"->'level1'->'level2'->>'level3'`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `"json_col"`,
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
column: "data",
|
||||
path: "",
|
||||
expected: `"data"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONExtractString(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONType(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `jsonb_typeof("data"->'field')`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.age",
|
||||
expected: `jsonb_typeof("metadata"->'user'->'age')`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `jsonb_typeof("json_col")`,
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
column: "data",
|
||||
path: "",
|
||||
expected: `jsonb_typeof("data")`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONType(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONIsArray(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.items",
|
||||
expected: `jsonb_typeof("data"->'items') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.tags",
|
||||
expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `jsonb_typeof("json_col") = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
column: "data",
|
||||
path: "",
|
||||
expected: `jsonb_typeof("data") = 'array'`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONIsArray(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayElements(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "elem",
|
||||
expected: `jsonb_array_elements("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "elem",
|
||||
expected: `jsonb_array_elements("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.items",
|
||||
alias: "item",
|
||||
expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
|
||||
},
|
||||
{
|
||||
name: "deeply nested path",
|
||||
column: "json_col",
|
||||
path: "$.user.tags",
|
||||
alias: "tag",
|
||||
expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayOfStrings(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "str",
|
||||
expected: `jsonb_array_elements_text("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "str",
|
||||
expected: `jsonb_array_elements_text("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.strings",
|
||||
alias: "s",
|
||||
expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONKeys(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "k",
|
||||
expected: `jsonb_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "k",
|
||||
expected: `jsonb_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.object",
|
||||
alias: "key",
|
||||
expected: `jsonb_each("metadata"->'object') AS "key"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayAgg(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column",
|
||||
expression: "id",
|
||||
expected: "jsonb_agg(id)",
|
||||
},
|
||||
{
|
||||
name: "expression with function",
|
||||
expression: "DISTINCT name",
|
||||
expected: "jsonb_agg(DISTINCT name)",
|
||||
},
|
||||
{
|
||||
name: "complex expression",
|
||||
expression: "data->>'field'",
|
||||
expected: "jsonb_agg(data->>'field')",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONArrayAgg(tt.expression))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayLiteral(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
values []string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "empty array",
|
||||
values: []string{},
|
||||
expected: "jsonb_build_array()",
|
||||
},
|
||||
{
|
||||
name: "single value",
|
||||
values: []string{"value1"},
|
||||
expected: "jsonb_build_array('value1')",
|
||||
},
|
||||
{
|
||||
name: "multiple values",
|
||||
values: []string{"value1", "value2", "value3"},
|
||||
expected: "jsonb_build_array('value1', 'value2', 'value3')",
|
||||
},
|
||||
{
|
||||
name: "values with special characters",
|
||||
values: []string{"test", "with space", "with-dash"},
|
||||
expected: "jsonb_build_array('test', 'with space', 'with-dash')",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONArrayLiteral(tt.values...))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
jsonPath string
|
||||
asText bool
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path as text",
|
||||
jsonPath: "$.field",
|
||||
asText: true,
|
||||
expected: "->>'field'",
|
||||
},
|
||||
{
|
||||
name: "simple path as json",
|
||||
jsonPath: "$.field",
|
||||
asText: false,
|
||||
expected: "->'field'",
|
||||
},
|
||||
{
|
||||
name: "nested path as text",
|
||||
jsonPath: "$.user.name",
|
||||
asText: true,
|
||||
expected: "->'user'->>'name'",
|
||||
},
|
||||
{
|
||||
name: "nested path as json",
|
||||
jsonPath: "$.user.name",
|
||||
asText: false,
|
||||
expected: "->'user'->'name'",
|
||||
},
|
||||
{
|
||||
name: "deeply nested as text",
|
||||
jsonPath: "$.a.b.c.d",
|
||||
asText: true,
|
||||
expected: "->'a'->'b'->'c'->>'d'",
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
jsonPath: "$",
|
||||
asText: true,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
jsonPath: "",
|
||||
asText: true,
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New()).(*formatter)
|
||||
got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTextToJsonColumn(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column name",
|
||||
column: "data",
|
||||
expected: `"data"::jsonb`,
|
||||
},
|
||||
{
|
||||
name: "column with underscore",
|
||||
column: "user_data",
|
||||
expected: `"user_data"::jsonb`,
|
||||
},
|
||||
{
|
||||
name: "column with special characters",
|
||||
column: "json-col",
|
||||
expected: `"json-col"::jsonb`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.TextToJsonColumn(tt.column))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLowerExpression(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expr string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column name",
|
||||
expr: "name",
|
||||
expected: "lower(name)",
|
||||
},
|
||||
{
|
||||
name: "quoted column identifier",
|
||||
expr: `"column_name"`,
|
||||
expected: `lower("column_name")`,
|
||||
},
|
||||
{
|
||||
name: "jsonb text extraction",
|
||||
expr: "data->>'field'",
|
||||
expected: "lower(data->>'field')",
|
||||
},
|
||||
{
|
||||
name: "nested jsonb extraction",
|
||||
expr: "metadata->'user'->>'name'",
|
||||
expected: "lower(metadata->'user'->>'name')",
|
||||
},
|
||||
{
|
||||
name: "jsonb_typeof expression",
|
||||
expr: "jsonb_typeof(data->'field')",
|
||||
expected: "lower(jsonb_typeof(data->'field'))",
|
||||
},
|
||||
{
|
||||
name: "string concatenation",
|
||||
expr: "first_name || ' ' || last_name",
|
||||
expected: "lower(first_name || ' ' || last_name)",
|
||||
},
|
||||
{
|
||||
name: "CAST expression",
|
||||
expr: "CAST(value AS TEXT)",
|
||||
expected: "lower(CAST(value AS TEXT))",
|
||||
},
|
||||
{
|
||||
name: "COALESCE expression",
|
||||
expr: "COALESCE(name, 'default')",
|
||||
expected: "lower(COALESCE(name, 'default'))",
|
||||
},
|
||||
{
|
||||
name: "subquery column",
|
||||
expr: "users.email",
|
||||
expected: "lower(users.email)",
|
||||
},
|
||||
{
|
||||
name: "quoted identifier with special chars",
|
||||
expr: `"user-name"`,
|
||||
expected: `lower("user-name")`,
|
||||
},
|
||||
{
|
||||
name: "jsonb to text cast",
|
||||
expr: "data::text",
|
||||
expected: "lower(data::text)",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.LowerExpression(tt.expr))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -15,9 +15,11 @@ import (
)

type provider struct {
	settings factory.ScopedProviderSettings
	sqldb *sql.DB
	bundb *sqlstore.BunDB
	settings factory.ScopedProviderSettings
	sqldb *sql.DB
	bundb *sqlstore.BunDB
	dialect *dialect
	formatter sqlstore.SQLFormatter
}

func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -54,10 +56,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config

	sqldb := stdlib.OpenDBFromPool(pool)

	pgDialect := pgdialect.New()
	bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
	return &provider{
		settings: settings,
		sqldb: sqldb,
		bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
		settings: settings,
		sqldb: sqldb,
		bundb: bunDB,
		dialect: new(dialect),
		formatter: newFormatter(bunDB.Dialect()),
	}, nil
}

@@ -69,6 +75,14 @@ func (provider *provider) SQLDB() *sql.DB {
	return provider.sqldb
}

func (provider *provider) Dialect() sqlstore.SQLDialect {
	return provider.dialect
}

func (provider *provider) Formatter() sqlstore.SQLFormatter {
	return provider.formatter
}

func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
	return provider.bundb.BunDBCtx(ctx)
}
@@ -93,3 +107,7 @@ func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, for

	return err
}

func (dialect *dialect) ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error {
	return nil
}
@@ -1,10 +1,10 @@
package zeus

import (
	"fmt"
	neturl "net/url"
	"sync"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/zeus"
)

@@ -24,17 +24,17 @@ func Config() zeus.Config {
	once.Do(func() {
		parsedURL, err := neturl.Parse(url)
		if err != nil {
			panic(fmt.Errorf("invalid zeus URL: %w", err))
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus URL"))
		}

		deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
		if err != nil {
			panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus deprecated URL"))
		}

		config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
		if err := config.Validate(); err != nil {
			panic(fmt.Errorf("invalid zeus config: %w", err))
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus config"))
		}
	})
@@ -69,7 +69,7 @@
		"antd": "5.11.0",
		"antd-table-saveas-excel": "2.2.1",
		"antlr4": "4.13.2",
		"axios": "1.8.2",
		"axios": "1.12.0",
		"babel-eslint": "^10.1.0",
		"babel-jest": "^29.6.4",
		"babel-loader": "9.1.3",
@@ -279,6 +279,8 @@
		"prismjs": "1.30.0",
		"got": "11.8.5",
		"form-data": "4.0.4",
		"brace-expansion": "^2.0.2"
		"brace-expansion": "^2.0.2",
		"on-headers": "^1.1.0",
		"tmp": "0.2.4"
	}
}
BIN
frontend/public/Images/grains.png
Normal file
BIN
frontend/public/Images/grains.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 98 KiB |
14
frontend/public/Logos/mastra.svg
Normal file
14
frontend/public/Logos/mastra.svg
Normal file
@@ -0,0 +1,14 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" width="512" height="512">
|
||||
<path d="M0 0 C1.01790619 0.00215515 2.03581238 0.0043103 3.08456421 0.00653076 C17.69363523 0.05620146 32.02575853 0.30709809 46.375 3.3125 C47.34985352 3.51117676 48.32470703 3.70985352 49.32910156 3.91455078 C97.7109774 14.03517194 141.69928269 35.9516877 177.375 70.3125 C178.3640332 71.26068604 178.3640332 71.26068604 179.37304688 72.22802734 C188.86130305 81.37247479 197.6012044 90.66984009 205.375 101.3125 C206.23306152 102.46355017 207.09113451 103.61459177 207.94921875 104.765625 C213.98736829 112.95680614 219.34595521 121.47054105 224.375 130.3125 C224.73948242 130.94430176 225.10396484 131.57610352 225.47949219 132.22705078 C247.08534998 169.97532572 256.85198095 212.74189997 256.6875 255.9375 C256.68534485 256.95540619 256.6831897 257.97331238 256.68096924 259.02206421 C256.63129854 273.63113523 256.38040191 287.96325853 253.375 302.3125 C253.07698486 303.77478027 253.07698486 303.77478027 252.77294922 305.26660156 C242.65232806 353.6484774 220.7358123 397.63678269 186.375 433.3125 C185.74287598 433.97185547 185.11075195 434.63121094 184.45947266 435.31054688 C175.31502521 444.79880305 166.01765991 453.5387044 155.375 461.3125 C154.22394983 462.17056152 153.07290823 463.02863451 151.921875 463.88671875 C143.73224019 469.92427396 135.22961724 475.30707302 126.375 480.3125 C125.66778809 480.7146875 124.96057617 481.116875 124.23193359 481.53125 C101.09654164 494.5566741 75.92537591 503.30551461 49.9375 508.625 C49.21860596 508.77235596 48.49971191 508.91971191 47.7590332 509.0715332 C33.08339502 511.86090125 18.55831162 512.66142619 3.64770508 512.62817383 C0.70811513 512.6250221 -2.2304558 512.64854926 -5.16992188 512.67382812 C-18.54306192 512.71848737 -31.43916711 511.51301542 -44.625 509.3125 C-46.33055054 509.03031616 -46.33055054 509.03031616 -48.07055664 508.74243164 C-91.33802759 500.97044532 -132.38793216 480.98345309 -165.625 452.3125 C-166.37007813 451.67828125 -167.11515625 451.0440625 -167.8828125 450.390625 C-181.38779833 438.57948485 -194.05918743 425.82889772 -204.625 411.3125 C-205.48149066 410.16255169 -206.33826433 409.01281411 -207.1953125 407.86328125 C-213.23534209 399.67315742 -218.61867614 391.16870382 -223.625 382.3125 C-224.0271875 381.60528809 -224.429375 380.89807617 -224.84375 380.16943359 C-237.8691741 357.03404164 -246.61801461 331.86287591 -251.9375 305.875 C-252.08485596 305.15610596 -252.23221191 304.43721191 -252.3840332 303.6965332 C-255.35789088 288.05024391 -255.99800362 272.57681954 -255.9375 256.6875 C-255.93534485 255.66959381 -255.9331897 254.65168762 -255.93096924 253.60293579 C-255.88129854 238.99386477 -255.63040191 224.66174147 -252.625 210.3125 C-252.42632324 209.33764648 -252.22764648 208.36279297 -252.02294922 207.35839844 C-241.90232806 158.9765226 -219.9858123 114.98821731 -185.625 79.3125 C-184.99287598 78.65314453 -184.36075195 77.99378906 -183.70947266 77.31445312 C-174.56502521 67.82619695 -165.26765991 59.0862956 -154.625 51.3125 C-153.47394983 50.45443848 -152.32290823 49.59636549 -151.171875 48.73828125 C-142.98224019 42.70072604 -134.47961724 37.31792698 -125.625 32.3125 C-124.91778809 31.9103125 -124.21057617 31.508125 -123.48193359 31.09375 C-100.34654164 18.0683259 -75.17537591 9.31948539 -49.1875 4 C-48.46860596 3.85264404 -47.74971191 3.70528809 -47.0090332 3.5534668 C-31.36274391 0.57960912 -15.88931954 -0.06050362 0 0 Z " fill="#FAFAFA" transform="translate(255.625,-0.3125)"/>
|
||||
<path d="M0 0 C1.01790619 0.00215515 2.03581238 0.0043103 3.08456421 0.00653076 C17.69363523 0.05620146 32.02575853 0.30709809 46.375 3.3125 C47.34985352 3.51117676 48.32470703 3.70985352 49.32910156 3.91455078 C97.7109774 14.03517194 141.69928269 35.9516877 177.375 70.3125 C178.3640332 71.26068604 178.3640332 71.26068604 179.37304688 72.22802734 C188.86130305 81.37247479 197.6012044 90.66984009 205.375 101.3125 C206.23306152 102.46355017 207.09113451 103.61459177 207.94921875 104.765625 C213.98736829 112.95680614 219.34595521 121.47054105 224.375 130.3125 C224.73948242 130.94430176 225.10396484 131.57610352 225.47949219 132.22705078 C247.08534998 169.97532572 256.85198095 212.74189997 256.6875 255.9375 C256.68534485 256.95540619 256.6831897 257.97331238 256.68096924 259.02206421 C256.63129854 273.63113523 256.38040191 287.96325853 253.375 302.3125 C253.07698486 303.77478027 253.07698486 303.77478027 252.77294922 305.26660156 C242.65232806 353.6484774 220.7358123 397.63678269 186.375 433.3125 C185.74287598 433.97185547 185.11075195 434.63121094 184.45947266 435.31054688 C175.31502521 444.79880305 166.01765991 453.5387044 155.375 461.3125 C154.22394983 462.17056152 153.07290823 463.02863451 151.921875 463.88671875 C143.73224019 469.92427396 135.22961724 475.30707302 126.375 480.3125 C125.66778809 480.7146875 124.96057617 481.116875 124.23193359 481.53125 C101.09654164 494.5566741 75.92537591 503.30551461 49.9375 508.625 C49.21860596 508.77235596 48.49971191 508.91971191 47.7590332 509.0715332 C33.08339502 511.86090125 18.55831162 512.66142619 3.64770508 512.62817383 C0.70811513 512.6250221 -2.2304558 512.64854926 -5.16992188 512.67382812 C-18.54306192 512.71848737 -31.43916711 511.51301542 -44.625 509.3125 C-46.33055054 509.03031616 -46.33055054 509.03031616 -48.07055664 508.74243164 C-91.33802759 500.97044532 -132.38793216 480.98345309 -165.625 452.3125 C-166.37007813 451.67828125 -167.11515625 451.0440625 -167.8828125 450.390625 C-181.38779833 438.57948485 -194.05918743 425.82889772 -204.625 411.3125 C-205.48149066 410.16255169 -206.33826433 409.01281411 -207.1953125 407.86328125 C-213.23534209 399.67315742 -218.61867614 391.16870382 -223.625 382.3125 C-224.0271875 381.60528809 -224.429375 380.89807617 -224.84375 380.16943359 C-237.8691741 357.03404164 -246.61801461 331.86287591 -251.9375 305.875 C-252.08485596 305.15610596 -252.23221191 304.43721191 -252.3840332 303.6965332 C-255.35789088 288.05024391 -255.99800362 272.57681954 -255.9375 256.6875 C-255.93534485 255.66959381 -255.9331897 254.65168762 -255.93096924 253.60293579 C-255.88129854 238.99386477 -255.63040191 224.66174147 -252.625 210.3125 C-252.42632324 209.33764648 -252.22764648 208.36279297 -252.02294922 207.35839844 C-241.90232806 158.9765226 -219.9858123 114.98821731 -185.625 79.3125 C-184.99287598 78.65314453 -184.36075195 77.99378906 -183.70947266 77.31445312 C-174.56502521 67.82619695 -165.26765991 59.0862956 -154.625 51.3125 C-153.47394983 50.45443848 -152.32290823 49.59636549 -151.171875 48.73828125 C-142.98224019 42.70072604 -134.47961724 37.31792698 -125.625 32.3125 C-124.91778809 31.9103125 -124.21057617 31.508125 -123.48193359 31.09375 C-100.34654164 18.0683259 -75.17537591 9.31948539 -49.1875 4 C-48.46860596 3.85264404 -47.74971191 3.70528809 -47.0090332 3.5534668 C-31.36274391 0.57960912 -15.88931954 -0.06050362 0 0 Z M-132.625 100.3125 C-133.86636719 101.38757813 -133.86636719 101.38757813 -135.1328125 102.484375 C-165.63578284 129.44938326 -187.86448496 164.37287727 -198.01196289 203.92382812 C-198.4561255 
205.654488 -198.91264545 207.38199177 -199.37670898 209.10742188 C-206.39011482 235.81273152 -206.57174532 264.26106754 -201.625 291.3125 C-201.5022168 292.01439453 -201.37943359 292.71628906 -201.25292969 293.43945312 C-191.91867325 346.46214396 -160.92344717 393.42130233 -117.23217773 424.5078125 C-89.1520157 444.03658082 -57.46503366 455.81316968 -23.625 460.3125 C-22.6246875 460.4465625 -21.624375 460.580625 -20.59375 460.71875 C33.68154914 466.46554638 87.79464708 449.50556098 130.05859375 415.375 C172.73532881 380.04170204 197.99757236 331.33192114 204.83984375 276.59375 C205.14411827 273.15919925 205.29014212 269.75985138 205.375 266.3125 C205.40078125 265.47734863 205.4265625 264.64219727 205.453125 263.78173828 C207.08252124 205.19475973 185.21292028 153.59303729 145.375 111.3125 C144.28251953 110.13880859 144.28251953 110.13880859 143.16796875 108.94140625 C136.81651093 102.33070526 129.68927931 96.80369409 122.375 91.3125 C121.54226563 90.68472656 120.70953125 90.05695313 119.8515625 89.41015625 C44.39149597 34.6822923 -62.3071717 39.41419387 -132.625 100.3125 Z " fill="#000000" transform="translate(255.625,-0.3125)"/>
|
||||
<path d="M0 0 C0.71623535 0.07524902 1.4324707 0.15049805 2.17041016 0.22802734 C11.41582457 1.31558193 17.53905875 5.69555803 24.37939453 11.60302734 C25.9717539 12.97565167 27.58779464 14.31330925 29.21484375 15.64453125 C39.45353655 24.07849539 49.89302189 32.86357336 58.390625 43.0859375 C59.52771819 44.43830561 60.70999313 45.7532854 61.921875 47.0390625 C78.86561041 65.03405157 92.54732646 86.67485183 103 109 C103.45761719 109.94875 103.91523438 110.8975 104.38671875 111.875 C105.65363083 114.56069375 106.84247298 117.26538924 108 120 C108.70576172 121.65902344 108.70576172 121.65902344 109.42578125 123.3515625 C115.29775985 137.75748334 119.44802749 153.51162791 121 169 C121.1393396 170.13598633 121.1393396 170.13598633 121.28149414 171.29492188 C123.04224228 187.01734848 123.04224228 187.01734848 118.6640625 192.58203125 C115.46027156 195.86702306 111.77976561 198.42626256 108 201 C106.70961638 201.94695663 105.4218661 202.89751165 104.13671875 203.8515625 C54.19558639 240.19218217 -10.15836211 250.82403206 -70.52783203 244.24707031 C-72.70701376 244.02928126 -74.87191381 243.88318722 -77.05859375 243.7734375 C-87.15317474 242.98567756 -93.49567628 238.26267641 -101 232 C-101.5775 231.5270752 -102.155 231.05415039 -102.75 230.56689453 C-113.80888326 221.47972622 -125.20747742 211.94987788 -134.3828125 200.92578125 C-136.24364652 198.70986054 -138.21467717 196.61674763 -140.1875 194.5 C-161.40351648 170.90663702 -177.09305866 142.66632254 -188 113 C-188.40734375 111.93007812 -188.8146875 110.86015625 -189.234375 109.7578125 C-194.48827127 95.16814672 -202.71297728 67.59681209 -195.89453125 52.5625 C-194.07223789 50.69071225 -192.19999029 49.41220499 -190 48 C-188.52096885 46.81314803 -187.04236305 45.62573122 -185.57055664 44.42993164 C-163.09739012 26.47222869 -136.79512656 14.12849151 -109 7 C-107.47898826 6.58659334 -105.95816766 6.17248301 -104.4375 5.7578125 C-70.37699256 -3.34276097 -34.91038892 -3.88896444 0 0 Z " fill="#010101" transform="translate(294,134)"/>
|
||||
<path d="M0 0 C0.86409988 -0.0033284 1.72819977 -0.0066568 2.6184845 -0.01008606 C4.43616205 -0.0151155 6.2538487 -0.01749563 8.0715332 -0.01733398 C10.80761482 -0.01951182 13.54328676 -0.03768671 16.27929688 -0.05664062 C26.27760684 -0.08904371 35.78646484 0.35879187 45.64453125 2.16796875 C46.42820068 2.2980835 47.21187012 2.42819824 48.01928711 2.56225586 C67.48091134 5.91708002 86.4354731 13.58908312 103.64453125 23.16796875 C104.27649414 23.51827148 104.90845703 23.86857422 105.55957031 24.22949219 C119.56180482 32.08853758 131.62289424 41.70235966 143.20019531 52.78662109 C144.48254743 54.01304914 145.77750786 55.22626362 147.07421875 56.4375 C160.46125565 69.27666884 171.07943638 85.21359379 179.83203125 101.48046875 C180.23115723 102.21628174 180.6302832 102.95209473 181.04150391 103.71020508 C181.97584067 105.49613991 182.81943883 107.32900956 183.64453125 109.16796875 C183.31453125 109.82796875 182.98453125 110.48796875 182.64453125 111.16796875 C181.77828125 110.23984375 180.91203125 109.31171875 180.01953125 108.35546875 C151.77719444 80.22655498 111.24847041 62.18954649 72.55981445 54.76806641 C66.66500207 53.55440703 62.13035714 50.73600667 57.08203125 47.54296875 C55.29699984 46.44819083 53.51053116 45.35575303 51.72265625 44.265625 C50.85318359 43.73340332 49.98371094 43.20118164 49.08789062 42.65283203 C16.63913044 22.93327173 -19.64835388 13.71677996 -57.35546875 13.16796875 C-51.06422023 6.87672023 -37.42242063 5.35193024 -28.98046875 3.41796875 C-28.06620117 3.20744873 -27.15193359 2.99692871 -26.20996094 2.7800293 C-17.46378901 0.84110933 -8.95226967 0.0123532 0 0 Z " fill="#030303" transform="translate(244.35546875,67.83203125)"/>
|
||||
<path d="M0 0 C1.3921875 1.2065625 1.3921875 1.2065625 2.8125 2.4375 C17.70081221 15.15211863 32.63519217 25.94043322 50 35 C50.65210449 35.34772461 51.30420898 35.69544922 51.97607422 36.05371094 C68.61826436 44.84435504 87.93410811 51.6622374 106.5 54.8125 C117.0495917 56.69262525 125.60341083 62.82215102 134.53027344 68.45361328 C166.82331519 88.67347552 203.35131846 97.02260308 241 98 C234.72437292 104.27562708 221.11199545 105.81088155 212.6875 107.75 C211.78161133 107.96052002 210.87572266 108.17104004 209.94238281 108.38793945 C198.42674064 110.9501174 187.15547289 111.18641307 175.40698242 111.18530273 C172.83560399 111.18748043 170.2646618 111.20566475 167.69335938 111.22460938 C157.60706378 111.25933256 147.96699734 110.68684053 138 109 C137.17717529 108.86456787 136.35435059 108.72913574 135.5065918 108.58959961 C99.40933725 102.18890888 66.71597085 83.54843429 40.46044922 58.39746094 C39.17158965 57.16418909 37.86994193 55.94431172 36.56640625 54.7265625 C21.17388217 39.96510447 9.14707063 21.15582183 0 2 C0 1.34 0 0.68 0 0 Z " fill="#030303" transform="translate(84,333)"/>
|
||||
<path d="M0 0 C2.99943905 1.34572887 5.08076179 2.83537101 7.39160156 5.16455078 C8.0319635 5.80506378 8.67232544 6.44557678 9.33209229 7.10549927 C10.01140808 7.79513763 10.69072388 8.484776 11.390625 9.1953125 C12.09523865 9.90275604 12.79985229 10.61019958 13.52581787 11.33908081 C15.7719539 13.59639862 18.01103009 15.86057633 20.25 18.125 C21.77297717 19.65800953 23.29640939 21.19056715 24.8203125 22.72265625 C28.55216815 26.47654573 32.27825351 30.23608914 36 34 C40.8277897 29.9845347 45.3437641 25.77173317 49.73828125 21.2890625 C50.98229394 20.03242542 52.22643824 18.77591862 53.47070312 17.51953125 C55.40594863 15.55930686 57.33953935 13.59749882 59.27075195 11.63330078 C61.15145313 9.72257458 63.03818159 7.81797036 64.92578125 5.9140625 C65.50596512 5.32041901 66.08614899 4.72677551 66.68391418 4.11514282 C68.08510986 2.70514466 69.53810801 1.34696546 71 0 C73 0 73 0 74.62109375 1.609375 C75.22050781 2.31578125 75.81992187 3.0221875 76.4375 3.75 C77.03433594 4.44609375 77.63117188 5.1421875 78.24609375 5.859375 C79.88667532 7.86168836 81.44684965 9.92913287 83 12 C74.42 20.58 65.84 29.16 57 38 C86.7 38.495 86.7 38.495 117 39 C117 44.28 117 49.56 117 55 C97.53 55.33 78.06 55.66 58 56 C66.25 64.25 74.5 72.5 83 81 C81.16614077 84.66771845 79.74937171 86.46834996 76.875 89.25 C76.15054688 89.95640625 75.42609375 90.6628125 74.6796875 91.390625 C74.12539063 91.92171875 73.57109375 92.4528125 73 93 C70.00056095 91.65427113 67.91923821 90.16462899 65.60839844 87.83544922 C64.64785553 86.87467972 64.64785553 86.87467972 63.66790771 85.89450073 C62.98859192 85.20486237 62.30927612 84.515224 61.609375 83.8046875 C60.90476135 83.09724396 60.20014771 82.38980042 59.47418213 81.66091919 C57.2280461 79.40360138 54.98896991 77.13942367 52.75 74.875 C51.22702283 73.34199047 49.70359061 71.80943285 48.1796875 70.27734375 C44.44783185 66.52345427 40.72174649 62.76391086 37 59 C32.16701194 63.02748053 27.61889996 67.24379413 23.19140625 71.7109375 C22.57982162 72.32393707 21.968237 72.93693665 21.33811951 73.56851196 C19.41052441 75.50164564 17.48646072 77.43824987 15.5625 79.375 C14.24567596 80.69684699 12.92862074 82.01846372 11.61132812 83.33984375 C8.40487627 86.55724299 5.20158371 89.77775681 2 93 C-1.41059403 91.39464713 -3.61792484 89.59481099 -6.25 86.875 C-6.95640625 86.15054688 -7.6628125 85.42609375 -8.390625 84.6796875 C-8.92171875 84.12539063 -9.4528125 83.57109375 -10 83 C-8.41570736 79.09154201 -5.73402218 76.47642804 -2.7734375 73.578125 C-1.84450684 72.65418945 -0.91557617 71.73025391 0.04150391 70.77832031 C1.22405762 69.61397461 2.40661133 68.44962891 3.625 67.25 C7.37875 63.5375 11.1325 59.825 15 56 C-14.205 55.505 -14.205 55.505 -44 55 C-44 49.72 -44 44.44 -44 39 C-24.2 38.67 -4.4 38.34 16 38 C7.42 29.42 -1.16 20.84 -10 12 C-8.02 9.69 -6.04 7.38 -4 5 C-2.65807132 3.34024611 -1.32090525 1.67653359 0 0 Z " fill="#FBFBFB" transform="translate(220,210)"/>
|
||||
<path d="M0 0 C0.66 0 1.32 0 2 0 C2.10320557 0.85489014 2.10320557 0.85489014 2.20849609 1.72705078 C7.84197412 45.87779718 28.1939551 86.6195367 56 121 C56.42941895 121.53383301 56.85883789 122.06766602 57.30126953 122.61767578 C66.40690991 133.83981661 76.68709229 143.89775823 87 154 C85 155 85 155 83.08325195 154.41430664 C82.30941162 154.08293701 81.53557129 153.75156738 80.73828125 153.41015625 C79.44216919 152.85835693 79.44216919 152.85835693 78.11987305 152.29541016 C77.19343994 151.8885498 76.26700684 151.48168945 75.3125 151.0625 C74.36221924 150.64790527 73.41193848 150.23331055 72.43286133 149.80615234 C39.14146569 135.09423719 7.08477782 112.00144189 -6.8203125 77.16015625 C-7.20960937 76.11730469 -7.59890625 75.07445312 -8 74 C-8.32742187 73.13246094 -8.65484375 72.26492188 -8.9921875 71.37109375 C-16.36268399 50.62841076 -12.86150198 26.97666813 -3.75 7.5625 C-2.52169606 5.02980009 -1.26750366 2.51347207 0 0 Z " fill="#040404" transform="translate(80,210)"/>
|
||||
<path d="M0 0 C1.35764345 4.07293036 -0.06910614 6.67744348 -1.5625 10.5 C-1.97749756 11.57531982 -1.97749756 11.57531982 -2.40087891 12.67236328 C-6.47155806 22.9772079 -11.68698526 31.65423127 -19 40 C-19.59296875 40.68835938 -20.1859375 41.37671875 -20.796875 42.0859375 C-35.31857853 57.78675371 -58.56153437 66.83592642 -79.57324219 68.23852539 C-83.59327999 68.37727956 -87.60297433 68.43877365 -91.625 68.4375 C-92.70432297 68.43910126 -92.70432297 68.43910126 -93.80545044 68.44073486 C-120.19044633 68.3846361 -144.22754501 60.94698881 -168 50 C-168 49.67 -168 49.34 -168 49 C-167.23671387 48.98018066 -166.47342773 48.96036133 -165.68701172 48.93994141 C-135.41473945 48.1185831 -105.01412697 46.60137763 -75.89160156 37.58056641 C-74.17234947 37.05289714 -72.4420738 36.56148633 -70.7109375 36.07421875 C-45.39043035 28.79857935 -20.43492118 16.73681044 0 0 Z " fill="#040404" transform="translate(411,347)"/>
|
||||
<path d="M0 0 C36.3336157 11.02984762 71.78280942 35.12814008 90.01855469 69.06640625 C96.50040978 81.83662823 99.34867561 93.65420512 99.25 107.9375 C99.26160156 109.61102539 99.26160156 109.61102539 99.2734375 111.31835938 C99.25018103 121.24305766 97.70881388 129.9522873 94.125 139.1875 C93.88136719 139.84685547 93.63773437 140.50621094 93.38671875 141.18554688 C91.65503117 145.7390777 89.50393006 149.82136348 87 154 C86.34 153.67 85.68 153.34 85 153 C84.50097656 151.18432617 84.50097656 151.18432617 84.140625 148.79296875 C83.99786133 147.90649658 83.85509766 147.02002441 83.70800781 146.10668945 C83.55686523 145.14335693 83.40572266 144.18002441 83.25 143.1875 C79.16249798 119.24447032 70.31517306 97.3963538 59 76 C58.48832275 75.02450195 58.48832275 75.02450195 57.96630859 74.02929688 C47.301887 53.8627614 33.39273041 35.47640491 17.7265625 18.9375 C15.86858655 16.97059803 14.11773128 14.95405655 12.41015625 12.85546875 C10.3273051 10.38777963 8.28787438 8.36653406 5.8125 6.3125 C3.31942756 4.23891203 1.82972289 2.74458434 0 0 Z " fill="#040404" transform="translate(345,147)"/>
|
||||
<path d="M0 0 C1.26231445 0.00491455 2.52462891 0.0098291 3.82519531 0.01489258 C24.5153613 0.27226578 42.86797287 5.22314233 62.125 12.4375 C63.02718262 12.77418701 63.92936523 13.11087402 64.85888672 13.45776367 C66.12949951 13.94660034 66.12949951 13.94660034 67.42578125 14.4453125 C68.55173706 14.87674561 68.55173706 14.87674561 69.70043945 15.31689453 C71.99402845 16.3980708 73.83827422 17.73010316 75.8125 19.3125 C74.90935059 19.3218457 74.00620117 19.33119141 73.07568359 19.34082031 C46.66373358 19.68118909 20.60587941 21.22322233 -5.11425781 27.79150391 C-7.27539495 28.33458759 -9.44421442 28.83079049 -11.6171875 29.32421875 C-39.00925704 35.7635841 -65.52353508 48.25154961 -88.08203125 65.01171875 C-91.1875 67.3125 -91.1875 67.3125 -93.1875 68.3125 C-92.18113049 53.55241391 -82.67422534 39.13895373 -73.1875 28.3125 C-72.59453125 27.62414062 -72.0015625 26.93578125 -71.390625 26.2265625 C-53.12979089 6.48301109 -25.93856094 -0.12102791 0 0 Z " fill="#030303" transform="translate(193.1875,96.6875)"/>
|
||||
<path d="M0 0 C0.99 0.33 1.98 0.66 3 1 C2.34 1.66 1.68 2.32 1 3 C0.67 2.01 0.34 1.02 0 0 Z M-9 10 C-9 13 -9 13 -9 13 Z " fill="#E8E8E8" transform="translate(220,210)"/>
|
||||
</svg>
After Width: | Height: | Size: 20 KiB |
@@ -274,7 +274,7 @@ function App(): JSX.Element {
			chat_settings: {
				app_id: process.env.PYLON_APP_ID,
				email: user.email,
				name: user.displayName,
				name: user.displayName || user.email,
			},
		};
	}
@@ -1,4 +1,4 @@
import { ApiBaseInstance as axios } from 'api';
import { LogEventAxiosInstance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -1,13 +1,11 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { ApiBaseInstance } from 'api';
import axios from 'api';

import { getFieldKeys } from '../getFieldKeys';

// Mock the API instance
jest.mock('api', () => ({
	ApiBaseInstance: {
		get: jest.fn(),
	},
	get: jest.fn(),
}));

describe('getFieldKeys API', () => {
@@ -31,33 +29,33 @@ describe('getFieldKeys API', () => {

	it('should call API with correct parameters when no args provided', async () => {
		// Mock successful API response
		(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
		(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

		// Call function with no parameters
		await getFieldKeys();

		// Verify API was called correctly with empty params object
		expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
		expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
			params: {},
		});
	});

	it('should call API with signal parameter when provided', async () => {
		// Mock successful API response
		(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
		(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

		// Call function with signal parameter
		await getFieldKeys('traces');

		// Verify API was called with signal parameter
		expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
		expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
			params: { signal: 'traces' },
		});
	});

	it('should call API with name parameter when provided', async () => {
		// Mock successful API response
		(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
		(axios.get as jest.Mock).mockResolvedValueOnce({
			status: 200,
			data: {
				status: 'success',
@@ -72,14 +70,14 @@ describe('getFieldKeys API', () => {
		await getFieldKeys(undefined, 'service');

		// Verify API was called with name parameter
		expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
		expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
			params: { name: 'service' },
		});
	});

	it('should call API with both signal and name when provided', async () => {
		// Mock successful API response
		(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
		(axios.get as jest.Mock).mockResolvedValueOnce({
			status: 200,
			data: {
				status: 'success',
@@ -94,14 +92,14 @@ describe('getFieldKeys API', () => {
		await getFieldKeys('logs', 'service');

		// Verify API was called with both parameters
		expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
		expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
			params: { signal: 'logs', name: 'service' },
		});
	});

	it('should return properly formatted response', async () => {
		// Mock API to return our response
		(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
		(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);

		// Call the function
		const result = await getFieldKeys('traces');
@@ -1,13 +1,11 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import axios from 'api';
|
||||
|
||||
import { getFieldValues } from '../getFieldValues';
|
||||
|
||||
// Mock the API instance
|
||||
jest.mock('api', () => ({
|
||||
ApiBaseInstance: {
|
||||
get: jest.fn(),
|
||||
},
|
||||
get: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('getFieldValues API', () => {
|
||||
@@ -17,7 +15,7 @@ describe('getFieldValues API', () => {
|
||||
|
||||
it('should call the API with correct parameters (no options)', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -34,14 +32,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues();
|
||||
|
||||
// Verify API was called correctly with empty params
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: {},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with signal parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -58,14 +56,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues('traces');
|
||||
|
||||
// Verify API was called with signal parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { signal: 'traces' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with name parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -82,14 +80,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues(undefined, 'service.name');
|
||||
|
||||
// Verify API was called with name parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { name: 'service.name' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with value parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -106,14 +104,14 @@ describe('getFieldValues API', () => {
|
||||
await getFieldValues(undefined, 'service.name', 'front');
|
||||
|
||||
// Verify API was called with value parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { name: 'service.name', searchText: 'front' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with time range parameters', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
@@ -138,7 +136,7 @@ describe('getFieldValues API', () => {
|
||||
);
|
||||
|
||||
// Verify API was called with time range parameters (converted to milliseconds)
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: {
|
||||
signal: 'logs',
|
||||
name: 'service.name',
|
||||
@@ -165,7 +163,7 @@ describe('getFieldValues API', () => {
|
||||
},
|
||||
};
|
||||
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce(mockResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldValues('traces', 'mixed.values');
|
||||
@@ -196,7 +194,7 @@ describe('getFieldValues API', () => {
|
||||
};
|
||||
|
||||
// Mock API to return our response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
|
||||
(axios.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldValues('traces', 'service.name');
|
||||
|
||||
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -24,7 +24,7 @@ export const getFieldKeys = async (
}

try {
const response = await ApiBaseInstance.get('/fields/keys', { params });
const response = await axios.get('/fields/keys', { params });

return {
httpStatusCode: response.status,
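For reference, a minimal caller sketch for the migrated getFieldKeys. The import path and the data field on the resolved value are assumptions inferred from the hunks and tests above; only httpStatusCode appears explicitly in this diff.

// Hypothetical caller — import path and result.data are assumed, not taken from this diff.
import { getFieldKeys } from 'api/fields/getFieldKeys';

async function loadTraceFieldKeys(): Promise<void> {
	// Mirrors the test case above: signal 'traces', name filter 'service'.
	const result = await getFieldKeys('traces', 'service');
	if (result.httpStatusCode === 200) {
		console.log(result.data);
	}
}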
@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -47,7 +47,7 @@ export const getFieldValues = async (
}

try {
const response = await ApiBaseInstance.get('/fields/values', { params });
const response = await axios.get('/fields/values', { params });

// Normalize values from different types (stringValues, boolValues, etc.)
if (response.data?.data?.values) {
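The comment in this hunk mentions normalizing values that arrive under different typed buckets. A rough sketch of what such a step could look like; the bucket names below are assumptions based on that comment, not the actual response contract.

// Illustrative normalization only — field names are assumed from the comment above.
interface FieldValuesPayload {
	stringValues?: string[];
	boolValues?: boolean[];
	numberValues?: number[];
}

function normalizeFieldValues(values: FieldValuesPayload): string[] {
	// Flatten every typed bucket into a single list of display strings.
	return [
		...(values.stringValues ?? []),
		...(values.boolValues ?? []).map(String),
		...(values.numberValues ?? []).map(String),
	];
}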
@@ -86,8 +86,9 @@ const interceptorRejected = async (

if (
response.status === 401 &&
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
// if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
response.config.url !== '/sessions/rotate' &&
response.config.url !== '/sessions/email_password' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
)
@@ -199,15 +200,15 @@ ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
//

// axios Base
export const ApiBaseInstance = axios.create({
export const LogEventAxiosInstance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});

ApiBaseInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejectedBase,
);
ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V1
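The interceptor hunk widens the "do not retry on 401" exclusion list to include the email/password session-creation call. Extracted here as a standalone predicate for readability; the helper name is illustrative, not part of the codebase.

import { AxiosResponse } from 'axios';

// Returns true only when a 401 response is eligible for a session-rotate retry,
// i.e. it did not itself come from rotate, email/password login, or logout.
function shouldAttemptSessionRotate(response: AxiosResponse): boolean {
	const { url, method } = response.config;
	return (
		response.status === 401 &&
		url !== '/sessions/rotate' &&
		url !== '/sessions/email_password' &&
		!(url === '/sessions' && method === 'delete')
	);
}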
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
@@ -17,7 +17,7 @@ export const getHostAttributeKeys = async (
try {
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await ApiBaseInstance.get(
}> = await axios.get(
`/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
{
params: {
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -20,7 +20,7 @@ const getOnboardingStatus = async (props: {
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
const { endpointService, ...rest } = props;
try {
const response = await ApiBaseInstance.post(
const response = await axios.post(
`/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
rest,
);
@@ -1,13 +1,20 @@
import axios from 'api';
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import { PayloadProps, Props } from 'types/api/metrics/getService';

const getService = async (props: Props): Promise<PayloadProps> => {
const response = await axios.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data;
try {
const response = await ApiV2Instance.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default getService;
@@ -1,22 +1,27 @@
import axios from 'api';
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';

const getTopOperations = async (props: Props): Promise<PayloadProps> => {
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';
try {
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';

const response = await axios.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
});
const response = await ApiV2Instance.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
limit: 5000,
});

if (props.isEntryPoint) {
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
return response.data;
};

export default getTopOperations;
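A minimal caller sketch for the migrated getTopOperations, which now goes through ApiV2Instance, sends a limit of 5000, and routes errors through ErrorResponseHandlerV2. The import path and exact Props field names are assumptions based on the hunk above.

// Illustrative only — path and field names are assumed from the diff, not verified.
import getTopOperations from 'api/metrics/getTopOperations';

async function fetchTopOperations(serviceName: string): Promise<void> {
	const operations = await getTopOperations({
		start: 1700000000000,
		end: 1700003600000,
		service: serviceName,
		selectedTags: [],
		isEntryPoint: false,
	});
	console.log(operations);
}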
@@ -9,6 +9,7 @@ export interface UpdateMetricMetadataProps {
metricType: MetricType;
temporality?: Temporality;
isMonotonic?: boolean;
unit?: string;
}

export interface UpdateMetricMetadataResponse {
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -9,7 +9,7 @@ const getCustomFilters = async (
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const { signal } = props;
try {
const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);
const response = await axios.get(`/orgs/me/filters/${signal}`);

return {
statusCode: 200,
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
@@ -6,7 +6,7 @@ import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFil
const updateCustomFiltersAPI = async (
props: UpdateCustomFiltersProps,
): Promise<SuccessResponse<void> | AxiosError> =>
ApiBaseInstance.put(`orgs/me/filters`, {
axios.put(`/orgs/me/filters`, {
...props.data,
});
@@ -8,7 +8,7 @@ const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDuration,
coldStorageDurationDays,
ttlConditions,
}: PropsV2): Promise<SuccessResponseV2<PayloadPropsV2>> => {
try {
@@ -16,7 +16,7 @@ const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDuration,
coldStorageDurationDays,
ttlConditions,
});
@@ -1,4 +1,4 @@
import { ApiBaseInstance } from 'api';
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -9,15 +9,12 @@ const listOverview = async (
): Promise<SuccessResponseV2<PayloadProps>> => {
const { start, end, show_ip: showIp, filter } = props;
try {
const response = await ApiBaseInstance.post(
`/third-party-apis/overview/list`,
{
start,
end,
show_ip: showIp,
filter,
},
);
const response = await axios.post(`/third-party-apis/overview/list`, {
start,
end,
show_ip: showIp,
filter,
});

return {
httpStatusCode: response.status,
28  frontend/src/api/trace/getSpanPercentiles.ts  Normal file
@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
GetSpanPercentilesProps,
GetSpanPercentilesResponseDataProps,
} from 'types/api/trace/getSpanPercentiles';

const getSpanPercentiles = async (
props: GetSpanPercentilesProps,
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
try {
const response = await axios.post('/span_percentile', {
...props,
});

return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};

export default getSpanPercentiles;
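A hedged usage sketch for the new getSpanPercentiles helper. The props type is imported from the path shown in the file itself; its fields are defined elsewhere, so the call simply forwards an already-built props object.

import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import { GetSpanPercentilesProps } from 'types/api/trace/getSpanPercentiles';

// Placeholder wiring — the real GetSpanPercentilesProps fields live in the types file above.
async function loadPercentiles(props: GetSpanPercentilesProps): Promise<void> {
	try {
		const { httpStatusCode, data } = await getSpanPercentiles(props);
		console.log(httpStatusCode, data);
	} catch (error) {
		// The helper re-throws after routing the error through ErrorResponseHandlerV2.
		console.error(error);
	}
}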
@@ -11,7 +11,7 @@ import {
export const getQueryRangeV5 = async (
props: QueryRangePayloadV5,
version: string,
signal: AbortSignal,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
try {
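The signature change makes the AbortSignal optional. A sketch of a call site under assumed import paths (they are not shown in this hunk):

// Illustrative call site — import paths are assumptions, not taken from this diff.
import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
import { QueryRangePayloadV5 } from 'api/v5/v5';

async function runQuery(payload: QueryRangePayloadV5): Promise<void> {
	const controller = new AbortController();
	// signal is now optional, so callers without cancellation needs can omit it entirely.
	const result = await getQueryRangeV5(payload, 'v5', controller.signal);
	console.log(result.httpStatusCode);
}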
371  frontend/src/components/Graph/__tests__/yAxisConfig.test.ts  Normal file
@@ -0,0 +1,371 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { getYAxisFormattedValue, PrecisionOptionsEnum } from '../yAxisConfig';
|
||||
|
||||
const testFullPrecisionGetYAxisFormattedValue = (
|
||||
value: string,
|
||||
format: string,
|
||||
): string => getYAxisFormattedValue(value, format, PrecisionOptionsEnum.FULL);
|
||||
|
||||
describe('getYAxisFormattedValue - none (full precision legacy assertions)', () => {
|
||||
test('large integers and decimals', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('250034', 'none')).toBe(
|
||||
'250034',
|
||||
);
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('250034897.12345', 'none'),
|
||||
).toBe('250034897.12345');
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('250034897.02354', 'none'),
|
||||
).toBe('250034897.02354');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('9999999.9999', 'none')).toBe(
|
||||
'9999999.9999',
|
||||
);
|
||||
});
|
||||
|
||||
test('preserves leading zeros after decimal until first non-zero', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.0000234', 'none')).toBe(
|
||||
'1.0000234',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.00003', 'none')).toBe(
|
||||
'0.00003',
|
||||
);
|
||||
});
|
||||
|
||||
test('trims to three significant decimals and removes trailing zeros', () => {
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'none'),
|
||||
).toBe('0.000000250034');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'none')).toBe(
|
||||
'0.00000025',
|
||||
);
|
||||
|
||||
// Big precision, limiting the javascript precision (~16 digits)
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'none'),
|
||||
).toBe('1');
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'none'),
|
||||
).toBe('1.005555555595958');
|
||||
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
|
||||
'0.000000001',
|
||||
);
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('0.000000250000', 'none'),
|
||||
).toBe('0.00000025');
|
||||
});
|
||||
|
||||
test('whole numbers normalize', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'none')).toBe('1000');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('99.5458', 'none')).toBe(
|
||||
'99.5458',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.234567', 'none')).toBe(
|
||||
'1.234567',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('99.998', 'none')).toBe(
|
||||
'99.998',
|
||||
);
|
||||
});
|
||||
|
||||
test('strip redundant decimal zeros', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1000.000', 'none')).toBe(
|
||||
'1000',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('99.500', 'none')).toBe(
|
||||
'99.5',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.000', 'none')).toBe('1');
|
||||
});
|
||||
|
||||
test('edge values', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0', 'none')).toBe('0');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-0', 'none')).toBe('0');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'none')).toBe('∞');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-Infinity', 'none')).toBe(
|
||||
'-∞',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('invalid', 'none')).toBe(
|
||||
'NaN',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('', 'none')).toBe('NaN');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('abc123', 'none')).toBe('NaN');
|
||||
});
|
||||
|
||||
test('small decimals keep precision as-is', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'none')).toBe(
|
||||
'0.0001',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-0.0001', 'none')).toBe(
|
||||
'-0.0001',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
|
||||
'0.000000001',
|
||||
);
|
||||
});
|
||||
|
||||
test('simple decimals preserved', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.1', 'none')).toBe('0.1');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.2', 'none')).toBe('0.2');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.3', 'none')).toBe('0.3');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.0000000001', 'none')).toBe(
|
||||
'1.0000000001',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getYAxisFormattedValue - units (full precision legacy assertions)', () => {
|
||||
test('ms', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'ms')).toBe('1.5 s');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('500', 'ms')).toBe('500 ms');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('60000', 'ms')).toBe('1 min');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('295.429', 'ms')).toBe(
|
||||
'295.429 ms',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('4353.81', 'ms')).toBe(
|
||||
'4.35381 s',
|
||||
);
|
||||
});
|
||||
|
||||
test('s', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('90', 's')).toBe('1.5 mins');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('30', 's')).toBe('30 s');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('3600', 's')).toBe('1 hour');
|
||||
});
|
||||
|
||||
test('m', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('90', 'm')).toBe('1.5 hours');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('30', 'm')).toBe('30 min');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1440', 'm')).toBe('1 day');
|
||||
});
|
||||
|
||||
test('bytes', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'bytes')).toBe(
|
||||
'1 KiB',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('512', 'bytes')).toBe('512 B');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'bytes')).toBe(
|
||||
'1.5 KiB',
|
||||
);
|
||||
});
|
||||
|
||||
test('mbytes', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'mbytes')).toBe(
|
||||
'1 GiB',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('512', 'mbytes')).toBe(
|
||||
'512 MiB',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'mbytes')).toBe(
|
||||
'1.5 GiB',
|
||||
);
|
||||
});
|
||||
|
||||
test('kbytes', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'kbytes')).toBe(
|
||||
'1 MiB',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('512', 'kbytes')).toBe(
|
||||
'512 KiB',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'kbytes')).toBe(
|
||||
'1.5 MiB',
|
||||
);
|
||||
});
|
||||
|
||||
test('short', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'short')).toBe('1 K');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'short')).toBe(
|
||||
'1.5 K',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('999', 'short')).toBe('999');
|
||||
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1000000', 'short')).toBe(
|
||||
'1 Mil',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1555600', 'short')).toBe(
|
||||
'1.5556 Mil',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('999999', 'short')).toBe(
|
||||
'999.999 K',
|
||||
);
|
||||
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1000000000', 'short')).toBe(
|
||||
'1 Bil',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1500000000', 'short')).toBe(
|
||||
'1.5 Bil',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('999999999', 'short')).toBe(
|
||||
'999.999999 Mil',
|
||||
);
|
||||
});
|
||||
|
||||
test('percent', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.15', 'percent')).toBe(
|
||||
'0.15%',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.1234', 'percent')).toBe(
|
||||
'0.1234%',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.123499', 'percent')).toBe(
|
||||
'0.123499%',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.5', 'percent')).toBe(
|
||||
'1.5%',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'percent')).toBe(
|
||||
'0.0001%',
|
||||
);
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('0.000000001', 'percent'),
|
||||
).toBe('1e-9%');
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'percent'),
|
||||
).toBe('0.000000250034%');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'percent')).toBe(
|
||||
'0.00000025%',
|
||||
);
|
||||
// Big precision, limiting the javascript precision (~16 digits)
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'percent'),
|
||||
).toBe('1%');
|
||||
expect(
|
||||
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'percent'),
|
||||
).toBe('1.005555555595958%');
|
||||
});
|
||||
|
||||
test('ratio', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0.5', 'ratio')).toBe(
|
||||
'0.5 ratio',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('1.25', 'ratio')).toBe(
|
||||
'1.25 ratio',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('2.0', 'ratio')).toBe(
|
||||
'2 ratio',
|
||||
);
|
||||
});
|
||||
|
||||
test('temperature units', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('25', 'celsius')).toBe(
|
||||
'25 °C',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0', 'celsius')).toBe('0 °C');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-10', 'celsius')).toBe(
|
||||
'-10 °C',
|
||||
);
|
||||
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('77', 'fahrenheit')).toBe(
|
||||
'77 °F',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('32', 'fahrenheit')).toBe(
|
||||
'32 °F',
|
||||
);
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('14', 'fahrenheit')).toBe(
|
||||
'14 °F',
|
||||
);
|
||||
});
|
||||
|
||||
test('ms edge cases', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0', 'ms')).toBe('0 ms');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-1500', 'ms')).toBe('-1.5 s');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'ms')).toBe('∞');
|
||||
});
|
||||
|
||||
test('bytes edge cases', () => {
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('0', 'bytes')).toBe('0 B');
|
||||
expect(testFullPrecisionGetYAxisFormattedValue('-1024', 'bytes')).toBe(
|
||||
'-1 KiB',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getYAxisFormattedValue - precision option tests', () => {
|
||||
test('precision 0 drops decimal part', () => {
|
||||
expect(getYAxisFormattedValue('1.2345', 'none', 0)).toBe('1');
|
||||
expect(getYAxisFormattedValue('0.9999', 'none', 0)).toBe('0');
|
||||
expect(getYAxisFormattedValue('12345.6789', 'none', 0)).toBe('12345');
|
||||
expect(getYAxisFormattedValue('0.0000123456', 'none', 0)).toBe('0');
|
||||
expect(getYAxisFormattedValue('1000.000', 'none', 0)).toBe('1000');
|
||||
expect(getYAxisFormattedValue('0.000000250034', 'none', 0)).toBe('0');
|
||||
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 0)).toBe('1');
|
||||
|
||||
// with unit
|
||||
expect(getYAxisFormattedValue('4353.81', 'ms', 0)).toBe('4 s');
|
||||
});
|
||||
test('precision 1,2,3,4 decimals', () => {
|
||||
expect(getYAxisFormattedValue('1.2345', 'none', 1)).toBe('1.2');
|
||||
expect(getYAxisFormattedValue('1.2345', 'none', 2)).toBe('1.23');
|
||||
expect(getYAxisFormattedValue('1.2345', 'none', 3)).toBe('1.234');
|
||||
expect(getYAxisFormattedValue('1.2345', 'none', 4)).toBe('1.2345');
|
||||
|
||||
expect(getYAxisFormattedValue('0.0000123456', 'none', 1)).toBe('0.00001');
|
||||
expect(getYAxisFormattedValue('0.0000123456', 'none', 2)).toBe('0.000012');
|
||||
expect(getYAxisFormattedValue('0.0000123456', 'none', 3)).toBe('0.0000123');
|
||||
expect(getYAxisFormattedValue('0.0000123456', 'none', 4)).toBe('0.00001234');
|
||||
|
||||
expect(getYAxisFormattedValue('1000.000', 'none', 1)).toBe('1000');
|
||||
expect(getYAxisFormattedValue('1000.000', 'none', 2)).toBe('1000');
|
||||
expect(getYAxisFormattedValue('1000.000', 'none', 3)).toBe('1000');
|
||||
expect(getYAxisFormattedValue('1000.000', 'none', 4)).toBe('1000');
|
||||
|
||||
expect(getYAxisFormattedValue('0.000000250034', 'none', 1)).toBe('0.0000002');
|
||||
expect(getYAxisFormattedValue('0.000000250034', 'none', 2)).toBe(
|
||||
'0.00000025',
|
||||
); // leading zeros + 2 significant => same trimmed
|
||||
expect(getYAxisFormattedValue('0.000000250034', 'none', 3)).toBe(
|
||||
'0.00000025',
|
||||
);
|
||||
expect(getYAxisFormattedValue('0.000000250304', 'none', 4)).toBe(
|
||||
'0.0000002503',
|
||||
);
|
||||
|
||||
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 1)).toBe(
|
||||
'1.005',
|
||||
);
|
||||
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 2)).toBe(
|
||||
'1.0055',
|
||||
);
|
||||
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 3)).toBe(
|
||||
'1.00555',
|
||||
);
|
||||
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 4)).toBe(
|
||||
'1.005555',
|
||||
);
|
||||
|
||||
// with unit
|
||||
expect(getYAxisFormattedValue('4353.81', 'ms', 1)).toBe('4.4 s');
|
||||
expect(getYAxisFormattedValue('4353.81', 'ms', 2)).toBe('4.35 s');
|
||||
expect(getYAxisFormattedValue('4353.81', 'ms', 3)).toBe('4.354 s');
|
||||
expect(getYAxisFormattedValue('4353.81', 'ms', 4)).toBe('4.3538 s');
|
||||
|
||||
// Percentages
|
||||
expect(getYAxisFormattedValue('0.123456', 'percent', 2)).toBe('0.12%');
|
||||
expect(getYAxisFormattedValue('0.123456', 'percent', 4)).toBe('0.1235%'); // approximation
|
||||
});
|
||||
|
||||
test('precision full uses up to DEFAULT_SIGNIFICANT_DIGITS significant digits', () => {
|
||||
expect(
|
||||
getYAxisFormattedValue(
|
||||
'0.00002625429914148441',
|
||||
'none',
|
||||
PrecisionOptionsEnum.FULL,
|
||||
),
|
||||
).toBe('0.000026254299141');
|
||||
expect(
|
||||
getYAxisFormattedValue(
|
||||
'0.000026254299141484417',
|
||||
's',
|
||||
PrecisionOptionsEnum.FULL,
|
||||
),
|
||||
).toBe('26254299141484417000000 µs');
|
||||
|
||||
expect(
|
||||
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
|
||||
).toBe('4.35381 s');
|
||||
expect(getYAxisFormattedValue('500', 'ms', PrecisionOptionsEnum.FULL)).toBe(
|
||||
'500 ms',
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1,58 +1,158 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import { formattedValueToString, getValueFormat } from '@grafana/data';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { isNaN } from 'lodash-es';
|
||||
|
||||
const DEFAULT_SIGNIFICANT_DIGITS = 15;
|
||||
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
|
||||
const MAX_DECIMALS = 15;
|
||||
|
||||
export enum PrecisionOptionsEnum {
|
||||
ZERO = 0,
|
||||
ONE = 1,
|
||||
TWO = 2,
|
||||
THREE = 3,
|
||||
FOUR = 4,
|
||||
FULL = 'full',
|
||||
}
|
||||
export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
|
||||
|
||||
/**
|
||||
* Formats a number for display, preserving leading zeros after the decimal point
|
||||
* and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
|
||||
* It avoids scientific notation and removes unnecessary trailing zeros.
|
||||
*
|
||||
* @example
|
||||
* formatDecimalWithLeadingZeros(1.2345); // "1.2345"
|
||||
* formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
|
||||
* formatDecimalWithLeadingZeros(5.0); // "5"
|
||||
*
|
||||
* @param value The number to format.
|
||||
* @returns The formatted string.
|
||||
*/
|
||||
const formatDecimalWithLeadingZeros = (
|
||||
value: number,
|
||||
precision: PrecisionOption,
|
||||
): string => {
|
||||
if (value === 0) {
|
||||
return '0';
|
||||
}
|
||||
|
||||
// Use toLocaleString to get a full decimal representation without scientific notation.
|
||||
const numStr = value.toLocaleString('en-US', {
|
||||
useGrouping: false,
|
||||
maximumFractionDigits: 20,
|
||||
});
|
||||
|
||||
const [integerPart, decimalPart = ''] = numStr.split('.');
|
||||
|
||||
// If there's no decimal part, the integer part is the result.
|
||||
if (!decimalPart) {
|
||||
return integerPart;
|
||||
}
|
||||
|
||||
// Find the index of the first non-zero digit in the decimal part.
|
||||
const firstNonZeroIndex = decimalPart.search(/[^0]/);
|
||||
|
||||
// If the decimal part consists only of zeros, return just the integer part.
|
||||
if (firstNonZeroIndex === -1) {
|
||||
return integerPart;
|
||||
}
|
||||
|
||||
// Determine the number of decimals to keep: leading zeros + up to N significant digits.
|
||||
const significantDigits =
|
||||
precision === PrecisionOptionsEnum.FULL
|
||||
? DEFAULT_SIGNIFICANT_DIGITS
|
||||
: precision;
|
||||
const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
|
||||
|
||||
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
|
||||
const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
|
||||
const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
|
||||
|
||||
// If precision is 0, we drop the decimal part entirely.
|
||||
if (precision === 0) {
|
||||
return integerPart;
|
||||
}
|
||||
|
||||
// Remove any trailing zeros from the result to keep it clean.
|
||||
const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
|
||||
|
||||
// Return the integer part, or the integer and decimal parts combined.
|
||||
return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
|
||||
};
|
||||
|
||||
/**
|
||||
* Formats a Y-axis value based on a given format string.
|
||||
*
|
||||
* @param value The string value from the axis.
|
||||
* @param format The format identifier (e.g. 'none', 'ms', 'bytes', 'short').
|
||||
* @returns A formatted string ready for display.
|
||||
*/
|
||||
export const getYAxisFormattedValue = (
|
||||
value: string,
|
||||
format: string,
|
||||
precision: PrecisionOption = 2, // default precision requested
|
||||
): string => {
|
||||
let decimalPrecision: number | undefined;
|
||||
const parsedValue = getValueFormat(format)(
|
||||
parseFloat(value),
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
);
|
||||
try {
|
||||
const decimalSplitted = parsedValue.text.split('.');
|
||||
if (decimalSplitted.length === 1) {
|
||||
decimalPrecision = 0;
|
||||
} else {
|
||||
const decimalDigits = decimalSplitted[1].split('');
|
||||
decimalPrecision = decimalDigits.length;
|
||||
let nonZeroCtr = 0;
|
||||
for (let idx = 0; idx < decimalDigits.length; idx += 1) {
|
||||
if (decimalDigits[idx] !== '0') {
|
||||
nonZeroCtr += 1;
|
||||
if (nonZeroCtr >= 2) {
|
||||
decimalPrecision = idx + 1;
|
||||
}
|
||||
} else if (nonZeroCtr) {
|
||||
decimalPrecision = idx;
|
||||
break;
|
||||
}
|
||||
}
|
||||
const numValue = parseFloat(value);
|
||||
|
||||
// Handle non-numeric or special values first.
|
||||
if (isNaN(numValue)) return 'NaN';
|
||||
if (numValue === Infinity) return '∞';
|
||||
if (numValue === -Infinity) return '-∞';
|
||||
|
||||
const decimalPlaces = value.split('.')[1]?.length || undefined;
|
||||
|
||||
// Use custom formatter for the 'none' format honoring precision
|
||||
if (format === 'none') {
|
||||
return formatDecimalWithLeadingZeros(numValue, precision);
|
||||
}
|
||||
|
||||
// For all other standard formats, delegate to grafana/data's built-in formatter.
|
||||
const computeDecimals = (): number | undefined => {
|
||||
if (precision === PrecisionOptionsEnum.FULL) {
|
||||
return decimalPlaces && decimalPlaces >= DEFAULT_SIGNIFICANT_DIGITS
|
||||
? decimalPlaces
|
||||
: DEFAULT_SIGNIFICANT_DIGITS;
|
||||
}
|
||||
return precision;
|
||||
};
|
||||
|
||||
return formattedValueToString(
|
||||
getValueFormat(format)(
|
||||
parseFloat(value),
|
||||
decimalPrecision,
|
||||
undefined,
|
||||
undefined,
|
||||
),
|
||||
);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
return `${parseFloat(value)}`;
|
||||
};
|
||||
const fallbackFormat = (): string => {
|
||||
if (precision === PrecisionOptionsEnum.FULL) return numValue.toString();
|
||||
if (precision === 0) return Math.round(numValue).toString();
|
||||
return precision !== undefined
|
||||
? numValue
|
||||
.toFixed(precision)
|
||||
.replace(/(\.[0-9]*[1-9])0+$/, '$1') // trimming zeros
|
||||
.replace(/\.$/, '')
|
||||
: numValue.toString();
|
||||
};
|
||||
|
||||
export const getToolTipValue = (value: string, format?: string): string => {
|
||||
try {
|
||||
return formattedValueToString(
|
||||
getValueFormat(format)(parseFloat(value), undefined, undefined, undefined),
|
||||
);
|
||||
const formatter = getValueFormat(format);
|
||||
const formattedValue = formatter(numValue, computeDecimals(), undefined);
|
||||
if (formattedValue.text && formattedValue.text.includes('.')) {
|
||||
formattedValue.text = formatDecimalWithLeadingZeros(
|
||||
parseFloat(formattedValue.text),
|
||||
precision,
|
||||
);
|
||||
}
|
||||
return formattedValueToString(formattedValue);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
Sentry.captureEvent({
|
||||
message: `Error applying formatter: ${
|
||||
error instanceof Error ? error.message : 'Unknown error'
|
||||
}`,
|
||||
level: 'error',
|
||||
});
|
||||
return fallbackFormat();
|
||||
}
|
||||
return `${value}`;
|
||||
};
|
||||
|
||||
export const getToolTipValue = (
|
||||
value: string | number,
|
||||
format?: string,
|
||||
precision?: PrecisionOption,
|
||||
): string =>
|
||||
getYAxisFormattedValue(value?.toString(), format || 'none', precision);
|
||||
|
||||
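To summarize the new precision behaviour, a few representative calls with the results asserted by the test file above (the import path is assumed from the file location):

import {
	getYAxisFormattedValue,
	PrecisionOptionsEnum,
} from 'components/Graph/yAxisConfig';

// Results below match the assertions in yAxisConfig.test.ts above.
console.log(getYAxisFormattedValue('4353.81', 'ms', 0)); // '4 s' — precision 0 drops decimals
console.log(getYAxisFormattedValue('4353.81', 'ms', 2)); // '4.35 s' — default fixed precision
console.log(getYAxisFormattedValue('0.000000250034', 'none', 2)); // '0.00000025' — leading zeros preserved
console.log(
	getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
); // '4.35381 s' — FULL keeps up to 15 significant digits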
@@ -60,6 +60,14 @@ function Metrics({
|
||||
setElement,
|
||||
} = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });
|
||||
|
||||
const legendScrollPositionRef = useRef<{
|
||||
scrollTop: number;
|
||||
scrollLeft: number;
|
||||
}>({
|
||||
scrollTop: 0,
|
||||
scrollLeft: 0,
|
||||
});
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
() =>
|
||||
getHostQueryPayload(
|
||||
@@ -147,6 +155,13 @@ function Metrics({
|
||||
maxTimeScale: graphTimeIntervals[idx].end,
|
||||
onDragSelect: (start, end) => onDragSelect(start, end, idx),
|
||||
query: currentQuery,
|
||||
legendScrollPosition: legendScrollPositionRef.current,
|
||||
setLegendScrollPosition: (position: {
|
||||
scrollTop: number;
|
||||
scrollLeft: number;
|
||||
}) => {
|
||||
legendScrollPositionRef.current = position;
|
||||
},
|
||||
}),
|
||||
),
|
||||
[
|
||||
|
||||
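The Metrics hunk threads a ref-backed scroll position into each widget so the legend's scroll offset survives re-renders. A generic sketch of that pattern; the hook name is illustrative and not part of the codebase.

import { useRef } from 'react';

// Keep scroll offsets in a ref so chart re-renders do not reset the legend scroll position.
function useLegendScrollPosition(): {
	get: () => { scrollTop: number; scrollLeft: number };
	set: (position: { scrollTop: number; scrollLeft: number }) => void;
} {
	const positionRef = useRef({ scrollTop: 0, scrollLeft: 0 });
	return {
		get: (): { scrollTop: number; scrollLeft: number } => positionRef.current,
		set: (position): void => {
			positionRef.current = position;
		},
	};
}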
@@ -132,9 +132,9 @@
justify-content: center;
}

.json-action-btn {
.log-detail-drawer__actions {
display: flex;
gap: 8px;
gap: 4px;
}
}
@@ -319,31 +319,35 @@ function LogDetailInner({
|
||||
</Radio.Button>
|
||||
</Radio.Group>
|
||||
|
||||
{selectedView === VIEW_TYPES.JSON && (
|
||||
<div className="json-action-btn">
|
||||
<div className="log-detail-drawer__actions">
|
||||
{selectedView === VIEW_TYPES.CONTEXT && (
|
||||
<Tooltip
|
||||
title="Show Filters"
|
||||
placement="topLeft"
|
||||
aria-label="Show Filters"
|
||||
>
|
||||
<Button
|
||||
className="action-btn"
|
||||
icon={<Filter size={16} />}
|
||||
onClick={handleFilterVisible}
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
<Tooltip
|
||||
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
|
||||
placement="topLeft"
|
||||
aria-label={
|
||||
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
|
||||
}
|
||||
>
|
||||
<Button
|
||||
className="action-btn"
|
||||
icon={<Copy size={16} />}
|
||||
onClick={handleJSONCopy}
|
||||
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{selectedView === VIEW_TYPES.CONTEXT && (
|
||||
<Button
|
||||
className="action-btn"
|
||||
icon={<Filter size={16} />}
|
||||
onClick={handleFilterVisible}
|
||||
/>
|
||||
)}
|
||||
|
||||
<Tooltip title="Copy Log Link" placement="left" aria-label="Copy Log Link">
|
||||
<Button
|
||||
className="action-btn"
|
||||
icon={<Copy size={16} />}
|
||||
onClick={onLogCopy}
|
||||
/>
|
||||
</Tooltip>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
{isFilterVisible && contextQuery?.builder.queryData[0] && (
|
||||
<div className="log-detail-drawer-query-container">
|
||||
@@ -383,7 +387,8 @@ function LogDetailInner({
|
||||
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
|
||||
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
|
||||
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
|
||||
logLineTimestamp={log.timestamp.toString()}
|
||||
timestamp={log.timestamp.toString()}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>
|
||||
)}
|
||||
</Drawer>
|
||||
|
||||
@@ -57,8 +57,8 @@ export const RawLogViewContainer = styled(Row)<{
|
||||
transition: background-color 2s ease-in;`
|
||||
: ''}
|
||||
|
||||
${({ $isCustomHighlighted, $isDarkMode, $logType }): string =>
|
||||
getCustomHighlightBackground($isCustomHighlighted, $isDarkMode, $logType)}
|
||||
${({ $isCustomHighlighted }): string =>
|
||||
getCustomHighlightBackground($isCustomHighlighted)}
|
||||
`;
|
||||
|
||||
export const InfoIconWrapper = styled(Info)`
|
||||
|
||||
@@ -153,7 +153,9 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
children: (
<TableBodyContent
dangerouslySetInnerHTML={{
__html: getSanitizedLogBody(field as string),
__html: getSanitizedLogBody(field as string, {
shouldEscapeHtml: true,
}),
}}
fontSize={fontSize}
linesPerRow={linesPerRow}
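The new shouldEscapeHtml option suggests the log body is HTML-escaped before sanitization and injection via dangerouslySetInnerHTML. A generic sketch of such an escape step; this is not the project's actual getSanitizedLogBody implementation.

// Illustrative escape helper only — the real sanitization lives elsewhere in the codebase.
function escapeHtml(body: string): string {
	return body
		.replace(/&/g, '&amp;')
		.replace(/</g, '&lt;')
		.replace(/>/g, '&gt;')
		.replace(/"/g, '&quot;')
		.replace(/'/g, '&#39;');
}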
@@ -32,6 +32,7 @@ import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { CustomMultiSelectProps, CustomTagProps, OptionData } from './types';
|
||||
import {
|
||||
ALL_SELECTED_VALUE,
|
||||
filterOptionsBySearch,
|
||||
handleScrollToBottom,
|
||||
prioritizeOrAddOptionForMultiSelect,
|
||||
@@ -43,8 +44,6 @@ enum ToggleTagValue {
|
||||
All = 'All',
|
||||
}
|
||||
|
||||
const ALL_SELECTED_VALUE = '__ALL__'; // Constant for the special value
|
||||
|
||||
const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
|
||||
placeholder = 'Search...',
|
||||
className,
|
||||
|
||||
@@ -5,6 +5,8 @@ import { OptionData } from './types';
|
||||
|
||||
export const SPACEKEY = ' ';
|
||||
|
||||
export const ALL_SELECTED_VALUE = '__ALL__'; // Constant for the special value
|
||||
|
||||
export const prioritizeOrAddOptionForSingleSelect = (
|
||||
options: OptionData[],
|
||||
value: string,
|
||||
|
||||
@@ -398,7 +398,7 @@
|
||||
}
|
||||
|
||||
.qb-search-container {
|
||||
.metrics-select-container {
|
||||
.metrics-container {
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,6 +22,8 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
|
||||
showOnlyWhereClause = false,
|
||||
showTraceOperator = false,
|
||||
version,
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
}: QueryBuilderProps): JSX.Element {
|
||||
const {
|
||||
currentQuery,
|
||||
@@ -175,6 +177,8 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
|
||||
queryVariant={config?.queryVariant || 'dropdown'}
|
||||
showOnlyWhereClause={showOnlyWhereClause}
|
||||
isListViewPanel={isListViewPanel}
|
||||
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
|
||||
signalSourceChangeEnabled={signalSourceChangeEnabled}
|
||||
/>
|
||||
) : (
|
||||
currentQuery.builder.queryData.map((query, index) => (
|
||||
@@ -193,7 +197,9 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
|
||||
queryVariant={config?.queryVariant || 'dropdown'}
|
||||
showOnlyWhereClause={showOnlyWhereClause}
|
||||
isListViewPanel={isListViewPanel}
|
||||
signalSource={config?.signalSource || ''}
|
||||
signalSource={query.source as 'meter' | ''}
|
||||
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
|
||||
signalSourceChangeEnabled={signalSourceChangeEnabled}
|
||||
/>
|
||||
))
|
||||
)}
|
||||
|
||||
@@ -1,5 +1,14 @@
|
||||
.metrics-select-container {
|
||||
.metrics-source-select-container {
|
||||
margin-bottom: 8px;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
width: 100%;
|
||||
|
||||
.source-selector {
|
||||
width: 120px;
|
||||
}
|
||||
|
||||
.ant-select-selector {
|
||||
width: 100%;
|
||||
@@ -42,7 +51,7 @@
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.metrics-select-container {
|
||||
.metrics-source-select-container {
|
||||
.ant-select-selector {
|
||||
border: 1px solid var(--bg-vanilla-300) !important;
|
||||
background: var(--bg-vanilla-100);
|
||||
|
||||
@@ -1,34 +1,121 @@
|
||||
import './MetricsSelect.styles.scss';
|
||||
|
||||
import { Select } from 'antd';
|
||||
import {
|
||||
initialQueriesMap,
|
||||
initialQueryMeterWithType,
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { AggregatorFilter } from 'container/QueryBuilder/filters';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { memo } from 'react';
|
||||
import { memo, useCallback, useMemo, useState } from 'react';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { SelectOption } from 'types/common/select';
|
||||
|
||||
export const SOURCE_OPTIONS: SelectOption<string, string>[] = [
|
||||
{ value: 'metrics', label: 'Metrics' },
|
||||
{ value: 'meter', label: 'Meter' },
|
||||
];
|
||||
|
||||
export const MetricsSelect = memo(function MetricsSelect({
|
||||
query,
|
||||
index,
|
||||
version,
|
||||
signalSource,
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
}: {
|
||||
query: IBuilderQuery;
|
||||
index: number;
|
||||
version: string;
|
||||
signalSource: 'meter' | '';
|
||||
onSignalSourceChange: (value: string) => void;
|
||||
signalSourceChangeEnabled: boolean;
|
||||
}): JSX.Element {
|
||||
const [attributeKeys, setAttributeKeys] = useState<BaseAutocompleteData[]>([]);
|
||||
|
||||
const { handleChangeAggregatorAttribute } = useQueryOperations({
|
||||
index,
|
||||
query,
|
||||
entityVersion: version,
|
||||
});
|
||||
|
||||
const handleAggregatorAttributeChange = useCallback(
|
||||
(value: BaseAutocompleteData, isEditMode?: boolean) => {
|
||||
handleChangeAggregatorAttribute(value, isEditMode, attributeKeys || []);
|
||||
},
|
||||
[handleChangeAggregatorAttribute, attributeKeys],
|
||||
);
|
||||
|
||||
const { updateAllQueriesOperators, handleSetQueryData } = useQueryBuilder();
|
||||
|
||||
const source = useMemo(
|
||||
() => (signalSource === 'meter' ? 'meter' : 'metrics'),
|
||||
[signalSource],
|
||||
);
|
||||
|
||||
const defaultMeterQuery = useMemo(
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
initialQueryMeterWithType,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'meter' as 'meter' | '',
|
||||
),
|
||||
[updateAllQueriesOperators],
|
||||
);
|
||||
|
||||
const defaultMetricsQuery = useMemo(
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
initialQueriesMap.metrics,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'',
|
||||
),
|
||||
[updateAllQueriesOperators],
|
||||
);
|
||||
|
||||
const handleSignalSourceChange = (value: string): void => {
|
||||
onSignalSourceChange(value);
|
||||
handleSetQueryData(
|
||||
index,
|
||||
value === 'meter'
|
||||
? {
|
||||
...defaultMeterQuery.builder.queryData[0],
|
||||
source: 'meter',
|
||||
queryName: query.queryName,
|
||||
}
|
||||
: {
|
||||
...defaultMetricsQuery.builder.queryData[0],
|
||||
source: '',
|
||||
queryName: query.queryName,
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="metrics-select-container">
|
||||
<div className="metrics-source-select-container">
|
||||
{signalSourceChangeEnabled && (
|
||||
<Select
|
||||
className="source-selector"
|
||||
placeholder="Source"
|
||||
options={SOURCE_OPTIONS}
|
||||
value={source}
|
||||
defaultValue="metrics"
|
||||
onChange={handleSignalSourceChange}
|
||||
/>
|
||||
)}
|
||||
|
||||
<AggregatorFilter
|
||||
onChange={handleChangeAggregatorAttribute}
|
||||
onChange={handleAggregatorAttributeChange}
|
||||
query={query}
|
||||
index={index}
|
||||
signalSource={signalSource || ''}
|
||||
setAttributeKeys={setAttributeKeys}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -500,7 +500,10 @@ function QueryAddOns({
|
||||
}
|
||||
value={addOn}
|
||||
>
|
||||
<div className="add-on-tab-title">
|
||||
<div
|
||||
className="add-on-tab-title"
|
||||
data-testid={`query-add-on-${addOn.key}`}
|
||||
>
|
||||
{addOn.icon}
|
||||
{addOn.label}
|
||||
</div>
|
||||
|
||||
@@ -33,7 +33,13 @@ export const QueryV2 = memo(function QueryV2({
|
||||
showOnlyWhereClause = false,
|
||||
signalSource = '',
|
||||
isMultiQueryAllowed = false,
|
||||
}: QueryProps & { ref: React.RefObject<HTMLDivElement> }): JSX.Element {
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
}: QueryProps & {
|
||||
ref: React.RefObject<HTMLDivElement>;
|
||||
onSignalSourceChange: (value: string) => void;
|
||||
signalSourceChangeEnabled: boolean;
|
||||
}): JSX.Element {
|
||||
const { cloneQuery, panelType } = useQueryBuilder();
|
||||
|
||||
const showFunctions = query?.functions?.length > 0;
|
||||
@@ -207,12 +213,14 @@ export const QueryV2 = memo(function QueryV2({
|
||||
<div className="qb-elements-container">
|
||||
<div className="qb-search-container">
|
||||
{dataSource === DataSource.METRICS && (
|
||||
<div className="metrics-select-container">
|
||||
<div className="metrics-container">
|
||||
<MetricsSelect
|
||||
query={query}
|
||||
index={index}
|
||||
version={ENTITY_VERSION_V5}
|
||||
signalSource={signalSource as 'meter' | ''}
|
||||
onSignalSourceChange={onSignalSourceChange}
|
||||
signalSourceChangeEnabled={signalSourceChangeEnabled}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
@@ -258,7 +266,7 @@ export const QueryV2 = memo(function QueryV2({
|
||||
panelType={panelType}
|
||||
query={query}
|
||||
index={index}
|
||||
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}`}
|
||||
key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}-${signalSource}`}
|
||||
version="v4"
|
||||
signalSource={signalSource as 'meter' | ''}
|
||||
/>
|
||||
|
||||
@@ -224,7 +224,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
const visitedPairs: Set<string> = new Set(); // Set to track visited query pairs

// Map extracted query pairs to key-specific pair information for faster access
let queryPairsMap = getQueryPairsMap(existingQuery.trim());
let queryPairsMap = getQueryPairsMap(existingQuery);

filters?.items?.forEach((filter) => {
const { key, op, value } = filter;
@@ -309,7 +309,7 @@ export const convertFiltersToExpressionWithExistingQuery = (
)}${OPERATORS.IN} ${formattedValue} ${modifiedQuery.slice(
notInPair.position.valueEnd + 1,
)}`;
queryPairsMap = getQueryPairsMap(modifiedQuery.trim());
queryPairsMap = getQueryPairsMap(modifiedQuery);
}
shouldAddToNonExisting = false; // Don't add this to non-existing filters
} else if (
@@ -45,6 +45,12 @@
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
|
||||
.filter-separator {
|
||||
height: 1px;
|
||||
background-color: var(--bg-slate-400);
|
||||
margin: 4px 0;
|
||||
}
|
||||
|
||||
.value {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@@ -177,6 +183,12 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.values {
|
||||
.filter-separator {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,191 @@
|
||||
import { FiltersType, QuickFiltersSource } from 'components/QuickFilters/types';
|
||||
import { useGetAggregateValues } from 'hooks/queryBuilder/useGetAggregateValues';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQueryKeyValueSuggestions';
|
||||
import { quickFiltersAttributeValuesResponse } from 'mocks-server/__mockdata__/customQuickFilters';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { IAttributeValuesResponse } from 'types/api/queryBuilder/getAttributesValues';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import CheckboxFilter from './Checkbox';
|
||||
|
||||
// Mock the query builder hook
|
||||
jest.mock('hooks/queryBuilder/useQueryBuilder');
|
||||
const mockUseQueryBuilder = jest.mocked(useQueryBuilder);
|
||||
|
||||
// Mock the aggregate values hook
|
||||
jest.mock('hooks/queryBuilder/useGetAggregateValues');
|
||||
|
||||
const mockUseGetAggregateValues = jest.mocked(useGetAggregateValues);
|
||||
|
||||
// Mock the key value suggestions hook
|
||||
jest.mock('hooks/querySuggestions/useGetQueryKeyValueSuggestions');
|
||||
|
||||
const mockUseGetQueryKeyValueSuggestions = jest.mocked(
|
||||
useGetQueryKeyValueSuggestions,
|
||||
);
|
||||
|
||||
interface MockFilterConfig {
|
||||
title: string;
|
||||
attributeKey: {
|
||||
key: string;
|
||||
dataType: DataTypes;
|
||||
type: string;
|
||||
};
|
||||
dataSource: DataSource;
|
||||
defaultOpen: boolean;
|
||||
type: FiltersType;
|
||||
}
|
||||
|
||||
const createMockFilter = (
|
||||
overrides: Partial<MockFilterConfig> = {},
|
||||
): MockFilterConfig => ({
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
title: 'Service Name',
|
||||
attributeKey: {
|
||||
key: 'service.name',
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
dataSource: DataSource.LOGS,
|
||||
defaultOpen: false,
|
||||
type: FiltersType.CHECKBOX,
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createMockQueryBuilderData = (hasActiveFilters = false): any => ({
|
||||
lastUsedQuery: 0,
|
||||
currentQuery: {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
filters: {
|
||||
items: hasActiveFilters
|
||||
? [
|
||||
{
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
op: 'in',
|
||||
value: ['otel-demo', 'sample-flask'],
|
||||
},
|
||||
]
|
||||
: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
redirectWithQueryBuilderData: jest.fn(),
|
||||
});
|
||||
|
||||
describe('CheckboxFilter - User Flows', () => {
|
||||
beforeEach(() => {
|
||||
// Reset all mocks
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Default mock implementations using the same structure as existing tests
|
||||
mockUseGetAggregateValues.mockReturnValue({
|
||||
data: {
|
||||
payload: {
|
||||
stringAttributeValues: [
|
||||
'mq-kafka',
|
||||
'otel-demo',
|
||||
'otlp-python',
|
||||
'sample-flask',
|
||||
],
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
} as UseQueryResult<SuccessResponse<IAttributeValuesResponse>>);
|
||||
|
||||
mockUseGetQueryKeyValueSuggestions.mockReturnValue({
|
||||
data: null,
|
||||
isLoading: false,
|
||||
} as any);
|
||||
|
||||
// Setup MSW server for API calls
|
||||
server.use(
|
||||
rest.get('*/api/v3/autocomplete/attribute_values', (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
it('should auto-open filter and prioritize checked items with visual separator when user opens page with active filters', async () => {
|
||||
// Mock query builder with active filters
|
||||
mockUseQueryBuilder.mockReturnValue(createMockQueryBuilderData(true) as any);
|
||||
|
||||
const mockFilter = createMockFilter({ defaultOpen: false });
|
||||
|
||||
render(
|
||||
<CheckboxFilter
|
||||
filter={mockFilter}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
/>,
|
||||
);
|
||||
|
||||
// User should see the filter is automatically opened (not collapsed)
|
||||
expect(screen.getByText('Service Name')).toBeInTheDocument();
|
||||
await waitFor(() => {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// User should see visual separator between checked and unchecked items
|
||||
expect(screen.getByTestId('filter-separator')).toBeInTheDocument();
|
||||
|
||||
// User should see checked items at the top
|
||||
await waitFor(() => {
|
||||
const checkboxes = screen.getAllByRole('checkbox');
|
||||
expect(checkboxes).toHaveLength(4); // Ensure we have exactly 4 checkboxes
|
||||
expect(checkboxes[0]).toBeChecked(); // otel-demo should be first and checked
|
||||
expect(checkboxes[1]).toBeChecked(); // sample-flask should be second and checked
|
||||
expect(checkboxes[2]).not.toBeChecked(); // mq-kafka should be unchecked
|
||||
expect(checkboxes[3]).not.toBeChecked(); // otlp-python should be unchecked
|
||||
});
|
||||
});
|
||||
|
||||
it('should respect user preference when user manually toggles filter over auto-open behavior', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
// Mock query builder with active filters
|
||||
mockUseQueryBuilder.mockReturnValue(createMockQueryBuilderData(true) as any);
|
||||
|
||||
const mockFilter = createMockFilter({ defaultOpen: false });
|
||||
|
||||
render(
|
||||
<CheckboxFilter
|
||||
filter={mockFilter}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Initially auto-opened due to active filters
|
||||
await waitFor(() => {
|
||||
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// User manually closes the filter
|
||||
await user.click(screen.getByText('Service Name'));
|
||||
|
||||
// User should see filter is now closed (respecting user preference)
|
||||
expect(
|
||||
screen.queryByPlaceholderText('Filter values'),
|
||||
).not.toBeInTheDocument();
|
||||
|
||||
// User manually opens the filter again
|
||||
await user.click(screen.getByText('Service Name'));
|
||||
|
||||
// User should see filter is now open (respecting user preference)
|
||||
await waitFor(() => {
|
||||
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -21,7 +21,7 @@ import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQue
|
||||
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||
import { cloneDeep, isArray, isEqual, isFunction } from 'lodash-es';
|
||||
import { ChevronDown, ChevronRight } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { Fragment, useMemo, useState } from 'react';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
@@ -54,7 +54,8 @@ interface ICheckboxProps {
|
||||
export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
|
||||
const { source, filter, onFilterChange } = props;
|
||||
const [searchText, setSearchText] = useState<string>('');
|
||||
const [isOpen, setIsOpen] = useState<boolean>(filter.defaultOpen);
|
||||
// null = no user action, true = user opened, false = user closed
|
||||
const [userToggleState, setUserToggleState] = useState<boolean | null>(null);
|
||||
const [visibleItemsCount, setVisibleItemsCount] = useState<number>(10);
|
||||
|
||||
const {
|
||||
@@ -63,6 +64,33 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
|
||||
redirectWithQueryBuilderData,
|
||||
} = useQueryBuilder();
|
||||
|
||||
// Check if this filter has active filters in the query
|
||||
const isSomeFilterPresentForCurrentAttribute = useMemo(
|
||||
() =>
|
||||
currentQuery.builder.queryData?.[
|
||||
lastUsedQuery || 0
|
||||
]?.filters?.items?.some((item) =>
|
||||
isEqual(item.key?.key, filter.attributeKey.key),
|
||||
),
|
||||
[currentQuery.builder.queryData, lastUsedQuery, filter.attributeKey.key],
|
||||
);
|
||||
|
||||
// Derive isOpen from filter state + user action
|
||||
const isOpen = useMemo(() => {
|
||||
// If user explicitly toggled, respect that
|
||||
if (userToggleState !== null) return userToggleState;
|
||||
|
||||
// Auto-open if this filter has active filters in the query
|
||||
if (isSomeFilterPresentForCurrentAttribute) return true;
|
||||
|
||||
// Otherwise use default behavior (first 2 filters open)
|
||||
return filter.defaultOpen;
|
||||
}, [
|
||||
userToggleState,
|
||||
isSomeFilterPresentForCurrentAttribute,
|
||||
filter.defaultOpen,
|
||||
]);
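// For reference, the precedence implemented above reduces to:
//   userToggleState === true                    -> open (user explicitly opened)
//   userToggleState === false                   -> closed (user explicitly closed)
//   userToggleState === null + active filter    -> open (auto-open)
//   userToggleState === null + no active filter -> filter.defaultOpen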
|
||||
|
||||
const { data, isLoading } = useGetAggregateValues(
|
||||
{
|
||||
aggregateOperator: filter.aggregateOperator || 'noop',
|
||||
@@ -128,8 +156,6 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
|
||||
);
|
||||
}, [data?.payload, filter.attributeKey.dataType, keyValueSuggestions, source]);
|
||||
|
||||
const currentAttributeKeys = attributeValues.slice(0, visibleItemsCount);
|
||||
|
||||
const setSearchTextDebounced = useDebouncedFn((...args) => {
|
||||
setSearchText(args[0] as string);
|
||||
}, DEBOUNCE_DELAY);
|
||||
@@ -202,6 +228,23 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
|
||||
const isMultipleValuesTrueForTheKey =
|
||||
Object.values(currentFilterState).filter((val) => val).length > 1;
|
||||
|
||||
// Sort checked items to the top, then unchecked items
|
||||
const currentAttributeKeys = useMemo(() => {
|
||||
const checkedValues = attributeValues.filter(
|
||||
(val) => currentFilterState[val],
|
||||
);
|
||||
const uncheckedValues = attributeValues.filter(
|
||||
(val) => !currentFilterState[val],
|
||||
);
|
||||
return [...checkedValues, ...uncheckedValues].slice(0, visibleItemsCount);
|
||||
}, [attributeValues, currentFilterState, visibleItemsCount]);
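// Worked example (mirrors the unit test above): with
//   attributeValues    = ['mq-kafka', 'otel-demo', 'otlp-python', 'sample-flask']
//   currentFilterState = { 'otel-demo': true, 'sample-flask': true }
// the memo yields ['otel-demo', 'sample-flask', 'mq-kafka', 'otlp-python'],
// so both checked values render first and the separator is drawn at index 2.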
|
||||
|
||||
// Count of checked values in the currently visible items
|
||||
const checkedValuesCount = useMemo(
|
||||
() => currentAttributeKeys.filter((val) => currentFilterState[val]).length,
|
||||
[currentAttributeKeys, currentFilterState],
|
||||
);
|
||||
|
||||
const handleClearFilterAttribute = (): void => {
|
||||
const preparedQuery: Query = {
|
||||
...currentQuery,
|
||||
@@ -235,12 +278,6 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
|
||||
}
|
||||
};
|
||||
|
||||
const isSomeFilterPresentForCurrentAttribute = currentQuery.builder.queryData?.[
|
||||
lastUsedQuery || 0
|
||||
]?.filters?.items?.some((item) =>
|
||||
isEqual(item.key?.key, filter.attributeKey.key),
|
||||
);
|
||||
|
||||
const onChange = (
|
||||
value: string,
|
||||
checked: boolean,
|
||||
@@ -490,10 +527,10 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
|
||||
className="filter-header-checkbox"
|
||||
onClick={(): void => {
|
||||
if (isOpen) {
|
||||
setIsOpen(false);
|
||||
setUserToggleState(false);
|
||||
setVisibleItemsCount(10);
|
||||
} else {
|
||||
setIsOpen(true);
|
||||
setUserToggleState(true);
|
||||
}
|
||||
}}
|
||||
>
|
||||
@@ -540,50 +577,59 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
|
||||
)}
|
||||
{attributeValues.length > 0 ? (
|
||||
<section className="values">
|
||||
{currentAttributeKeys.map((value: string) => (
|
||||
<div key={value} className="value">
|
||||
<Checkbox
|
||||
onChange={(e): void => onChange(value, e.target.checked, false)}
|
||||
checked={currentFilterState[value]}
|
||||
disabled={isFilterDisabled}
|
||||
rootClassName="check-box"
|
||||
/>
|
||||
{currentAttributeKeys.map((value: string, index: number) => (
|
||||
<Fragment key={value}>
|
||||
{index === checkedValuesCount && checkedValuesCount > 0 && (
|
||||
<div
|
||||
key="separator"
|
||||
className="filter-separator"
|
||||
data-testid="filter-separator"
|
||||
/>
|
||||
)}
|
||||
<div className="value">
|
||||
<Checkbox
|
||||
onChange={(e): void => onChange(value, e.target.checked, false)}
|
||||
checked={currentFilterState[value]}
|
||||
disabled={isFilterDisabled}
|
||||
rootClassName="check-box"
|
||||
/>
|
||||
|
||||
<div
|
||||
className={cx(
|
||||
'checkbox-value-section',
|
||||
isFilterDisabled ? 'filter-disabled' : '',
|
||||
)}
|
||||
onClick={(): void => {
|
||||
if (isFilterDisabled) {
|
||||
return;
|
||||
}
|
||||
onChange(value, currentFilterState[value], true);
|
||||
}}
|
||||
>
|
||||
<div className={`${filter.title} label-${value}`} />
|
||||
{filter.customRendererForValue ? (
|
||||
filter.customRendererForValue(value)
|
||||
) : (
|
||||
<Typography.Text
|
||||
className="value-string"
|
||||
ellipsis={{ tooltip: { placement: 'right' } }}
|
||||
>
|
||||
{String(value)}
|
||||
</Typography.Text>
|
||||
)}
|
||||
<Button type="text" className="only-btn">
|
||||
{isSomeFilterPresentForCurrentAttribute
|
||||
? currentFilterState[value] && !isMultipleValuesTrueForTheKey
|
||||
? 'All'
|
||||
: 'Only'
|
||||
: 'Only'}
|
||||
</Button>
|
||||
<Button type="text" className="toggle-btn">
|
||||
Toggle
|
||||
</Button>
|
||||
<div
|
||||
className={cx(
|
||||
'checkbox-value-section',
|
||||
isFilterDisabled ? 'filter-disabled' : '',
|
||||
)}
|
||||
onClick={(): void => {
|
||||
if (isFilterDisabled) {
|
||||
return;
|
||||
}
|
||||
onChange(value, currentFilterState[value], true);
|
||||
}}
|
||||
>
|
||||
<div className={`${filter.title} label-${value}`} />
|
||||
{filter.customRendererForValue ? (
|
||||
filter.customRendererForValue(value)
|
||||
) : (
|
||||
<Typography.Text
|
||||
className="value-string"
|
||||
ellipsis={{ tooltip: { placement: 'right' } }}
|
||||
>
|
||||
{String(value)}
|
||||
</Typography.Text>
|
||||
)}
|
||||
<Button type="text" className="only-btn">
|
||||
{isSomeFilterPresentForCurrentAttribute
|
||||
? currentFilterState[value] && !isMultipleValuesTrueForTheKey
|
||||
? 'All'
|
||||
: 'Only'
|
||||
: 'Only'}
|
||||
</Button>
|
||||
<Button type="text" className="toggle-btn">
|
||||
Toggle
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Fragment>
|
||||
))}
|
||||
</section>
|
||||
) : isEmptyStateWithDocsEnabled ? (
|
||||
|
||||
@@ -87,6 +87,7 @@ function SpanHoverCard({
|
||||
</Typography.Text>
|
||||
</div>
|
||||
}
|
||||
mouseEnterDelay={0.5}
|
||||
content={getContent()}
|
||||
trigger="hover"
|
||||
rootClassName="span-hover-card"
|
||||
|
||||
@@ -0,0 +1,258 @@
|
||||
import { act, fireEvent, render, screen } from '@testing-library/react';
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
|
||||
import SpanHoverCard from '../SpanHoverCard';
|
||||
|
||||
// Mock dayjs completely for testing
|
||||
jest.mock('dayjs', () => {
|
||||
const mockDayjs = jest.fn(() => ({
|
||||
format: jest.fn((formatString: string) => {
|
||||
if (formatString === 'D/M/YY - HH:mm:ss') {
|
||||
return '15/3/24 - 14:23:45';
|
||||
}
|
||||
return 'mock-date';
|
||||
}),
|
||||
}));
|
||||
Object.assign(mockDayjs, {
|
||||
extend: jest.fn(),
|
||||
tz: { guess: jest.fn(() => 'UTC') },
|
||||
});
|
||||
return mockDayjs;
|
||||
});
|
||||
|
||||
const HOVER_ELEMENT_ID = 'hover-element';
|
||||
|
||||
const mockSpan: Span = {
|
||||
spanId: 'test-span-id',
|
||||
traceId: 'test-trace-id',
|
||||
rootSpanId: 'root-span-id',
|
||||
parentSpanId: 'parent-span-id',
|
||||
name: 'GET /api/users',
|
||||
timestamp: 1679748225000000,
|
||||
durationNano: 150000000,
|
||||
serviceName: 'user-service',
|
||||
kind: 1,
|
||||
hasError: false,
|
||||
level: 1,
|
||||
references: [],
|
||||
tagMap: {},
|
||||
event: [
|
||||
{
|
||||
name: 'event1',
|
||||
timeUnixNano: 1679748225100000,
|
||||
attributeMap: {},
|
||||
isError: false,
|
||||
},
|
||||
{
|
||||
name: 'event2',
|
||||
timeUnixNano: 1679748225200000,
|
||||
attributeMap: {},
|
||||
isError: false,
|
||||
},
|
||||
],
|
||||
rootName: 'root-span',
|
||||
statusMessage: '',
|
||||
statusCodeString: 'OK',
|
||||
spanKind: 'server',
|
||||
hasChildren: false,
|
||||
hasSibling: false,
|
||||
subTreeNodeCount: 1,
|
||||
};
|
||||
|
||||
const mockTraceMetadata = {
|
||||
startTime: 1679748225000000,
|
||||
endTime: 1679748226000000,
|
||||
};
|
||||
|
||||
describe('SpanHoverCard', () => {
|
||||
beforeEach(() => {
|
||||
jest.useFakeTimers();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
it('renders child element correctly', () => {
|
||||
render(
|
||||
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid="child-element">Hover me</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('child-element')).toBeInTheDocument();
|
||||
expect(screen.getByText('Hover me')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows popover after 0.5 second delay on hover', async () => {
|
||||
render(
|
||||
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid={HOVER_ELEMENT_ID}>Hover for details</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
|
||||
|
||||
// Hover over the element
|
||||
fireEvent.mouseEnter(hoverElement);
|
||||
|
||||
// Popover should NOT appear immediately
|
||||
expect(screen.queryByText('Duration:')).not.toBeInTheDocument();
|
||||
|
||||
// Advance time by 0.5 seconds
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(500);
|
||||
});
|
||||
|
||||
// Now popover should appear
|
||||
expect(screen.getByText('Duration:')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not show popover if hover is too brief', async () => {
|
||||
render(
|
||||
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid={HOVER_ELEMENT_ID}>Quick hover test</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
|
||||
|
||||
// Quick hover and unhover
|
||||
fireEvent.mouseEnter(hoverElement);
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(200); // Only 0.2 seconds
|
||||
});
|
||||
fireEvent.mouseLeave(hoverElement);
|
||||
|
||||
// Advance past the full delay
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(400);
|
||||
});
|
||||
|
||||
// Popover should not appear
|
||||
expect(screen.queryByText('Duration:')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays span information in popover content after delay', async () => {
|
||||
render(
|
||||
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid={HOVER_ELEMENT_ID}>Test span</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
|
||||
|
||||
// Hover and wait for popover
|
||||
fireEvent.mouseEnter(hoverElement);
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(500);
|
||||
});
|
||||
|
||||
// Check that popover shows span operation name in title
|
||||
expect(screen.getByText('GET /api/users')).toBeInTheDocument();
|
||||
|
||||
// Check duration information
|
||||
expect(screen.getByText('Duration:')).toBeInTheDocument();
|
||||
expect(screen.getByText('150ms')).toBeInTheDocument();
|
||||
|
||||
// Check events count
|
||||
expect(screen.getByText('Events:')).toBeInTheDocument();
|
||||
expect(screen.getByText('2')).toBeInTheDocument();
|
||||
|
||||
// Check start time label
|
||||
expect(screen.getByText('Start time:')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays new date format with seconds', async () => {
|
||||
render(
|
||||
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid={HOVER_ELEMENT_ID}>Date format test</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
|
||||
|
||||
// Hover and wait for popover
|
||||
fireEvent.mouseEnter(hoverElement);
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(500);
|
||||
});
|
||||
|
||||
// Verify the new date format is displayed
|
||||
expect(screen.getByText('15/3/24 - 14:23:45')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays relative time information', async () => {
|
||||
const spanWithRelativeTime: Span = {
|
||||
...mockSpan,
|
||||
timestamp: mockTraceMetadata.startTime + 1000000, // 1 second later
|
||||
};
|
||||
|
||||
render(
|
||||
<SpanHoverCard span={spanWithRelativeTime} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid={HOVER_ELEMENT_ID}>Relative time test</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
|
||||
|
||||
// Hover and wait for popover
|
||||
fireEvent.mouseEnter(hoverElement);
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(500);
|
||||
});
|
||||
|
||||
// Check relative time display
|
||||
expect(screen.getByText(/after trace start/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles spans with no events correctly', async () => {
|
||||
const spanWithoutEvents: Span = {
|
||||
...mockSpan,
|
||||
event: [],
|
||||
};
|
||||
|
||||
render(
|
||||
<SpanHoverCard span={spanWithoutEvents} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid={HOVER_ELEMENT_ID}>No events test</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
|
||||
|
||||
// Hover and wait for popover
|
||||
fireEvent.mouseEnter(hoverElement);
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(500);
|
||||
});
|
||||
|
||||
expect(screen.getByText('Events:')).toBeInTheDocument();
|
||||
expect(screen.getByText('0')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('verifies mouseEnterDelay prop is set to 0.5', () => {
|
||||
const { container } = render(
|
||||
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
|
||||
<div data-testid={HOVER_ELEMENT_ID}>Delay test</div>
|
||||
</SpanHoverCard>,
|
||||
);
|
||||
|
||||
// The mouseEnterDelay prop should be set on the Popover component
|
||||
// This test verifies the implementation includes the delay
|
||||
const popover = container.querySelector('.ant-popover');
|
||||
expect(popover).not.toBeInTheDocument(); // Initially not visible
|
||||
|
||||
// Hover to trigger delay mechanism
|
||||
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
|
||||
fireEvent.mouseEnter(hoverElement);
|
||||
|
||||
// Should not appear before delay
|
||||
expect(screen.queryByText('Duration:')).not.toBeInTheDocument();
|
||||
|
||||
// Should appear after delay
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(500);
|
||||
});
|
||||
expect(screen.getByText('Duration:')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -18,11 +18,6 @@ import UPlot from 'uplot';
|
||||
|
||||
import { dataMatch, optionsUpdateState } from './utils';
|
||||
|
||||
// Extended uPlot interface with custom properties
|
||||
interface ExtendedUPlot extends uPlot {
|
||||
_legendScrollCleanup?: () => void;
|
||||
}
|
||||
|
||||
export interface UplotProps {
|
||||
options: uPlot.Options;
|
||||
data: uPlot.AlignedData;
|
||||
@@ -71,12 +66,6 @@ const Uplot = forwardRef<ToggleGraphProps | undefined, UplotProps>(
|
||||
|
||||
const destroy = useCallback((chart: uPlot | null) => {
|
||||
if (chart) {
|
||||
// Clean up legend scroll event listener
|
||||
const extendedChart = chart as ExtendedUPlot;
|
||||
if (extendedChart._legendScrollCleanup) {
|
||||
extendedChart._legendScrollCleanup();
|
||||
}
|
||||
|
||||
onDeleteRef.current?.(chart);
|
||||
chart.destroy();
|
||||
chartRef.current = null;
|
||||
|
||||
@@ -12,6 +12,7 @@ function YAxisUnitSelector({
|
||||
onChange,
|
||||
placeholder = 'Please select a unit',
|
||||
loading = false,
|
||||
'data-testid': dataTestId,
|
||||
}: YAxisUnitSelectorProps): JSX.Element {
|
||||
const universalUnit = mapMetricUnitToUniversalUnit(value);
|
||||
|
||||
@@ -45,6 +46,7 @@ function YAxisUnitSelector({
|
||||
placeholder={placeholder}
|
||||
filterOption={(input, option): boolean => handleSearch(input, option)}
|
||||
loading={loading}
|
||||
data-testid={dataTestId}
|
||||
>
|
||||
{Y_AXIS_CATEGORIES.map((category) => (
|
||||
<Select.OptGroup key={category.name} label={category.name}>
|
||||
|
||||
@@ -4,6 +4,7 @@ export interface YAxisUnitSelectorProps {
|
||||
placeholder?: string;
|
||||
loading?: boolean;
|
||||
disabled?: boolean;
|
||||
'data-testid'?: string;
|
||||
}
|
||||
|
||||
export enum UniversalYAxisUnit {
|
||||
|
||||
@@ -24,12 +24,13 @@ export const DATE_TIME_FORMATS = {
TIME_SECONDS: 'HH:mm:ss',
TIME_UTC: 'HH:mm:ss (UTC Z)',
TIME_UTC_MS: 'HH:mm:ss.SSS (UTC Z)',
TIME_SPAN_PERCENTILE: 'HH:mm:ss MMM DD',

// Short date formats
DATE_SHORT: 'MM/DD',
YEAR_SHORT: 'YY',
YEAR_MONTH: 'YY-MM',
SPAN_POPOVER_DATE: 'M/D/YY - HH:mm',
SPAN_POPOVER_DATE: 'D/M/YY - HH:mm:ss',

// Month name formats
MONTH_DATE_FULL: 'MMMM DD, YYYY',

@@ -50,4 +50,5 @@ export enum QueryParams {
tab = 'tab',
thresholds = 'thresholds',
selectedExplorerView = 'selectedExplorerView',
variables = 'variables',
}

@@ -86,7 +86,11 @@ export const REACT_QUERY_KEY = {
SPAN_LOGS: 'SPAN_LOGS',
SPAN_BEFORE_LOGS: 'SPAN_BEFORE_LOGS',
SPAN_AFTER_LOGS: 'SPAN_AFTER_LOGS',
TRACE_ONLY_LOGS: 'TRACE_ONLY_LOGS',

// Routing Policies Query Keys
GET_ROUTING_POLICIES: 'GET_ROUTING_POLICIES',

// Span Percentiles Query Keys
GET_SPAN_PERCENTILES: 'GET_SPAN_PERCENTILES',
} as const;

@@ -3,4 +3,5 @@ export const USER_PREFERENCES = {
NAV_SHORTCUTS: 'nav_shortcuts',
LAST_SEEN_CHANGELOG_VERSION: 'last_seen_changelog_version',
SPAN_DETAILS_PINNED_ATTRIBUTES: 'span_details_pinned_attributes',
SPAN_PERCENTILE_RESOURCE_ATTRIBUTES: 'span_percentile_resource_attributes',
};

@@ -1,4 +1,5 @@
|
||||
import { Select } from 'antd';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import {
|
||||
getAllEndpointsWidgetData,
|
||||
@@ -264,6 +265,7 @@ function AllEndPoints({
|
||||
customOnDragSelect={(): void => {}}
|
||||
customTimeRange={timeRange}
|
||||
customOnRowClick={onRowClick}
|
||||
version={ENTITY_VERSION_V5}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { useApiMonitoringParams } from 'container/ApiMonitoring/queryParams';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
@@ -178,18 +179,33 @@ function EndPointDetails({
|
||||
[domainName, filters, minTime, maxTime],
|
||||
);
|
||||
|
||||
const V5_QUERIES = [
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_DATA,
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_BAR_CHARTS_DATA,
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_STATUS_CODE_LATENCY_BAR_CHARTS_DATA,
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_METRICS_DATA,
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_DEPENDENT_SERVICES_DATA,
|
||||
REACT_QUERY_KEY.GET_ENDPOINT_DROPDOWN_DATA,
|
||||
] as const;
|
||||
|
||||
const endPointDetailsDataQueries = useQueries(
|
||||
endPointDetailsQueryPayload.map((payload, index) => ({
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
|
||||
payload,
|
||||
filters?.items, // Include filters.items in queryKey for better caching
|
||||
ENTITY_VERSION_V4,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
enabled: !!payload,
|
||||
})),
|
||||
endPointDetailsQueryPayload.map((payload, index) => {
|
||||
const queryKey = END_POINT_DETAILS_QUERY_KEYS_ARRAY[index];
|
||||
const version = (V5_QUERIES as readonly string[]).includes(queryKey)
|
||||
? ENTITY_VERSION_V5
|
||||
: ENTITY_VERSION_V4;
|
||||
return {
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[index],
|
||||
payload,
|
||||
...(filters?.items?.length ? filters.items : []), // Include filters.items in queryKey for better caching
|
||||
version,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, version),
|
||||
enabled: !!payload,
|
||||
};
|
||||
}),
|
||||
);
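// With this change the entity version is resolved per query: keys listed in
// V5_QUERIES are fetched via ENTITY_VERSION_V5, any other key in
// END_POINT_DETAILS_QUERY_KEYS_ARRAY stays on ENTITY_VERSION_V4, and the
// resolved version is included in the react-query cache key.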
|
||||
|
||||
const [
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Spin, Switch, Table, Tooltip, Typography } from 'antd';
|
||||
import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
|
||||
import { MetricRangePayloadV5, ScalarData } from 'api/v5/v5';
|
||||
import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
|
||||
import { withErrorBoundary } from 'components/ErrorBoundaryHOC';
|
||||
import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import {
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY,
|
||||
@@ -11,13 +13,12 @@ import {
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
TopErrorsResponseRow,
|
||||
} from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { Info } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { useQueries } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { QueryFunctionContext, useQueries, useQuery } from 'react-query';
|
||||
import { SuccessResponse, SuccessResponseV2 } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -46,7 +47,7 @@ function TopErrors({
|
||||
true,
|
||||
);
|
||||
|
||||
const queryPayloads = useMemo(
|
||||
const queryPayload = useMemo(
|
||||
() =>
|
||||
getTopErrorsQueryPayload(
|
||||
domainName,
|
||||
@@ -55,6 +56,10 @@ function TopErrors({
|
||||
{
|
||||
items: endPointName
|
||||
? [
|
||||
// Remove any existing http.url filters from initialFilters to avoid duplicates
|
||||
...(initialFilters?.items?.filter(
|
||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
|
||||
) || []),
|
||||
{
|
||||
id: '92b8a1c1',
|
||||
key: {
|
||||
@@ -65,7 +70,6 @@ function TopErrors({
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
...(initialFilters?.items || []),
|
||||
]
|
||||
: [...(initialFilters?.items || [])],
|
||||
op: 'AND',
|
||||
@@ -82,37 +86,34 @@ function TopErrors({
|
||||
],
|
||||
);
|
||||
|
||||
const topErrorsDataQueries = useQueries(
|
||||
queryPayloads.map((payload) => ({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
payload,
|
||||
DEFAULT_ENTITY_VERSION,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, DEFAULT_ENTITY_VERSION),
|
||||
enabled: !!payload,
|
||||
staleTime: 0,
|
||||
cacheTime: 0,
|
||||
})),
|
||||
);
|
||||
|
||||
const topErrorsDataQuery = topErrorsDataQueries[0];
|
||||
const {
|
||||
data: topErrorsData,
|
||||
isLoading,
|
||||
isRefetching,
|
||||
isError,
|
||||
refetch,
|
||||
} = topErrorsDataQuery;
|
||||
} = useQuery({
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
|
||||
queryPayload,
|
||||
ENTITY_VERSION_V5,
|
||||
showStatusCodeErrors,
|
||||
],
|
||||
queryFn: ({
|
||||
signal,
|
||||
}: QueryFunctionContext): Promise<SuccessResponseV2<MetricRangePayloadV5>> =>
|
||||
getQueryRangeV5(queryPayload, ENTITY_VERSION_V5, signal),
|
||||
enabled: !!queryPayload,
|
||||
staleTime: 0,
|
||||
cacheTime: 0,
|
||||
});
|
||||
|
||||
const topErrorsColumnsConfig = useMemo(() => getTopErrorsColumnsConfig(), []);
|
||||
|
||||
const formattedTopErrorsData = useMemo(
|
||||
() =>
|
||||
formatTopErrorsDataForTable(
|
||||
topErrorsData?.payload?.data?.result as TopErrorsResponseRow[],
|
||||
topErrorsData?.data?.data?.data?.results[0] as ScalarData,
|
||||
),
|
||||
[topErrorsData],
|
||||
);
|
||||
@@ -130,12 +131,12 @@ function TopErrors({
|
||||
const endPointDropDownDataQueries = useQueries(
|
||||
endPointDropDownQueryPayload.map((payload) => ({
|
||||
queryKey: [
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[4],
|
||||
END_POINT_DETAILS_QUERY_KEYS_ARRAY[2],
|
||||
payload,
|
||||
ENTITY_VERSION_V4,
|
||||
ENTITY_VERSION_V5,
|
||||
],
|
||||
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
|
||||
GetMetricQueryRange(payload, ENTITY_VERSION_V5),
|
||||
enabled: !!payload,
|
||||
staleTime: 60 * 1000,
|
||||
})),
|
||||
|
||||
@@ -0,0 +1,337 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable prefer-destructuring */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { TraceAggregation } from 'api/v5/v5';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import DomainMetrics from './DomainMetrics';
|
||||
|
||||
// Mock the API call
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
GetMetricQueryRange: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock ErrorState component
|
||||
jest.mock('./ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(({ refetch }) => (
|
||||
<div data-testid="error-state">
|
||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const mockProps = {
|
||||
domainName: '0.0.0.0',
|
||||
timeRange: {
|
||||
startTime: 1758259531000,
|
||||
endTime: 1758261331000,
|
||||
},
|
||||
domainListFilters: {
|
||||
items: [],
|
||||
op: 'AND' as const,
|
||||
} as IBuilderQuery['filters'],
|
||||
};
|
||||
|
||||
const mockSuccessResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
A: '150',
|
||||
B: '125000000',
|
||||
D: '2021-01-01T23:00:00Z',
|
||||
F1: '5.5',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
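// The mocked row maps directly onto the rendered metrics: A ('150') is the
// external API call count, B ('125000000' ns) formats to '0.125s', and F1
// ('5.5') feeds the ERROR % indicator in the data-display assertions below.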
|
||||
|
||||
beforeEach(() => {
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
cacheTime: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
const renderComponent = (props = mockProps): ReturnType<typeof render> =>
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<DomainMetrics {...props} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
describe('1. V5 Query Payload with Filters', () => {
|
||||
it('sends correct V5 payload structure with domain name filters', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(GetMetricQueryRange).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
const [payload, version] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
||||
|
||||
// Verify it's using V5
|
||||
expect(version).toBe(ENTITY_VERSION_V5);
|
||||
|
||||
// Verify time range
|
||||
expect(payload.start).toBe(1758259531000);
|
||||
expect(payload.end).toBe(1758261331000);
|
||||
|
||||
// Verify V3 payload structure (getDomainMetricsQueryPayload returns V3 format)
|
||||
expect(payload.query).toBeDefined();
|
||||
expect(payload.query.builder).toBeDefined();
|
||||
expect(payload.query.builder.queryData).toBeDefined();
|
||||
|
||||
const queryData = payload.query.builder.queryData;
|
||||
|
||||
// Verify Query A - count with URL filter
|
||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
||||
expect(queryA).toBeDefined();
|
||||
expect(queryA.dataSource).toBe('traces');
|
||||
expect(queryA.aggregations?.[0]).toBeDefined();
|
||||
expect((queryA.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain(
|
||||
'url.full EXISTS OR http.url EXISTS',
|
||||
);
|
||||
|
||||
// Verify Query B - p99 latency
|
||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
||||
expect(queryB).toBeDefined();
|
||||
expect(queryB.aggregateOperator).toBe('p99');
|
||||
expect(queryB.aggregations?.[0]).toBeDefined();
|
||||
expect((queryB.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'p99(duration_nano)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryC.aggregations?.[0]).toBeDefined();
|
||||
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
);
|
||||
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
|
||||
// Verify Query D - max timestamp
|
||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
||||
expect(queryD).toBeDefined();
|
||||
expect(queryD.aggregateOperator).toBe('max');
|
||||
expect(queryD.aggregations?.[0]).toBeDefined();
|
||||
expect((queryD.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'max(timestamp)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = payload.query.builder.queryFormulas;
|
||||
expect(formulas).toBeDefined();
|
||||
expect(formulas.length).toBeGreaterThan(0);
|
||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
||||
expect(formulaF1).toBeDefined();
|
||||
expect(formulaF1.expression).toBe('(C/A)*100');
|
||||
});
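// Sanity check of the formula: with C = 3 error spans and A = 150 total spans,
// (C/A)*100 = 2, i.e. a 2% error rate for the domain.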
|
||||
|
||||
it('includes custom filters in filter expressions', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const customFilters: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-1',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'my-service',
|
||||
},
|
||||
{
|
||||
id: 'test-2',
|
||||
key: {
|
||||
key: 'deployment.environment',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'production',
|
||||
},
|
||||
],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
renderComponent({
|
||||
...mockProps,
|
||||
domainListFilters: customFilters,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(GetMetricQueryRange).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
const [payload] = (GetMetricQueryRange as jest.Mock).mock.calls[0];
|
||||
const queryData = payload.query.builder.queryData;
|
||||
|
||||
// Verify all queries include the custom filters
|
||||
queryData.forEach((query: any) => {
|
||||
if (query.filter && query.filter.expression) {
|
||||
expect(query.filter.expression).toContain('service.name');
|
||||
expect(query.filter.expression).toContain('my-service');
|
||||
expect(query.filter.expression).toContain('deployment.environment');
|
||||
expect(query.filter.expression).toContain('production');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. Data Display State', () => {
|
||||
it('displays metrics when data is successfully loaded', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
// Wait for skeletons to disappear
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// Verify all metric labels are displayed
|
||||
expect(screen.getByText('EXTERNAL API')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
|
||||
// Verify metric values are displayed
|
||||
expect(screen.getByText('150')).toBeInTheDocument();
|
||||
expect(screen.getByText('0.125s')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. Empty/Missing Data State', () => {
|
||||
it('displays "-" for missing data values', async () => {
|
||||
const emptyResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(emptyResponse);
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// When no data, all values should show "-"
|
||||
const dashValues = screen.getAllByText('-');
|
||||
expect(dashValues.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. Error State', () => {
|
||||
it('displays error state when API call fails', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockRejectedValue(new Error('API Error'));
|
||||
|
||||
renderComponent();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('retries API call when retry button is clicked', async () => {
|
||||
let callCount = 0;
|
||||
(GetMetricQueryRange as jest.Mock).mockImplementation(() => {
|
||||
callCount += 1;
|
||||
if (callCount === 1) {
|
||||
return Promise.reject(new Error('API Error'));
|
||||
}
|
||||
return Promise.resolve(mockSuccessResponse);
|
||||
});
|
||||
|
||||
renderComponent();
|
||||
|
||||
// Wait for error state
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click retry
|
||||
const retryButton = screen.getByTestId('retry-button');
|
||||
retryButton.click();
|
||||
|
||||
// Wait for successful load
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('150')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(callCount).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,6 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
DomainMetricsResponseRow,
@@ -44,10 +44,10 @@ function DomainMetrics({
queryKey: [
REACT_QUERY_KEY.GET_DOMAIN_METRICS_DATA,
payload,
ENTITY_VERSION_V4,
ENTITY_VERSION_V5,
],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
GetMetricQueryRange(payload, ENTITY_VERSION_V5),
enabled: !!payload,
staleTime: 60 * 1000, // 1 minute stale time; consider tuning
})),
@@ -132,7 +132,9 @@ function DomainMetrics({
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.latency}>
|
||||
<span className="round-metric-tag">
|
||||
{(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s
|
||||
{formattedDomainMetricsData.latency !== '-'
|
||||
? `${(Number(formattedDomainMetricsData.latency) / 1000).toFixed(3)}s`
|
||||
: '-'}
|
||||
</span>
|
||||
</Tooltip>
|
||||
)}
|
||||
@@ -143,23 +145,27 @@ function DomainMetrics({
|
||||
<Skeleton.Button active size="small" />
|
||||
) : (
|
||||
<Tooltip title={formattedDomainMetricsData.errorRate}>
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
)}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
{formattedDomainMetricsData.errorRate !== '-' ? (
|
||||
<Progress
|
||||
status="active"
|
||||
percent={Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
)}
|
||||
strokeLinecap="butt"
|
||||
size="small"
|
||||
strokeColor={((): string => {
|
||||
const errorRatePercent = Number(
|
||||
Number(formattedDomainMetricsData.errorRate).toFixed(2),
|
||||
);
|
||||
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
|
||||
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
|
||||
return Color.BG_FOREST_500;
|
||||
})()}
|
||||
className="progress-bar"
|
||||
/>
|
||||
) : (
|
||||
'-'
|
||||
)}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Typography.Text>
|
||||
|
||||
@@ -0,0 +1,419 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
/* eslint-disable prefer-destructuring */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { QueryClient, QueryClientProvider, UseQueryResult } from 'react-query';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import EndPointMetrics from './EndPointMetrics';
|
||||
|
||||
// Mock the API call
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
GetMetricQueryRange: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock ErrorState component
|
||||
jest.mock('./ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(({ refetch }) => (
|
||||
<div data-testid="error-state">
|
||||
<button type="button" onClick={refetch} data-testid="retry-button">
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const mockSuccessResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
data: {
|
||||
A: '85.5',
|
||||
B: '245000000',
|
||||
D: '2021-01-01T22:30:00Z',
|
||||
F1: '3.2',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
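// As in the DomainMetrics tests, B ('245000000' ns) is expected to render as
// '245ms' and A ('85.5') as '85.5 ops/sec' in the data-display assertions below.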
|
||||
|
||||
beforeEach(() => {
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
cacheTime: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
// Helper to create mock query result
|
||||
const createMockQueryResult = (
|
||||
response: any,
|
||||
overrides?: Partial<UseQueryResult<SuccessResponse<any>, unknown>>,
|
||||
): UseQueryResult<SuccessResponse<any>, unknown> =>
|
||||
({
|
||||
data: response,
|
||||
error: null,
|
||||
isError: false,
|
||||
isIdle: false,
|
||||
isLoading: false,
|
||||
isLoadingError: false,
|
||||
isRefetchError: false,
|
||||
isRefetching: false,
|
||||
isStale: true,
|
||||
isSuccess: true,
|
||||
status: 'success' as const,
|
||||
dataUpdatedAt: Date.now(),
|
||||
errorUpdateCount: 0,
|
||||
errorUpdatedAt: 0,
|
||||
failureCount: 0,
|
||||
isFetched: true,
|
||||
isFetchedAfterMount: true,
|
||||
isFetching: false,
|
||||
isPlaceholderData: false,
|
||||
isPreviousData: false,
|
||||
refetch: jest.fn(),
|
||||
remove: jest.fn(),
|
||||
...overrides,
|
||||
} as UseQueryResult<SuccessResponse<any>, unknown>);
|
||||
|
||||
const renderComponent = (
|
||||
endPointMetricsDataQuery: UseQueryResult<SuccessResponse<any>, unknown>,
|
||||
): ReturnType<typeof render> =>
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
describe('1. V5 Query Payload with Filters', () => {
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
it('sends correct V5 payload structure with domain and endpoint filters', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const domainName = 'api.example.com';
|
||||
const startTime = 1758259531000;
|
||||
const endTime = 1758261331000;
|
||||
const filters = {
|
||||
items: [],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
// Get the actual payload that would be generated
|
||||
const payloads = getEndPointDetailsQueryPayload(
|
||||
domainName,
|
||||
startTime,
|
||||
endTime,
|
||||
filters,
|
||||
);
|
||||
|
||||
// First payload is for endpoint metrics
|
||||
const metricsPayload = payloads[0];
|
||||
|
||||
// Verify it's using the correct structure (V3 format for V5 API)
|
||||
expect(metricsPayload.query).toBeDefined();
|
||||
expect(metricsPayload.query.builder).toBeDefined();
|
||||
expect(metricsPayload.query.builder.queryData).toBeDefined();
|
||||
|
||||
const queryData = metricsPayload.query.builder.queryData;
|
||||
|
||||
// Verify Query A - rate with domain and client kind filters
|
||||
const queryA = queryData.find((q: any) => q.queryName === 'A');
|
||||
expect(queryA).toBeDefined();
|
||||
if (queryA) {
|
||||
expect(queryA.dataSource).toBe('traces');
|
||||
expect(queryA.aggregateOperator).toBe('rate');
|
||||
expect(queryA.timeAggregation).toBe('rate');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryA.filter) {
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query B - p99 latency with duration_nano
|
||||
const queryB = queryData.find((q: any) => q.queryName === 'B');
|
||||
expect(queryB).toBeDefined();
|
||||
if (queryB) {
|
||||
expect(queryB.aggregateOperator).toBe('p99');
|
||||
if (queryB.aggregateAttribute) {
|
||||
expect(queryB.aggregateAttribute.key).toBe('duration_nano');
|
||||
}
|
||||
expect(queryB.timeAggregation).toBe('p99');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryB.filter) {
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
if (queryC) {
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.aggregateOperator).toBe('count');
|
||||
if (queryC.filter) {
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query D - max timestamp for last used
|
||||
const queryD = queryData.find((q: any) => q.queryName === 'D');
|
||||
expect(queryD).toBeDefined();
|
||||
if (queryD) {
|
||||
expect(queryD.aggregateOperator).toBe('max');
|
||||
if (queryD.aggregateAttribute) {
|
||||
expect(queryD.aggregateAttribute.key).toBe('timestamp');
|
||||
}
|
||||
expect(queryD.timeAggregation).toBe('max');
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryD.filter) {
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Query E - total count (disabled)
|
||||
const queryE = queryData.find((q: any) => q.queryName === 'E');
|
||||
expect(queryE).toBeDefined();
|
||||
if (queryE) {
|
||||
expect(queryE.disabled).toBe(true);
|
||||
expect(queryE.aggregateOperator).toBe('count');
|
||||
if (queryE.aggregateAttribute) {
|
||||
expect(queryE.aggregateAttribute.key).toBe('span_id');
|
||||
}
|
||||
if (queryE.filter) {
|
||||
expect(queryE.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
);
|
||||
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = metricsPayload.query.builder.queryFormulas;
|
||||
expect(formulas).toBeDefined();
|
||||
expect(formulas.length).toBeGreaterThan(0);
|
||||
const formulaF1 = formulas.find((f: any) => f.queryName === 'F1');
|
||||
expect(formulaF1).toBeDefined();
|
||||
if (formulaF1) {
|
||||
expect(formulaF1.expression).toBe('(C/E)*100');
|
||||
expect(formulaF1.disabled).toBe(false);
|
||||
expect(formulaF1.legend).toBe('error percentage');
|
||||
}
|
||||
});
|
||||
|
||||
it('includes custom domainListFilters in all query expressions', async () => {
|
||||
(GetMetricQueryRange as jest.Mock).mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
const customFilters = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-1',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'payment-service',
|
||||
},
|
||||
{
|
||||
id: 'test-2',
|
||||
key: {
|
||||
key: 'deployment.environment',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'staging',
|
||||
},
|
||||
],
|
||||
op: 'AND' as const,
|
||||
};
|
||||
|
||||
const payloads = getEndPointDetailsQueryPayload(
|
||||
'api.internal.com',
|
||||
1758259531000,
|
||||
1758261331000,
|
||||
customFilters,
|
||||
);
|
||||
|
||||
const queryData = payloads[0].query.builder.queryData;
|
||||
|
||||
// Verify ALL queries (A, B, C, D, E) include the custom filters
|
||||
const allQueryNames = ['A', 'B', 'C', 'D', 'E'];
|
||||
allQueryNames.forEach((queryName) => {
|
||||
const query = queryData.find((q: any) => q.queryName === queryName);
|
||||
expect(query).toBeDefined();
|
||||
if (query && query.filter && query.filter.expression) {
|
||||
// Check for exact filter inclusion
|
||||
expect(query.filter.expression).toContain('service.name');
|
||||
expect(query.filter.expression).toContain('payment-service');
|
||||
expect(query.filter.expression).toContain('deployment.environment');
|
||||
expect(query.filter.expression).toContain('staging');
|
||||
// Also verify domain filter is still present
|
||||
expect(query.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
|
||||
);
|
||||
// Verify client kind filter is present
|
||||
expect(query.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. Data Display State', () => {
|
||||
it('displays metrics when data is successfully loaded', async () => {
|
||||
const mockQuery = createMockQueryResult(mockSuccessResponse);
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
// Wait for skeletons to disappear
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// Verify all metric labels are displayed
|
||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
|
||||
// Verify metric values are displayed
|
||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
||||
expect(screen.getByText('245ms')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. Empty/Missing Data State', () => {
|
||||
it("displays '-' for missing data values", async () => {
|
||||
const emptyResponse = {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const mockQuery = createMockQueryResult(emptyResponse);
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
await waitFor(() => {
|
||||
const skeletons = document.querySelectorAll('.ant-skeleton-button');
|
||||
expect(skeletons.length).toBe(0);
|
||||
});
|
||||
|
||||
// When no data, all values should show "-"
|
||||
const dashValues = screen.getAllByText('-');
|
||||
// Should have at least 2 dashes (rate and last used; latency shows "-" and error % renders a progress bar)
|
||||
expect(dashValues.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. Error State', () => {
|
||||
it('displays error state when API call fails', async () => {
|
||||
const mockQuery = createMockQueryResult(null, {
|
||||
isError: true,
|
||||
isSuccess: false,
|
||||
status: 'error',
|
||||
error: new Error('API Error'),
|
||||
});
|
||||
|
||||
renderComponent(mockQuery);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.getByTestId('retry-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('retries API call when retry button is clicked', async () => {
|
||||
const refetch = jest.fn().mockResolvedValue(mockSuccessResponse);
|
||||
|
||||
// Start with error state
|
||||
const mockQuery = createMockQueryResult(null, {
|
||||
isError: true,
|
||||
isSuccess: false,
|
||||
status: 'error',
|
||||
error: new Error('API Error'),
|
||||
refetch,
|
||||
});
|
||||
|
||||
const { rerender } = renderComponent(mockQuery);
|
||||
|
||||
// Wait for error state
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-state')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click retry
|
||||
const retryButton = screen.getByTestId('retry-button');
|
||||
retryButton.click();
|
||||
|
||||
// Verify refetch was called
|
||||
expect(refetch).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Simulate successful refetch by rerendering with success state
|
||||
const successQuery = createMockQueryResult(mockSuccessResponse);
|
||||
rerender(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<EndPointMetrics endPointMetricsDataQuery={successQuery} />
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
// Wait for successful load
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('85.5 ops/sec')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,12 +1,16 @@
import { Color } from '@signozhq/design-tokens';
import { Progress, Skeleton, Tooltip, Typography } from 'antd';
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
import {
getDisplayValue,
getFormattedEndPointMetricsData,
} from 'container/ApiMonitoring/utils';
import { useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';

import ErrorState from './ErrorState';

// eslint-disable-next-line sonarjs/cognitive-complexity
function EndPointMetrics({
endPointMetricsDataQuery,
}: {
@@ -70,7 +74,9 @@ function EndPointMetrics({
<Skeleton.Button active size="small" />
) : (
<Tooltip title={metricsData?.rate}>
<span className="round-metric-tag">{metricsData?.rate} ops/sec</span>
<span className="round-metric-tag">
{metricsData?.rate !== '-' ? `${metricsData?.rate} ops/sec` : '-'}
</span>
</Tooltip>
)}
</Typography.Text>
@@ -79,7 +85,7 @@ function EndPointMetrics({
<Skeleton.Button active size="small" />
) : (
<Tooltip title={metricsData?.latency}>
<span className="round-metric-tag">{metricsData?.latency}ms</span>
{metricsData?.latency !== '-' ? `${metricsData?.latency}ms` : '-'}
</Tooltip>
)}
</Typography.Text>
@@ -88,21 +94,25 @@ function EndPointMetrics({
<Skeleton.Button active size="small" />
) : (
<Tooltip title={metricsData?.errorRate}>
<Progress
status="active"
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
strokeLinecap="butt"
size="small"
strokeColor={((): string => {
const errorRatePercent = Number(
Number(metricsData?.errorRate ?? 0).toFixed(2),
);
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
return Color.BG_FOREST_500;
})()}
className="progress-bar"
/>
{metricsData?.errorRate !== '-' ? (
<Progress
status="active"
percent={Number(Number(metricsData?.errorRate ?? 0).toFixed(2))}
strokeLinecap="butt"
size="small"
strokeColor={((): string => {
const errorRatePercent = Number(
Number(metricsData?.errorRate ?? 0).toFixed(2),
);
if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
if (errorRatePercent >= 60) return Color.BG_AMBER_500;
return Color.BG_FOREST_500;
})()}
className="progress-bar"
/>
) : (
'-'
)}
</Tooltip>
)}
</Typography.Text>
@@ -110,7 +120,9 @@ function EndPointMetrics({
{isLoading || isRefetching ? (
<Skeleton.Button active size="small" />
) : (
<Tooltip title={metricsData?.lastUsed}>{metricsData?.lastUsed}</Tooltip>
<Tooltip title={metricsData?.lastUsed}>
{getDisplayValue(metricsData?.lastUsed)}
</Tooltip>
)}
</Typography.Text>
</div>

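A note on the helper used above: getDisplayValue is imported from container/ApiMonitoring/utils, but its body is not part of this diff. A minimal sketch of the fallback behaviour the call sites assume (hypothetical, for illustration only):

// Hypothetical sketch — the real getDisplayValue lives in container/ApiMonitoring/utils.
export function getDisplayValue(value: string | number | undefined | null): string | number {
	// Show the '-' placeholder for absent or sentinel values; otherwise pass the value through.
	if (value === undefined || value === null || value === '' || value === '-') {
		return '-';
	}
	return value;
}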
@@ -1,4 +1,5 @@
import { Card } from 'antd';
import { ENTITY_VERSION_V5 } from 'constants/app';
import GridCard from 'container/GridCardLayout/GridCard';
import { Widgets } from 'types/api/dashboard/getAll';

@@ -22,6 +23,7 @@ function MetricOverTimeGraph({
customOnDragSelect={(): void => {}}
customTimeRange={timeRange}
customTimeRangeWindowForCoRelation="5m"
version={ENTITY_VERSION_V5}
/>
</div>
</Card>

@@ -69,6 +69,13 @@ function StatusCodeBarCharts({
} = endPointStatusCodeLatencyBarChartsDataQuery;

const { startTime: minTime, endTime: maxTime } = timeRange;
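// Keeping the legend scroll offsets in a ref (rather than state) preserves them across
// chart option rebuilds without triggering extra re-renders; updates are written back
// through the setLegendScrollPosition callback passed into the options below.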
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});

const graphRef = useRef<HTMLDivElement>(null);
const dimensions = useResizeObserver(graphRef);
@@ -207,6 +214,13 @@ function StatusCodeBarCharts({
onDragSelect,
colorMapping,
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
[
minTime,

@@ -8,23 +8,14 @@ import {
|
||||
endPointStatusCodeColumns,
|
||||
extractPortAndEndpoint,
|
||||
formatDataForTable,
|
||||
formatTopErrorsDataForTable,
|
||||
getAllEndpointsWidgetData,
|
||||
getCustomFiltersForBarChart,
|
||||
getEndPointDetailsQueryPayload,
|
||||
getFormattedDependentServicesData,
|
||||
getFormattedEndPointDropDownData,
|
||||
getFormattedEndPointMetricsData,
|
||||
getFormattedEndPointStatusCodeChartData,
|
||||
getFormattedEndPointStatusCodeData,
|
||||
getGroupByFiltersFromGroupByValues,
|
||||
getLatencyOverTimeWidgetData,
|
||||
getRateOverTimeWidgetData,
|
||||
getStatusCodeBarChartWidgetData,
|
||||
getTopErrorsColumnsConfig,
|
||||
getTopErrorsCoRelationQueryFilters,
|
||||
getTopErrorsQueryPayload,
|
||||
TopErrorsResponseRow,
|
||||
} from '../utils';
|
||||
import { APIMonitoringColumnsMock } from './mock';
|
||||
|
||||
@@ -52,119 +43,13 @@ jest.mock('../utils', () => {
|
||||
});
|
||||
|
||||
describe('API Monitoring Utils', () => {
|
||||
describe('getAllEndpointsWidgetData', () => {
|
||||
it('should create a widget with correct configuration', () => {
|
||||
// Arrange
|
||||
const groupBy = [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
key: 'http.method',
|
||||
type: '',
|
||||
},
|
||||
];
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
const domainName = 'test-domain';
|
||||
const filters = {
|
||||
items: [
|
||||
{
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: 'test-key',
|
||||
type: '',
|
||||
},
|
||||
op: '=',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = getAllEndpointsWidgetData(
|
||||
groupBy as BaseAutocompleteData[],
|
||||
domainName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.id).toBeDefined();
|
||||
// Title is a React component, not a string
|
||||
expect(result.title).toBeDefined();
|
||||
expect(result.panelTypes).toBe(PANEL_TYPES.TABLE);
|
||||
|
||||
// Check that each query includes the domainName filter
|
||||
result.query.builder.queryData.forEach((query) => {
|
||||
const serverNameFilter = query.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(serverNameFilter).toBeDefined();
|
||||
expect(serverNameFilter?.value).toBe(domainName);
|
||||
|
||||
// Check that the custom filters were included
|
||||
const testFilter = query.filters?.items?.find(
|
||||
(item) => item.id === 'test-filter',
|
||||
);
|
||||
expect(testFilter).toBeDefined();
|
||||
});
|
||||
|
||||
// Verify groupBy was included in queries
|
||||
if (result.query.builder.queryData[0].groupBy) {
|
||||
const hasCustomGroupBy = result.query.builder.queryData[0].groupBy.some(
|
||||
(item) => item && item.key === 'http.method',
|
||||
);
|
||||
expect(hasCustomGroupBy).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle empty groupBy correctly', () => {
|
||||
// Arrange
|
||||
const groupBy: any[] = [];
|
||||
const domainName = 'test-domain';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getAllEndpointsWidgetData(groupBy, domainName, filters);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
// Should only include default groupBy
|
||||
if (result.query.builder.queryData[0].groupBy) {
|
||||
expect(result.query.builder.queryData[0].groupBy.length).toBeGreaterThan(0);
|
||||
// Check that it doesn't have extra group by fields (only defaults)
|
||||
const defaultGroupByLength =
|
||||
result.query.builder.queryData[0].groupBy.length;
|
||||
const resultWithCustomGroupBy = getAllEndpointsWidgetData(
|
||||
[
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
key: 'custom.field',
|
||||
type: '',
|
||||
},
|
||||
] as BaseAutocompleteData[],
|
||||
domainName,
|
||||
filters,
|
||||
);
|
||||
// Custom groupBy should have more fields than default
|
||||
if (resultWithCustomGroupBy.query.builder.queryData[0].groupBy) {
|
||||
expect(
|
||||
resultWithCustomGroupBy.query.builder.queryData[0].groupBy.length,
|
||||
).toBeGreaterThan(defaultGroupByLength);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// New tests for formatDataForTable
|
||||
describe('formatDataForTable', () => {
|
||||
it('should format rows correctly with valid data', () => {
|
||||
const columns = APIMonitoringColumnsMock;
|
||||
const data = [
|
||||
[
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
'test-domain', // domainName
|
||||
'10', // endpoints
|
||||
'25', // rps
|
||||
@@ -222,6 +107,7 @@ describe('API Monitoring Utils', () => {
|
||||
const groupBy = [
|
||||
{
|
||||
id: 'group-by-1',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
key: 'http.method',
|
||||
dataType: DataTypes.String,
|
||||
type: '',
|
||||
@@ -344,49 +230,6 @@ describe('API Monitoring Utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatTopErrorsDataForTable', () => {
|
||||
it('should format top errors data correctly', () => {
|
||||
// Arrange
|
||||
const inputData = [
|
||||
{
|
||||
metric: {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: '/api/test',
|
||||
[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]: '500',
|
||||
status_message: 'Internal Server Error',
|
||||
},
|
||||
values: [[1000000100, '10']],
|
||||
queryName: 'A',
|
||||
legend: 'Test Legend',
|
||||
},
|
||||
];
|
||||
|
||||
// Act
|
||||
const result = formatTopErrorsDataForTable(
|
||||
inputData as TopErrorsResponseRow[],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.length).toBe(1);
|
||||
|
||||
// Check first item is formatted correctly
|
||||
expect(result[0].endpointName).toBe('/api/test');
|
||||
expect(result[0].statusCode).toBe('500');
|
||||
expect(result[0].statusMessage).toBe('Internal Server Error');
|
||||
expect(result[0].count).toBe('10');
|
||||
expect(result[0].key).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle empty input', () => {
|
||||
// Act
|
||||
const result = formatTopErrorsDataForTable(undefined);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTopErrorsColumnsConfig', () => {
|
||||
it('should return column configuration with expected fields', () => {
|
||||
// Act
|
||||
@@ -453,72 +296,6 @@ describe('API Monitoring Utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTopErrorsQueryPayload', () => {
|
||||
it('should create correct query payload with filters', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const start = 1000000000;
|
||||
const end = 1000010000;
|
||||
const filters = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: 'test-key',
|
||||
type: '',
|
||||
},
|
||||
op: '=',
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = getTopErrorsQueryPayload(
|
||||
domainName,
|
||||
start,
|
||||
end,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.length).toBeGreaterThan(0);
|
||||
|
||||
// Verify query params
|
||||
expect(result[0].start).toBe(start);
|
||||
expect(result[0].end).toBe(end);
|
||||
|
||||
// Verify correct structure
|
||||
expect(result[0].graphType).toBeDefined();
|
||||
expect(result[0].query).toBeDefined();
|
||||
expect(result[0].query.builder).toBeDefined();
|
||||
expect(result[0].query.builder.queryData).toBeDefined();
|
||||
|
||||
// Verify domain filter is included
|
||||
const queryData = result[0].query.builder.queryData[0];
|
||||
expect(queryData.filters).toBeDefined();
|
||||
|
||||
// Check for domain filter
|
||||
const domainFilter = queryData.filters?.items?.find(
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
(item) =>
|
||||
item.key &&
|
||||
item.key.key === SPAN_ATTRIBUTES.SERVER_NAME &&
|
||||
item.value === domainName,
|
||||
);
|
||||
expect(domainFilter).toBeDefined();
|
||||
|
||||
// Check that custom filters were included
|
||||
const testFilter = queryData.filters?.items?.find(
|
||||
(item) => item.id === 'test-filter',
|
||||
);
|
||||
expect(testFilter).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
// Add new tests for EndPointDetails utility functions
|
||||
describe('extractPortAndEndpoint', () => {
|
||||
it('should extract port and endpoint from a valid URL', () => {
|
||||
@@ -564,243 +341,6 @@ describe('API Monitoring Utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEndPointDetailsQueryPayload', () => {
|
||||
it('should generate proper query payload with all parameters', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const startTime = 1609459200000; // 2021-01-01
|
||||
const endTime = 1609545600000; // 2021-01-02
|
||||
const filters = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-filter',
|
||||
key: {
|
||||
dataType: 'string',
|
||||
key: 'test.key',
|
||||
type: '',
|
||||
},
|
||||
op: '=',
|
||||
value: 'test-value',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = getEndPointDetailsQueryPayload(
|
||||
domainName,
|
||||
startTime,
|
||||
endTime,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toHaveLength(6); // Should return 6 queries
|
||||
|
||||
// Check that each query includes proper parameters
|
||||
result.forEach((query) => {
|
||||
expect(query).toHaveProperty('start', startTime);
|
||||
expect(query).toHaveProperty('end', endTime);
|
||||
|
||||
// Should have query property with builder data
|
||||
expect(query).toHaveProperty('query');
|
||||
expect(query.query).toHaveProperty('builder');
|
||||
|
||||
// All queries should include the domain filter
|
||||
const {
|
||||
query: {
|
||||
builder: { queryData },
|
||||
},
|
||||
} = query;
|
||||
queryData.forEach((qd) => {
|
||||
if (qd.filters && qd.filters.items) {
|
||||
const serverNameFilter = qd.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(serverNameFilter).toBeDefined();
|
||||
// Only check if the serverNameFilter exists, as the actual value might vary
|
||||
// depending on implementation details or domain defaults
|
||||
if (serverNameFilter) {
|
||||
expect(typeof serverNameFilter.value).toBe('string');
|
||||
}
|
||||
}
|
||||
|
||||
// Should include our custom filter
|
||||
const customFilter = qd.filters?.items?.find(
|
||||
(item) => item.id === 'test-filter',
|
||||
);
|
||||
expect(customFilter).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getRateOverTimeWidgetData', () => {
|
||||
it('should generate widget configuration for rate over time', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getRateOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toHaveProperty('title', 'Rate Over Time');
|
||||
// Check only title since description might vary
|
||||
|
||||
// Check query configuration
|
||||
expect(result).toHaveProperty('query');
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
expect(result).toHaveProperty('query.builder.queryData');
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Should have domain filter
|
||||
const domainFilter = queryData.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(domainFilter).toBeDefined();
|
||||
if (domainFilter) {
|
||||
expect(typeof domainFilter.value).toBe('string');
|
||||
}
|
||||
|
||||
// Should have 'rate' time aggregation
|
||||
expect(queryData).toHaveProperty('timeAggregation', 'rate');
|
||||
|
||||
// Should have proper legend that includes endpoint info
|
||||
expect(queryData).toHaveProperty('legend');
|
||||
expect(
|
||||
typeof queryData.legend === 'string' ? queryData.legend : '',
|
||||
).toContain('/api/test');
|
||||
});
|
||||
|
||||
it('should handle case without endpoint name', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getRateOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Legend should be domain name only
|
||||
expect(queryData).toHaveProperty('legend', domainName);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLatencyOverTimeWidgetData', () => {
|
||||
it('should generate widget configuration for latency over time', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getLatencyOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toHaveProperty('title', 'Latency Over Time');
|
||||
// Check only title since description might vary
|
||||
|
||||
// Check query configuration
|
||||
expect(result).toHaveProperty('query');
|
||||
expect(result).toHaveProperty('query.builder.queryData');
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Should have domain filter
|
||||
const domainFilter = queryData.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.SERVER_NAME,
|
||||
);
|
||||
expect(domainFilter).toBeDefined();
|
||||
if (domainFilter) {
|
||||
expect(typeof domainFilter.value).toBe('string');
|
||||
}
|
||||
|
||||
// Should use duration_nano as the aggregate attribute
|
||||
expect(queryData.aggregateAttribute).toHaveProperty('key', 'duration_nano');
|
||||
|
||||
// Should have 'p99' time aggregation
|
||||
expect(queryData).toHaveProperty('timeAggregation', 'p99');
|
||||
});
|
||||
|
||||
it('should handle case without endpoint name', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getLatencyOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Legend should be domain name only
|
||||
expect(queryData).toHaveProperty('legend', domainName);
|
||||
});
|
||||
|
||||
// Changed approach to verify end-to-end behavior for URL with port
|
||||
it('should format legends appropriately for complete URLs with ports', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = 'http://example.com:8080/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Extract what we expect the function to extract
|
||||
const expectedParts = extractPortAndEndpoint(endPointName);
|
||||
|
||||
// Act
|
||||
const result = getLatencyOverTimeWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
// Assert
|
||||
const queryData = result.query.builder.queryData[0];
|
||||
|
||||
// Check that legend is present and is a string
|
||||
expect(queryData).toHaveProperty('legend');
|
||||
expect(typeof queryData.legend).toBe('string');
|
||||
|
||||
// If the URL has a port and endpoint, the legend should reflect that appropriately
|
||||
// (Testing the integration rather than the exact formatting)
|
||||
if (expectedParts.port !== '-') {
|
||||
// Verify that both components are incorporated into the legend in some way
|
||||
// This tests the behavior without relying on the exact implementation details
|
||||
const legendStr = queryData.legend as string;
|
||||
expect(legendStr).not.toBe(domainName); // Legend should be different when URL has port/endpoint
|
||||
}
|
||||
});
|
||||
});
|
||||
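// Contract assumed by the URL/port test above (a sketch, not the actual implementation in
// container/ApiMonitoring/utils): extractPortAndEndpoint('http://example.com:8080/api/test')
// would return { port: '8080', endpoint: '/api/test' }, with '-' for any part that is missing,
// which is why the legend check branches on expectedParts.port !== '-'.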
|
||||
describe('getFormattedEndPointDropDownData', () => {
|
||||
it('should format endpoint dropdown data correctly', () => {
|
||||
// Arrange
|
||||
@@ -810,6 +350,7 @@ describe('API Monitoring Utils', () => {
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/users',
|
||||
'url.full': 'http://example.com/api/users',
|
||||
A: 150, // count or other metric
|
||||
},
|
||||
},
|
||||
@@ -817,6 +358,7 @@ describe('API Monitoring Utils', () => {
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/orders',
|
||||
'url.full': 'http://example.com/api/orders',
|
||||
A: 75,
|
||||
},
|
||||
},
|
||||
@@ -900,87 +442,6 @@ describe('API Monitoring Utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFormattedEndPointMetricsData', () => {
|
||||
it('should format endpoint metrics data correctly', () => {
|
||||
// Arrange
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
A: '50', // rate
|
||||
B: '15000000', // latency in nanoseconds
|
||||
C: '5', // required by type
|
||||
D: '1640995200000000', // timestamp in nanoseconds
|
||||
F1: '5.5', // error rate
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Act
|
||||
const result = getFormattedEndPointMetricsData(mockData as any);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.key).toBeDefined();
|
||||
expect(result.rate).toBe('50');
|
||||
expect(result.latency).toBe(15); // Should be converted from ns to ms
|
||||
expect(result.errorRate).toBe(5.5);
|
||||
expect(typeof result.lastUsed).toBe('string'); // Time formatting is tested elsewhere
|
||||
});
|
||||
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
it('should handle undefined values in data', () => {
|
||||
// Arrange
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
A: undefined,
|
||||
B: 'n/a',
|
||||
C: '', // required by type
|
||||
D: undefined,
|
||||
F1: 'n/a',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Act
|
||||
const result = getFormattedEndPointMetricsData(mockData as any);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.rate).toBe('-');
|
||||
expect(result.latency).toBe('-');
|
||||
expect(result.errorRate).toBe(0);
|
||||
expect(result.lastUsed).toBe('-');
|
||||
});
|
||||
|
||||
it('should handle empty input array', () => {
|
||||
// Act
|
||||
const result = getFormattedEndPointMetricsData([]);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.rate).toBe('-');
|
||||
expect(result.latency).toBe('-');
|
||||
expect(result.errorRate).toBe(0);
|
||||
expect(result.lastUsed).toBe('-');
|
||||
});
|
||||
|
||||
it('should handle undefined input', () => {
|
||||
// Arrange
|
||||
const undefinedInput = undefined as any;
|
||||
|
||||
// Act
|
||||
const result = getFormattedEndPointMetricsData(undefinedInput);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.rate).toBe('-');
|
||||
expect(result.latency).toBe('-');
|
||||
expect(result.errorRate).toBe(0);
|
||||
expect(result.lastUsed).toBe('-');
|
||||
});
|
||||
});
|
||||
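// Mapping encoded by the tests above (a sketch of expected behaviour, not the implementation):
// row.A -> rate as a string ('-' when missing), row.B in ns -> latency in ms (15000000 -> 15),
// row.F1 -> errorRate as a number (0 when missing), row.D (ns timestamp) -> a human-readable
// lastUsed string ('-' when missing); an empty or undefined rows array yields the same defaults.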
|
||||
describe('getFormattedEndPointStatusCodeData', () => {
|
||||
it('should format status code data correctly', () => {
|
||||
// Arrange
|
||||
@@ -1117,139 +578,6 @@ describe('API Monitoring Utils', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFormattedDependentServicesData', () => {
|
||||
it('should format dependent services data correctly', () => {
|
||||
// Arrange
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
'service.name': 'auth-service',
|
||||
A: '500', // count
|
||||
B: '120000000', // latency in nanoseconds
|
||||
C: '15', // rate
|
||||
F1: '2.5', // error percentage
|
||||
},
|
||||
},
|
||||
{
|
||||
data: {
|
||||
'service.name': 'db-service',
|
||||
A: '300',
|
||||
B: '80000000',
|
||||
C: '10',
|
||||
F1: '1.2',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Act
|
||||
const result = getFormattedDependentServicesData(mockData as any);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.length).toBe(2);
|
||||
|
||||
// Check first service
|
||||
expect(result[0].key).toBeDefined();
|
||||
expect(result[0].serviceData.serviceName).toBe('auth-service');
|
||||
expect(result[0].serviceData.count).toBe(500);
|
||||
expect(typeof result[0].serviceData.percentage).toBe('number');
|
||||
expect(result[0].latency).toBe(120); // Should be converted from ns to ms
|
||||
expect(result[0].rate).toBe('15');
|
||||
expect(result[0].errorPercentage).toBe('2.5');
|
||||
|
||||
// Check second service
|
||||
expect(result[1].serviceData.serviceName).toBe('db-service');
|
||||
expect(result[1].serviceData.count).toBe(300);
|
||||
expect(result[1].latency).toBe(80);
|
||||
expect(result[1].rate).toBe('10');
|
||||
expect(result[1].errorPercentage).toBe('1.2');
|
||||
|
||||
// Verify percentage calculation
|
||||
const totalCount = 500 + 300;
|
||||
expect(result[0].serviceData.percentage).toBeCloseTo(
|
||||
(500 / totalCount) * 100,
|
||||
2,
|
||||
);
|
||||
expect(result[1].serviceData.percentage).toBeCloseTo(
|
||||
(300 / totalCount) * 100,
|
||||
2,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle undefined values in data', () => {
|
||||
// Arrange
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
'service.name': 'auth-service',
|
||||
A: 'n/a',
|
||||
B: undefined,
|
||||
C: 'n/a',
|
||||
F1: undefined,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Act
|
||||
const result = getFormattedDependentServicesData(mockData as any);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0].serviceData.serviceName).toBe('auth-service');
|
||||
expect(result[0].serviceData.count).toBe('-');
|
||||
expect(result[0].serviceData.percentage).toBe(0);
|
||||
expect(result[0].latency).toBe('-');
|
||||
expect(result[0].rate).toBe('-');
|
||||
expect(result[0].errorPercentage).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle empty input array', () => {
|
||||
// Act
|
||||
const result = getFormattedDependentServicesData([]);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle undefined input', () => {
|
||||
// Arrange
|
||||
const undefinedInput = undefined as any;
|
||||
|
||||
// Act
|
||||
const result = getFormattedDependentServicesData(undefinedInput);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle missing service name', () => {
|
||||
// Arrange
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
// Missing service.name
|
||||
A: '200',
|
||||
B: '50000000',
|
||||
C: '8',
|
||||
F1: '0.5',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// Act
|
||||
const result = getFormattedDependentServicesData(mockData as any);
|
||||
|
||||
// Assert
|
||||
expect(result).toBeDefined();
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0].serviceData.serviceName).toBe('-');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFormattedEndPointStatusCodeChartData', () => {
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
|
||||
@@ -0,0 +1,221 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
* V5 Migration Tests for All Endpoints Widget (Endpoint Overview)
*
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
* - Filter format change: filters.items[] → filter.expression
* - Aggregation format: aggregateAttribute → aggregations[] array
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
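// Illustrative only — the shape change these tests pin down, not code from the implementation:
// V4 (old): { filters: { items: [{ key: { key: 'net.peer.name' }, op: '=', value: domainName }], op: 'AND' },
//            aggregateAttribute: { key: 'duration_nano' }, ... }
// V5 (new): { filter: { expression: "(net.peer.name = '<domain>' OR server.address = '<domain>') AND kind_string = 'Client'" },
//            aggregations: [{ expression: 'p99(duration_nano)' }], ... }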
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
const mockDomainName = 'api.example.com';
|
||||
const emptyFilters: IBuilderQuery['filters'] = {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
};
|
||||
const emptyGroupBy: BaseAutocompleteData[] = [];
|
||||
|
||||
describe('1. V5 Format Migration - All Four Queries', () => {
|
||||
it('all queries use filter.expression format (not filters.items)', () => {
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
emptyGroupBy,
|
||||
mockDomainName,
|
||||
emptyFilters,
|
||||
);
|
||||
|
||||
const { queryData } = widget.query.builder;
|
||||
|
||||
// All 4 queries must use V5 filter.expression format
|
||||
queryData.forEach((query) => {
|
||||
expect(query.filter).toBeDefined();
|
||||
expect(query.filter?.expression).toBeDefined();
|
||||
expect(typeof query.filter?.expression).toBe('string');
|
||||
// OLD V4 format should NOT exist
|
||||
expect(query).not.toHaveProperty('filters');
|
||||
});
|
||||
|
||||
// Verify we have exactly 4 queries
|
||||
expect(queryData).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('all queries use aggregations array format (not aggregateAttribute)', () => {
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
emptyGroupBy,
|
||||
mockDomainName,
|
||||
emptyFilters,
|
||||
);
|
||||
|
||||
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
|
||||
|
||||
// Query A: count()
|
||||
expect(queryA.aggregations).toBeDefined();
|
||||
expect(Array.isArray(queryA.aggregations)).toBe(true);
|
||||
expect(queryA.aggregations).toEqual([{ expression: 'count()' }]);
|
||||
expect(queryA).not.toHaveProperty('aggregateAttribute');
|
||||
|
||||
// Query B: p99(duration_nano)
|
||||
expect(queryB.aggregations).toBeDefined();
|
||||
expect(Array.isArray(queryB.aggregations)).toBe(true);
|
||||
expect(queryB.aggregations).toEqual([{ expression: 'p99(duration_nano)' }]);
|
||||
expect(queryB).not.toHaveProperty('aggregateAttribute');
|
||||
|
||||
// Query C: max(timestamp)
|
||||
expect(queryC.aggregations).toBeDefined();
|
||||
expect(Array.isArray(queryC.aggregations)).toBe(true);
|
||||
expect(queryC.aggregations).toEqual([{ expression: 'max(timestamp)' }]);
|
||||
expect(queryC).not.toHaveProperty('aggregateAttribute');
|
||||
|
||||
// Query D: count() (disabled, for errors)
|
||||
expect(queryD.aggregations).toBeDefined();
|
||||
expect(Array.isArray(queryD.aggregations)).toBe(true);
|
||||
expect(queryD.aggregations).toEqual([{ expression: 'count()' }]);
|
||||
expect(queryD).not.toHaveProperty('aggregateAttribute');
|
||||
});
|
||||
|
||||
it('all queries have correct base filter expressions', () => {
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
emptyGroupBy,
|
||||
mockDomainName,
|
||||
emptyFilters,
|
||||
);
|
||||
|
||||
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
|
||||
|
||||
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
|
||||
|
||||
// Queries A, B, C have identical base filter
|
||||
expect(queryA.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
);
|
||||
expect(queryB.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
);
|
||||
expect(queryC.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
);
|
||||
|
||||
// Query D has additional has_error filter
|
||||
expect(queryD.filter?.expression).toBe(
|
||||
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. GroupBy Structure', () => {
|
||||
it('default groupBy includes both http.url and url.full with type attribute', () => {
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
emptyGroupBy,
|
||||
mockDomainName,
|
||||
emptyFilters,
|
||||
);
|
||||
|
||||
const { queryData } = widget.query.builder;
|
||||
|
||||
// All queries should have the same default groupBy
|
||||
queryData.forEach((query) => {
|
||||
expect(query.groupBy).toHaveLength(2);
|
||||
|
||||
// http.url
|
||||
expect(query.groupBy).toContainEqual({
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'attribute',
|
||||
});
|
||||
|
||||
// url.full
|
||||
expect(query.groupBy).toContainEqual({
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'url.full',
|
||||
type: 'attribute',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('custom groupBy is appended after defaults', () => {
|
||||
const customGroupBy: BaseAutocompleteData[] = [
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
key: 'service.name',
|
||||
type: 'resource',
|
||||
},
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
key: 'deployment.environment',
|
||||
type: 'resource',
|
||||
},
|
||||
];
|
||||
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
customGroupBy,
|
||||
mockDomainName,
|
||||
emptyFilters,
|
||||
);
|
||||
|
||||
const { queryData } = widget.query.builder;
|
||||
|
||||
// All queries should have defaults + custom groupBy
|
||||
queryData.forEach((query) => {
|
||||
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
|
||||
|
||||
// First two should be defaults (http.url, url.full)
|
||||
expect(query.groupBy[0].key).toBe('http.url');
|
||||
expect(query.groupBy[1].key).toBe('url.full');
|
||||
|
||||
// Last two should be custom (matching subset of properties)
|
||||
expect(query.groupBy[2]).toMatchObject({
|
||||
dataType: DataTypes.String,
|
||||
key: 'service.name',
|
||||
type: 'resource',
|
||||
});
|
||||
expect(query.groupBy[3]).toMatchObject({
|
||||
dataType: DataTypes.String,
|
||||
key: 'deployment.environment',
|
||||
type: 'resource',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. Query-Specific Validations', () => {
|
||||
it('query D has has_error filter and is disabled', () => {
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
emptyGroupBy,
|
||||
mockDomainName,
|
||||
emptyFilters,
|
||||
);
|
||||
|
||||
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
|
||||
|
||||
// Query D should be disabled
|
||||
expect(queryD.disabled).toBe(true);
|
||||
|
||||
// Queries A, B, C should NOT be disabled
|
||||
expect(queryA.disabled).toBe(false);
|
||||
expect(queryB.disabled).toBe(false);
|
||||
expect(queryC.disabled).toBe(false);
|
||||
|
||||
// Query D should have has_error in filter
|
||||
expect(queryD.filter?.expression).toContain('has_error = true');
|
||||
|
||||
// Queries A, B, C should NOT have has_error
|
||||
expect(queryA.filter?.expression).not.toContain('has_error');
|
||||
expect(queryB.filter?.expression).not.toContain('has_error');
|
||||
expect(queryC.filter?.expression).not.toContain('has_error');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,211 +0,0 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { getFormattedEndPointMetricsData } from 'container/ApiMonitoring/utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
|
||||
import EndPointMetrics from '../Explorer/Domains/DomainDetails/components/EndPointMetrics';
|
||||
import ErrorState from '../Explorer/Domains/DomainDetails/components/ErrorState';
|
||||
|
||||
// Create a partial mock of the UseQueryResult interface for testing
|
||||
interface MockQueryResult {
|
||||
isLoading: boolean;
|
||||
isRefetching: boolean;
|
||||
isError: boolean;
|
||||
data?: any;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
// Mock the utils function
|
||||
jest.mock('container/ApiMonitoring/utils', () => ({
|
||||
getFormattedEndPointMetricsData: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock the ErrorState component
|
||||
jest.mock('../Explorer/Domains/DomainDetails/components/ErrorState', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(({ refetch }) => (
|
||||
<div data-testid="error-state-mock">
|
||||
<button type="button" data-testid="refetch-button" onClick={refetch}>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
// Mock antd components
|
||||
jest.mock('antd', () => {
|
||||
const originalModule = jest.requireActual('antd');
|
||||
return {
|
||||
...originalModule,
|
||||
Progress: jest
|
||||
.fn()
|
||||
.mockImplementation(() => <div data-testid="progress-bar-mock" />),
|
||||
Skeleton: {
|
||||
Button: jest
|
||||
.fn()
|
||||
.mockImplementation(() => <div data-testid="skeleton-button-mock" />),
|
||||
},
|
||||
Tooltip: jest
|
||||
.fn()
|
||||
.mockImplementation(({ children }) => (
|
||||
<div data-testid="tooltip-mock">{children}</div>
|
||||
)),
|
||||
Typography: {
|
||||
Text: jest.fn().mockImplementation(({ children, className }) => (
|
||||
<div data-testid={`typography-${className}`} className={className}>
|
||||
{children}
|
||||
</div>
|
||||
)),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
describe('EndPointMetrics', () => {
|
||||
// Common metric data to use in tests
|
||||
const mockMetricsData = {
|
||||
key: 'test-key',
|
||||
rate: '42',
|
||||
latency: 99,
|
||||
errorRate: 5.5,
|
||||
lastUsed: '5 minutes ago',
|
||||
};
|
||||
|
||||
// Basic props for tests
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(
|
||||
mockMetricsData,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders loading state correctly', () => {
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: true,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify skeleton loaders are visible
|
||||
const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
|
||||
expect(skeletonElements.length).toBe(4);
|
||||
|
||||
// Verify labels are visible even during loading
|
||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
||||
expect(screen.getByText('AVERAGE LATENCY')).toBeInTheDocument();
|
||||
expect(screen.getByText('ERROR %')).toBeInTheDocument();
|
||||
expect(screen.getByText('LAST USED')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders error state correctly', () => {
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: true,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify error state is shown
|
||||
expect(screen.getByTestId('error-state-mock')).toBeInTheDocument();
|
||||
expect(ErrorState).toHaveBeenCalledWith(
|
||||
{ refetch: expect.any(Function) },
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('renders data correctly when loaded', () => {
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{ data: { A: '42', B: '99000000', D: '1609459200000000', F1: '5.5' } },
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify the utils function was called with the data
|
||||
expect(getFormattedEndPointMetricsData).toHaveBeenCalledWith(
|
||||
mockData.payload.data.result[0].table.rows,
|
||||
);
|
||||
|
||||
// Verify data is displayed
|
||||
expect(
|
||||
screen.getByText(`${mockMetricsData.rate} ops/sec`),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText(`${mockMetricsData.latency}ms`)).toBeInTheDocument();
|
||||
expect(screen.getByText(mockMetricsData.lastUsed)).toBeInTheDocument();
|
||||
expect(screen.getByTestId('progress-bar-mock')).toBeInTheDocument(); // For error rate
|
||||
});
|
||||
|
||||
it('handles refetching state correctly', () => {
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: true,
|
||||
isError: false,
|
||||
data: undefined,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Verify skeleton loaders are visible during refetching
|
||||
const skeletonElements = screen.getAllByTestId('skeleton-button-mock');
|
||||
expect(skeletonElements.length).toBe(4);
|
||||
});
|
||||
|
||||
it('handles null metrics data gracefully', () => {
|
||||
// Mock the utils function to return null to simulate missing data
|
||||
(getFormattedEndPointMetricsData as jest.Mock).mockReturnValue(null);
|
||||
|
||||
const mockData = {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as SuccessResponse<any>;
|
||||
|
||||
const mockQuery: MockQueryResult = {
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
isError: false,
|
||||
data: mockData,
|
||||
refetch: refetchFn,
|
||||
};
|
||||
|
||||
render(<EndPointMetrics endPointMetricsDataQuery={mockQuery as any} />);
|
||||
|
||||
// Even with null data, the component should render without crashing
|
||||
expect(screen.getByText('Rate')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,173 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
/**
* V5 Migration Tests for Endpoint Dropdown Query
*
* These tests validate the migration from V4 to V5 format for the third payload
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
* - Filter format change: filters.items[] → filter.expression
* - Domain handling: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Existence check: (http.url EXISTS OR url.full EXISTS)
* - Aggregation: count() expression
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
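// Illustrative only — how a single V4 filter item is expected to surface in the V5 expression,
// taken from the assertions below rather than from the implementation:
//   { key: { key: 'service.name' }, op: '=', value: 'user-service' } -> "service.name = 'user-service'"
//   { key: { key: 'http.url' }, op: '=', value: '/api/users' }       -> "(http.url = '/api/users' OR url.full = '/api/users')"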
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
const mockDomainName = 'api.example.com';
|
||||
const mockStartTime = 1000;
|
||||
const mockEndTime = 2000;
|
||||
const emptyFilters: IBuilderQuery['filters'] = {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
describe('1. V5 Format Migration - Structure and Base Filters', () => {
|
||||
it('migrates to V5 format with correct filter expression structure, aggregations, and groupBy', () => {
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
emptyFilters,
|
||||
);
|
||||
|
||||
// Third payload is the endpoint dropdown query (index 2)
|
||||
const dropdownQuery = payload[2];
|
||||
const queryA = dropdownQuery.query.builder.queryData[0];
|
||||
|
||||
// CRITICAL V5 MIGRATION: filter.expression (not filters.items)
|
||||
expect(queryA.filter).toBeDefined();
|
||||
expect(queryA.filter?.expression).toBeDefined();
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
expect(queryA.filter?.expression).toContain("kind_string = 'Client'");
|
||||
|
||||
// Base filter 3: Existence check
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
'(http.url EXISTS OR url.full EXISTS)',
|
||||
);
|
||||
|
||||
// V5 Aggregation format: aggregations array (not aggregateAttribute)
|
||||
expect(queryA.aggregations).toBeDefined();
|
||||
expect(Array.isArray(queryA.aggregations)).toBe(true);
|
||||
expect(queryA.aggregations?.[0]).toEqual({
|
||||
expression: 'count()',
|
||||
});
|
||||
expect(queryA).not.toHaveProperty('aggregateAttribute');
|
||||
|
||||
// GroupBy: Both http.url and url.full
|
||||
expect(queryA.groupBy).toHaveLength(2);
|
||||
expect(queryA.groupBy).toContainEqual({
|
||||
key: 'http.url',
|
||||
dataType: 'string',
|
||||
type: 'attribute',
|
||||
});
|
||||
expect(queryA.groupBy).toContainEqual({
|
||||
key: 'url.full',
|
||||
dataType: 'string',
|
||||
type: 'attribute',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. Custom Filters Integration', () => {
|
||||
it('merges custom filters into filter expression with AND logic', () => {
|
||||
const customFilters: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'test-1',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'user-service',
|
||||
},
|
||||
{
|
||||
id: 'test-2',
|
||||
key: {
|
||||
key: 'deployment.environment',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'production',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
customFilters,
|
||||
);
|
||||
|
||||
const dropdownQuery = payload[2];
|
||||
const expression =
|
||||
dropdownQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// Exact filter expression with custom filters merged
|
||||
expect(expression).toBe(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. HTTP URL Filter Special Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/users',
|
||||
},
|
||||
{
|
||||
id: 'service-filter',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'user-service',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const dropdownQuery = payload[2];
|
||||
const expression =
|
||||
dropdownQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: Exact filter expression with http.url converted to OR logic
|
||||
expect(expression).toBe(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff.