Compare commits: v0.96.1...feature/sh

2 commits

| Author | SHA1 | Date |
|---|---|---|
| | a083bf41d2 | |
| | e0b7b2d795 | |
@@ -1,6 +1,6 @@
services:
  clickhouse:
    image: clickhouse/clickhouse-server:25.5.6
    image: clickhouse/clickhouse-server:24.1.2-alpine
    container_name: clickhouse
    volumes:
      - ${PWD}/fs/etc/clickhouse-server/config.d/config.xml:/etc/clickhouse-server/config.d/config.xml
@@ -23,10 +23,8 @@ services:
      retries: 3
    depends_on:
      - zookeeper
    environment:
      - CLICKHOUSE_SKIP_USER_SETUP=1
  zookeeper:
    image: signoz/zookeeper:3.7.1
    image: bitnami/zookeeper:3.7.1
    container_name: zookeeper
    volumes:
      - ${PWD}/fs/tmp/zookeeper:/bitnami/zookeeper
@@ -42,7 +40,7 @@ services:
      timeout: 5s
      retries: 3
  schema-migrator-sync:
    image: signoz/signoz-schema-migrator:v0.129.6
    image: signoz/signoz-schema-migrator:v0.128.2
    container_name: schema-migrator-sync
    command:
      - sync
@@ -55,7 +53,7 @@ services:
        condition: service_healthy
    restart: on-failure
  schema-migrator-async:
    image: signoz/signoz-schema-migrator:v0.129.6
    image: signoz/signoz-schema-migrator:v0.128.2
    container_name: schema-migrator-async
    command:
      - async
@@ -1,29 +0,0 @@
services:
  signoz-otel-collector:
    image: signoz/signoz-otel-collector:v0.129.6
    container_name: signoz-otel-collector-dev
    command:
      - --config=/etc/otel-collector-config.yaml
      - --feature-gates=-pkg.translator.prometheus.NormalizeName
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
    environment:
      - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
      - LOW_CARDINAL_EXCEPTION_GROUPING=false
    ports:
      - "4317:4317" # OTLP gRPC receiver
      - "4318:4318" # OTLP HTTP receiver
      - "13133:13133" # health check extension
    healthcheck:
      test:
        - CMD
        - wget
        - --spider
        - -q
        - localhost:13133
      interval: 30s
      timeout: 5s
      retries: 3
    restart: unless-stopped
    extra_hosts:
      - "host.docker.internal:host-gateway"
@@ -1,96 +0,0 @@
receivers:
  otlp:
    protocols:
      grpc:
        endpoint: 0.0.0.0:4317
      http:
        endpoint: 0.0.0.0:4318
  prometheus:
    config:
      global:
        scrape_interval: 60s
      scrape_configs:
        - job_name: otel-collector
          static_configs:
            - targets:
                - localhost:8888
              labels:
                job_name: otel-collector

processors:
  batch:
    send_batch_size: 10000
    send_batch_max_size: 11000
    timeout: 10s
  resourcedetection:
    # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
    detectors: [env, system]
    timeout: 2s
  signozspanmetrics/delta:
    metrics_exporter: signozclickhousemetrics
    metrics_flush_interval: 60s
    latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s]
    dimensions_cache_size: 100000
    aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
    enable_exp_histogram: true
    dimensions:
      - name: service.namespace
        default: default
      - name: deployment.environment
        default: default
      # This is added to ensure the uniqueness of the timeseries
      # Otherwise, identical timeseries produced by multiple replicas of
      # collectors result in incorrect APM metrics
      - name: signoz.collector.id
      - name: service.version
      - name: browser.platform
      - name: browser.mobile
      - name: k8s.cluster.name
      - name: k8s.node.name
      - name: k8s.namespace.name
      - name: host.name
      - name: host.type
      - name: container.name

extensions:
  health_check:
    endpoint: 0.0.0.0:13133
  pprof:
    endpoint: 0.0.0.0:1777

exporters:
  clickhousetraces:
    datasource: tcp://host.docker.internal:9000/signoz_traces
    low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
    use_new_schema: true
  signozclickhousemetrics:
    dsn: tcp://host.docker.internal:9000/signoz_metrics
  clickhouselogsexporter:
    dsn: tcp://host.docker.internal:9000/signoz_logs
    timeout: 10s
    use_new_schema: true

service:
  telemetry:
    logs:
      encoding: json
  extensions:
    - health_check
    - pprof
  pipelines:
    traces:
      receivers: [otlp]
      processors: [signozspanmetrics/delta, batch]
      exporters: [clickhousetraces]
    metrics:
      receivers: [otlp]
      processors: [batch]
      exporters: [signozclickhousemetrics]
    metrics/prometheus:
      receivers: [prometheus]
      processors: [batch]
      exporters: [signozclickhousemetrics]
    logs:
      receivers: [otlp]
      processors: [batch]
      exporters: [clickhouselogsexporter]
43  .github/CODEOWNERS  vendored
@@ -5,45 +5,6 @@
/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv

# Dashboard, Alert, Metrics, Service Map, Services
/frontend/src/container/ListOfDashboard/ @srikanthccv
/frontend/src/container/NewDashboard/ @srikanthccv
/frontend/src/pages/DashboardsListPage/ @srikanthccv
/frontend/src/pages/DashboardWidget/ @srikanthccv
/frontend/src/pages/NewDashboard/ @srikanthccv
/frontend/src/providers/Dashboard/ @srikanthccv

# Alerts
/frontend/src/container/AlertHistory/ @srikanthccv
/frontend/src/container/AllAlertChannels/ @srikanthccv
/frontend/src/container/AnomalyAlertEvaluationView/ @srikanthccv
/frontend/src/container/CreateAlertChannels/ @srikanthccv
/frontend/src/container/CreateAlertRule/ @srikanthccv
/frontend/src/container/EditAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertRules/ @srikanthccv
/frontend/src/container/ListAlertRules/ @srikanthccv
/frontend/src/container/TriggeredAlerts/ @srikanthccv
/frontend/src/pages/AlertChannelCreate/ @srikanthccv
/frontend/src/pages/AlertDetails/ @srikanthccv
/frontend/src/pages/AlertHistory/ @srikanthccv
/frontend/src/pages/AlertList/ @srikanthccv
/frontend/src/pages/CreateAlert/ @srikanthccv
/frontend/src/providers/Alert.tsx @srikanthccv

# Metrics
/frontend/src/container/MetricsExplorer/ @srikanthccv
/frontend/src/pages/MetricsApplication/ @srikanthccv
/frontend/src/pages/MetricsExplorer/ @srikanthccv

# Services and Service Map
/frontend/src/container/ServiceApplication/ @srikanthccv
/frontend/src/container/ServiceTable/ @srikanthccv
/frontend/src/pages/Services/ @srikanthccv
/frontend/src/pages/ServiceTopLevelOperations/ @srikanthccv
/frontend/src/container/Home/Services/ @srikanthccv

/deploy/ @SigNoz/devops
.github @SigNoz/devops

@@ -81,7 +42,3 @@
/pkg/telemetrymetadata/ @srikanthccv
/pkg/telemetrymetrics/ @srikanthccv
/pkg/telemetrytraces/ @srikanthccv

# AuthN / AuthZ Owners

/pkg/authz/ @vikrantgupta25 @grandwizard28
2
.github/workflows/build-community.yaml
vendored
2
.github/workflows/build-community.yaml
vendored
@@ -62,7 +62,7 @@ jobs:
|
||||
secrets: inherit
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
GO_VERSION: 1.24
|
||||
GO_VERSION: 1.23
|
||||
GO_NAME: signoz-community
|
||||
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
|
||||
GO_INPUT_ARTIFACT_PATH: frontend/build
|
||||
|
||||
2
.github/workflows/build-enterprise.yaml
vendored
2
.github/workflows/build-enterprise.yaml
vendored
@@ -93,7 +93,7 @@ jobs:
|
||||
secrets: inherit
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
GO_VERSION: 1.24
|
||||
GO_VERSION: 1.23
|
||||
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
|
||||
GO_INPUT_ARTIFACT_PATH: frontend/build
|
||||
GO_BUILD_CONTEXT: ./cmd/enterprise
|
||||
|
||||
2
.github/workflows/build-staging.yaml
vendored
2
.github/workflows/build-staging.yaml
vendored
@@ -92,7 +92,7 @@ jobs:
|
||||
secrets: inherit
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
GO_VERSION: 1.24
|
||||
GO_VERSION: 1.23
|
||||
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
|
||||
GO_INPUT_ARTIFACT_PATH: frontend/build
|
||||
GO_BUILD_CONTEXT: ./cmd/enterprise
|
||||
|
||||
10
.github/workflows/goci.yaml
vendored
10
.github/workflows/goci.yaml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
GO_TEST_CONTEXT: ./...
|
||||
GO_VERSION: 1.24
|
||||
GO_VERSION: 1.23
|
||||
fmt:
|
||||
if: |
|
||||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
@@ -27,7 +27,7 @@ jobs:
|
||||
secrets: inherit
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
GO_VERSION: 1.24
|
||||
GO_VERSION: 1.23
|
||||
lint:
|
||||
if: |
|
||||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
secrets: inherit
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
GO_VERSION: 1.24
|
||||
GO_VERSION: 1.23
|
||||
deps:
|
||||
if: |
|
||||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
secrets: inherit
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
GO_VERSION: 1.24
|
||||
GO_VERSION: 1.23
|
||||
build:
|
||||
if: |
|
||||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: go-install
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24"
|
||||
go-version: "1.23"
|
||||
- name: qemu-install
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: aarch64-install
|
||||
|
||||
4
.github/workflows/gor-signoz-community.yaml
vendored
4
.github/workflows/gor-signoz-community.yaml
vendored
@@ -58,7 +58,7 @@ jobs:
|
||||
- name: setup-go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24"
|
||||
go-version: "1.23"
|
||||
- name: cross-compilation-tools
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
run: |
|
||||
@@ -122,7 +122,7 @@ jobs:
|
||||
- name: setup-go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24"
|
||||
go-version: "1.23"
|
||||
|
||||
# copy the caches from build
|
||||
- name: get-sha
|
||||
|
||||
4
.github/workflows/gor-signoz.yaml
vendored
4
.github/workflows/gor-signoz.yaml
vendored
@@ -72,7 +72,7 @@ jobs:
|
||||
- name: setup-go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24"
|
||||
go-version: "1.23"
|
||||
- name: cross-compilation-tools
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
run: |
|
||||
@@ -135,7 +135,7 @@ jobs:
|
||||
- name: setup-go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24"
|
||||
go-version: "1.23"
|
||||
|
||||
# copy the caches from build
|
||||
- name: get-sha
|
||||
|
||||
3
.github/workflows/integrationci.yaml
vendored
3
.github/workflows/integrationci.yaml
vendored
@@ -21,9 +21,10 @@ jobs:
|
||||
- postgres
|
||||
- sqlite
|
||||
clickhouse-version:
|
||||
- 24.1.2-alpine
|
||||
- 25.5.6
|
||||
schema-migrator-version:
|
||||
- v0.129.6
|
||||
- v0.128.1
|
||||
postgres-version:
|
||||
- 15
|
||||
if: |
|
||||
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -86,8 +86,6 @@ queries.active
|
||||
.devenv/**/tmp/**
|
||||
.qodo
|
||||
|
||||
.dev
|
||||
|
||||
### Python ###
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
@@ -230,6 +228,4 @@ poetry.toml
|
||||
# LSP config files
|
||||
pyrightconfig.json
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/python
|
||||
|
||||
frontend/.cursor/rules/
|
||||
# End of https://www.toptal.com/developers/gitignore/api/python
|
||||
@@ -8,7 +8,6 @@ linters:
|
||||
- depguard
|
||||
- iface
|
||||
- unparam
|
||||
- forbidigo
|
||||
|
||||
linters-settings:
|
||||
sloglint:
|
||||
@@ -25,10 +24,6 @@ linters-settings:
|
||||
deny:
|
||||
- pkg: "go.uber.org/zap"
|
||||
desc: "Do not use zap logger. Use slog instead."
|
||||
noerrors:
|
||||
deny:
|
||||
- pkg: "errors"
|
||||
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
|
||||
iface:
|
||||
enable:
|
||||
- identical
|
||||
|
||||
@@ -78,5 +78,4 @@ Need assistance? Join our Slack community:
|
||||
|
||||
- Set up your [development environment](docs/contributing/development.md)
|
||||
- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)
|
||||
- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future.
|
||||
- Write [integration tests](docs/contributing/go/integration.md)
|
||||
- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future.
|
||||
11
Makefile
11
Makefile
@@ -61,17 +61,6 @@ devenv-postgres: ## Run postgres in devenv
|
||||
@cd .devenv/docker/postgres; \
|
||||
docker compose -f compose.yaml up -d
|
||||
|
||||
.PHONY: devenv-signoz-otel-collector
|
||||
devenv-signoz-otel-collector: ## Run signoz-otel-collector in devenv (requires clickhouse to be running)
|
||||
@cd .devenv/docker/signoz-otel-collector; \
|
||||
docker compose -f compose.yaml up -d
|
||||
|
||||
.PHONY: devenv-up
|
||||
devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhouse and signoz-otel-collector for local development
|
||||
@echo "Development environment is ready!"
|
||||
@echo " - ClickHouse: http://localhost:8123"
|
||||
@echo " - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"
|
||||
|
||||
##############################################################
|
||||
# go commands
|
||||
##############################################################
|
||||
|
||||
@@ -32,7 +32,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
||||
Short: "Run the SigNoz server",
|
||||
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
|
||||
RunE: func(currCmd *cobra.Command, args []string) error {
|
||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
|
||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
|
||||
@@ -11,10 +12,9 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
)
|
||||
|
||||
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.DeprecatedFlags) (signoz.Config, error) {
|
||||
func NewSigNozConfig(ctx context.Context, flags signoz.DeprecatedFlags) (signoz.Config, error) {
|
||||
config, err := signoz.NewConfig(
|
||||
ctx,
|
||||
logger,
|
||||
config.ResolverConfig{
|
||||
Uris: []string{"env:"},
|
||||
ProviderFactories: []config.ProviderFactory{
|
||||
@@ -31,10 +31,14 @@ func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.Depr
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func NewJWTSecret(ctx context.Context, logger *slog.Logger) string {
|
||||
func NewJWTSecret(_ context.Context, _ *slog.Logger) string {
|
||||
jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
|
||||
if len(jwtSecret) == 0 {
|
||||
logger.ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
|
||||
fmt.Println("🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!")
|
||||
fmt.Println("SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application.")
|
||||
fmt.Println("Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access.")
|
||||
fmt.Println("Please set the SIGNOZ_JWT_SECRET environment variable immediately.")
|
||||
fmt.Println("For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
|
||||
}
|
||||
|
||||
return jwtSecret
|
||||
|
||||
@@ -2,11 +2,10 @@ FROM node:18-bullseye AS build
|
||||
|
||||
WORKDIR /opt/
|
||||
COPY ./frontend/ ./
|
||||
ENV NODE_OPTIONS=--max-old-space-size=8192
|
||||
RUN CI=1 yarn install
|
||||
RUN CI=1 yarn build
|
||||
|
||||
FROM golang:1.24-bullseye
|
||||
FROM golang:1.23-bullseye
|
||||
|
||||
ARG OS="linux"
|
||||
ARG TARGETARCH
|
||||
|
||||
@@ -35,7 +35,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
||||
Short: "Run the SigNoz server",
|
||||
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
|
||||
RunE: func(currCmd *cobra.Command, args []string) error {
|
||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
|
||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -121,8 +121,6 @@ telemetrystore:
|
||||
timeout_before_checking_execution_speed: 0
|
||||
max_bytes_to_read: 0
|
||||
max_result_rows: 0
|
||||
ignore_data_skipping_indices: ""
|
||||
secondary_indices_enable_bulk_filtering: false
|
||||
|
||||
##################### Prometheus #####################
|
||||
prometheus:
|
||||
@@ -137,7 +135,10 @@ prometheus:
|
||||
##################### Alertmanager #####################
|
||||
alertmanager:
|
||||
# Specifies the alertmanager provider to use.
|
||||
provider: signoz
|
||||
provider: legacy
|
||||
legacy:
|
||||
# The API URL (with prefix) of the legacy Alertmanager instance.
|
||||
api_url: http://localhost:9093/api
|
||||
signoz:
|
||||
# The poll interval for periodically syncing the alertmanager with the config in the store.
|
||||
poll_interval: 1m
|
||||
|
||||
@@ -11,7 +11,7 @@ x-common: &common
|
||||
max-file: "3"
|
||||
x-clickhouse-defaults: &clickhouse-defaults
|
||||
!!merge <<: *common
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
tty: true
|
||||
deploy:
|
||||
labels:
|
||||
@@ -37,11 +37,9 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
nofile:
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
environment:
|
||||
- CLICKHOUSE_SKIP_USER_SETUP=1
|
||||
x-zookeeper-defaults: &zookeeper-defaults
|
||||
!!merge <<: *common
|
||||
image: signoz/zookeeper:3.7.1
|
||||
image: bitnami/zookeeper:3.7.1
|
||||
user: root
|
||||
deploy:
|
||||
labels:
|
||||
@@ -65,7 +63,7 @@ x-db-depend: &db-depend
|
||||
services:
|
||||
init-clickhouse:
|
||||
!!merge <<: *common
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
command:
|
||||
- bash
|
||||
- -c
|
||||
@@ -176,7 +174,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:v0.96.1
|
||||
image: signoz/signoz:v0.91.0
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
ports:
|
||||
@@ -209,7 +207,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:v0.129.6
|
||||
image: signoz/signoz-otel-collector:v0.128.2
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
- --manager-config=/etc/manager-config.yaml
|
||||
@@ -233,7 +231,7 @@ services:
|
||||
- signoz
|
||||
schema-migrator:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:v0.129.6
|
||||
image: signoz/signoz-schema-migrator:v0.128.2
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
@@ -11,7 +11,7 @@ x-common: &common
|
||||
max-file: "3"
|
||||
x-clickhouse-defaults: &clickhouse-defaults
|
||||
!!merge <<: *common
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
tty: true
|
||||
deploy:
|
||||
labels:
|
||||
@@ -36,11 +36,9 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
nofile:
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
environment:
|
||||
- CLICKHOUSE_SKIP_USER_SETUP=1
|
||||
x-zookeeper-defaults: &zookeeper-defaults
|
||||
!!merge <<: *common
|
||||
image: signoz/zookeeper:3.7.1
|
||||
image: bitnami/zookeeper:3.7.1
|
||||
user: root
|
||||
deploy:
|
||||
labels:
|
||||
@@ -62,7 +60,7 @@ x-db-depend: &db-depend
|
||||
services:
|
||||
init-clickhouse:
|
||||
!!merge <<: *common
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
command:
|
||||
- bash
|
||||
- -c
|
||||
@@ -117,7 +115,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:v0.96.1
|
||||
image: signoz/signoz:v0.91.0
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
ports:
|
||||
@@ -150,7 +148,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:v0.129.6
|
||||
image: signoz/signoz-otel-collector:v0.128.2
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
- --manager-config=/etc/manager-config.yaml
|
||||
@@ -176,7 +174,7 @@ services:
|
||||
- signoz
|
||||
schema-migrator:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:v0.129.6
|
||||
image: signoz/signoz-schema-migrator:v0.128.2
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
@@ -10,7 +10,7 @@ x-common: &common
|
||||
x-clickhouse-defaults: &clickhouse-defaults
|
||||
!!merge <<: *common
|
||||
# addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
tty: true
|
||||
labels:
|
||||
signoz.io/scrape: "true"
|
||||
@@ -40,11 +40,9 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
nofile:
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
environment:
|
||||
- CLICKHOUSE_SKIP_USER_SETUP=1
|
||||
x-zookeeper-defaults: &zookeeper-defaults
|
||||
!!merge <<: *common
|
||||
image: signoz/zookeeper:3.7.1
|
||||
image: bitnami/zookeeper:3.7.1
|
||||
user: root
|
||||
labels:
|
||||
signoz.io/scrape: "true"
|
||||
@@ -67,7 +65,7 @@ x-db-depend: &db-depend
|
||||
services:
|
||||
init-clickhouse:
|
||||
!!merge <<: *common
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
container_name: signoz-init-clickhouse
|
||||
command:
|
||||
- bash
|
||||
@@ -179,7 +177,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.96.1}
|
||||
image: signoz/signoz:${VERSION:-v0.91.0}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -213,7 +211,7 @@ services:
|
||||
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.6}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -239,7 +237,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -250,7 +248,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -9,7 +9,8 @@ x-common: &common
|
||||
max-file: "3"
|
||||
x-clickhouse-defaults: &clickhouse-defaults
|
||||
!!merge <<: *common
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
# addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
tty: true
|
||||
labels:
|
||||
signoz.io/scrape: "true"
|
||||
@@ -35,11 +36,9 @@ x-clickhouse-defaults: &clickhouse-defaults
|
||||
nofile:
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
environment:
|
||||
- CLICKHOUSE_SKIP_USER_SETUP=1
|
||||
x-zookeeper-defaults: &zookeeper-defaults
|
||||
!!merge <<: *common
|
||||
image: signoz/zookeeper:3.7.1
|
||||
image: bitnami/zookeeper:3.7.1
|
||||
user: root
|
||||
labels:
|
||||
signoz.io/scrape: "true"
|
||||
@@ -62,7 +61,7 @@ x-db-depend: &db-depend
|
||||
services:
|
||||
init-clickhouse:
|
||||
!!merge <<: *common
|
||||
image: clickhouse/clickhouse-server:25.5.6
|
||||
image: clickhouse/clickhouse-server:24.1.2-alpine
|
||||
container_name: signoz-init-clickhouse
|
||||
command:
|
||||
- bash
|
||||
@@ -111,7 +110,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.96.1}
|
||||
image: signoz/signoz:${VERSION:-v0.91.0}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -144,7 +143,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.6}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -166,7 +165,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -178,7 +177,7 @@ services:
|
||||
restart: on-failure
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -44,35 +44,20 @@ Before diving in, make sure you have these tools installed:

SigNoz has three main components: Clickhouse, Backend, and Frontend. Let's set them up one by one.

### 1. Setting up ClickHouse
### 1. Setting up Clickhouse

First, we need to get ClickHouse running:
First, we need to get Clickhouse running:

```bash
make devenv-clickhouse
```

This command:
- Starts ClickHouse in a single-shard, single-replica cluster
- Starts Clickhouse in a single-shard, single-replica cluster
- Sets up Zookeeper
- Runs the latest schema migrations

### 2. Setting up SigNoz OpenTelemetry Collector

Next, start the OpenTelemetry Collector to receive telemetry data:

```bash
make devenv-signoz-otel-collector
```

This command:
- Starts the SigNoz OpenTelemetry Collector
- Listens on port 4317 (gRPC) and 4318 (HTTP) for incoming telemetry data
- Forwards data to ClickHouse for storage

> 💡 **Quick Setup**: Use `make devenv-up` to start both ClickHouse and OTel Collector together

### 3. Starting the Backend
### 2. Starting the Backend

1. Run the backend server:
```bash
@@ -88,24 +73,19 @@ This command:

> 💡 **Tip**: The API server runs at `http://localhost:8080/` by default

### 4. Setting up the Frontend
### 3. Setting up the Frontend

1. Navigate to the frontend directory:
```bash
cd frontend
```

2. Install dependencies:
1. Install dependencies:
```bash
yarn install
```

3. Create a `.env` file in this directory:
2. Create a `.env` file in the `frontend` directory:
```env
FRONTEND_API_ENDPOINT=http://localhost:8080
```

4. Start the development server:
3. Start the development server:
```bash
yarn dev
```
@@ -113,25 +93,3 @@ This command:

> 💡 **Tip**: `yarn dev` will automatically rebuild when you make changes to the code

Now you're all set to start developing! Happy coding! 🎉

## Verifying Your Setup
To verify everything is working correctly:

1. **Check ClickHouse**: `curl http://localhost:8123/ping` (should return "Ok.")
2. **Check OTel Collector**: `curl http://localhost:13133` (should return health status)
3. **Check Backend**: `curl http://localhost:8080/api/v1/health` (should return `{"status":"ok"}`)
4. **Check Frontend**: Open `http://localhost:3301` in your browser
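If you prefer to script these checks, here is a minimal sketch that only assumes the `requests` package; the script name and the output format are arbitrary, and the URLs are the defaults listed above.

```python
# check_devenv.py -- automate the four verification checks above.
import requests

CHECKS = {
    "ClickHouse": "http://localhost:8123/ping",
    "OTel Collector": "http://localhost:13133",
    "Backend": "http://localhost:8080/api/v1/health",
    "Frontend": "http://localhost:3301",
}

for name, url in CHECKS.items():
    try:
        # A 200 response means the component is up and reachable.
        status = requests.get(url, timeout=2).status_code
        print(f"{name}: HTTP {status}")
    except requests.RequestException as exc:
        print(f"{name}: unreachable ({exc})")
```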
## How to send test data?

You can now send telemetry data to your local SigNoz instance:

- **OTLP gRPC**: `localhost:4317`
- **OTLP HTTP**: `localhost:4318`

For example, using `curl` to send a test trace:
```bash
curl -X POST http://localhost:4318/v1/traces \
  -H "Content-Type: application/json" \
  -d '{"resourceSpans":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"test-service"}}]},"scopeSpans":[{"spans":[{"traceId":"12345678901234567890123456789012","spanId":"1234567890123456","name":"test-span","startTimeUnixNano":"1609459200000000000","endTimeUnixNano":"1609459201000000000"}]}]}]}'
```
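The same OTLP HTTP endpoint also accepts data from any OpenTelemetry SDK. A minimal Python sketch, assuming the `opentelemetry-sdk` and `opentelemetry-exporter-otlp-proto-http` packages are installed (the service and span names here are arbitrary):

```python
from opentelemetry import trace
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter

# Point the exporter at the local SigNoz OTel Collector started by `make devenv-signoz-otel-collector`.
provider = TracerProvider(resource=Resource.create({"service.name": "test-service"}))
provider.add_span_processor(
    BatchSpanProcessor(OTLPSpanExporter(endpoint="http://localhost:4318/v1/traces"))
)
trace.set_tracer_provider(provider)

tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("test-span"):
    print("span recorded")

provider.shutdown()  # flush the batch processor before the process exits
```

After it runs, the span should show up under the `test-service` service in the Traces view of your local SigNoz UI.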
@@ -1,213 +0,0 @@
# Integration Tests

SigNoz uses integration tests to verify that different components work together correctly in a real environment. These tests run against actual services (ClickHouse, PostgreSQL, etc.) to ensure end-to-end functionality.

## How to set up the integration test environment?

### Prerequisites

Before running integration tests, ensure you have the following installed:

- Python 3.13+
- Poetry (for dependency management)
- Docker (for containerized services)

### Initial Setup

1. Navigate to the integration tests directory:
```bash
cd tests/integration
```

2. Install dependencies using Poetry:
```bash
poetry install --no-root
```

### Starting the Test Environment

To spin up all the containers necessary for writing integration tests and keep them running:

```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup
```

This command will:
- Start all required services (ClickHouse, PostgreSQL, Zookeeper, etc.)
- Keep containers running due to the `--reuse` flag
- Verify that the setup is working correctly

### Stopping the Test Environment

When you're done writing integration tests, clean up the environment:

```bash
poetry run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown
```

This will destroy the running integration test setup and clean up resources.

## Understanding the Integration Test Framework

Python and pytest form the foundation of the integration testing framework. Testcontainers are used to spin up disposable integration environments. Wiremock is used to spin up **test doubles** of other services.

- **Why Python/pytest?** It's expressive, low-boilerplate, and has powerful fixture capabilities that make integration testing straightforward. Extensive libraries for HTTP requests, JSON handling, and data analysis (numpy) make it easier to test APIs and verify data.
- **Why testcontainers?** They let us spin up isolated dependencies that match our production environment without complex setup.
- **Why wiremock?** Well maintained, documented and extensible.
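To make this concrete, here is a minimal sketch of a testcontainers-backed pytest fixture. It is illustrative only — the project's actual fixtures live in `fixtures/` and are richer — and it assumes the `testcontainers` Python package is installed:

```python
import pytest
from testcontainers.postgres import PostgresContainer


@pytest.fixture(scope="session")
def postgres_dsn():
    # Start a disposable PostgreSQL container for the whole test session and
    # hand its connection URL to any test that requests this fixture.
    with PostgresContainer("postgres:15") as postgres:
        yield postgres.get_connection_url()
    # The container is stopped and removed automatically when the session ends.
```

The actual layout of `tests/integration` looks like this: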
```
.
├── conftest.py
├── fixtures
│   ├── __init__.py
│   ├── auth.py
│   ├── clickhouse.py
│   ├── fs.py
│   ├── http.py
│   ├── migrator.py
│   ├── network.py
│   ├── postgres.py
│   ├── signoz.py
│   ├── sql.py
│   ├── sqlite.py
│   ├── types.py
│   └── zookeeper.py
├── poetry.lock
├── pyproject.toml
└── src
    └── bootstrap
        ├── __init__.py
        ├── a_database.py
        ├── b_register.py
        └── c_license.py
```

Each test suite follows some important principles:

1. **Organization**: Test suites live under `src/` in self-contained packages. Fixtures (a pytest concept) live inside `fixtures/`.
2. **Execution Order**: Files are prefixed with `a_`, `b_`, `c_` to ensure sequential execution.
3. **Time Constraints**: Each suite should complete in under 10 minutes (setup takes ~4 mins).

### Test Suite Design

Test suites should target functional domains or subsystems within SigNoz. When designing a test suite, consider these principles:

- **Functional Cohesion**: Group tests around a specific capability or service boundary
- **Data Flow**: Follow the path of data through related components
- **Change Patterns**: Components frequently modified together should be tested together

The exact boundaries for modules are intentionally flexible, allowing teams to define logical groupings based on their specific context and knowledge of the system.

Eg: The **bootstrap** integration test suite validates core system functionality:

- Database initialization
- Version check

Other test suites can be **pipelines, auth, querier**.

## How to write an integration test?

Now start writing an integration test. Create a new file `src/bootstrap/e_version.py` and paste the following:

```python
import requests

from fixtures import types
from fixtures.logger import setup_logger

logger = setup_logger(__name__)


def test_version(signoz: types.SigNoz) -> None:
    response = requests.get(signoz.self.host_config.get("/api/v1/version"), timeout=2)
    logger.info(response)
```

We have written a simple test which calls the `version` endpoint of the container in step 1. In order to just run this function, run the following command:

```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/e_version.py::test_version
```

> Note: The `--reuse` flag is used to reuse the environment if it is already running. Always use this flag when writing and running integration tests. If you don't use this flag, the environment will be destroyed and recreated every time you run the test.

Here's another example of how to write a more comprehensive integration test:

```python
from http import HTTPStatus

import requests

from fixtures import types
from fixtures.logger import setup_logger

logger = setup_logger(__name__)


def test_user_registration(signoz: types.SigNoz) -> None:
    """Test user registration functionality."""
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/register"),
        json={
            "name": "testuser",
            "orgId": "",
            "orgName": "test.org",
            "email": "test@example.com",
            "password": "password123Z$",
        },
        timeout=2,
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["setupCompleted"] is True
```

## How to run integration tests?

### Running All Tests

```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/
```

### Running Specific Test Categories

```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>

# Run querier tests
poetry run pytest --basetemp=./tmp/ -vv --reuse src/querier/
# Run auth tests
poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/
```

### Running Individual Tests

```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>/<file>.py::test_name

# Run test_register in file a_register.py in auth suite
poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/a_register.py::test_register
```

## How to configure different options for integration tests?

Tests can be configured using pytest options:

- `--sqlstore-provider` - Choose database provider (default: postgres)
- `--postgres-version` - PostgreSQL version (default: 15)
- `--clickhouse-version` - ClickHouse version (default: 25.5.6)
- `--zookeeper-version` - Zookeeper version (default: 3.7.1)

Example:
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
```
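These are ordinary pytest command-line options. A sketch of how such flags are typically declared in `conftest.py` (illustrative only — the repository's actual option wiring may differ):

```python
# conftest.py (sketch)
def pytest_addoption(parser):
    # Fixtures read these values via request.config.getoption("--postgres-version"), etc.
    parser.addoption("--sqlstore-provider", default="postgres", help="database provider to test against")
    parser.addoption("--postgres-version", default="15", help="PostgreSQL image tag")
    parser.addoption("--clickhouse-version", default="25.5.6", help="ClickHouse image tag")
    parser.addoption("--zookeeper-version", default="3.7.1", help="Zookeeper image tag")
```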
## What should I remember?

- **Always use the `--reuse` flag** when setting up the environment to keep containers running
- **Use the `--teardown` flag** when cleaning up to avoid resource leaks
- **Follow the naming convention** with alphabetical prefixes for test execution order
- **Use proper timeouts** in HTTP requests to avoid hanging tests
- **Clean up test data** between tests to avoid interference
- **Use descriptive test names** that clearly indicate what is being tested
- **Leverage fixtures** for common setup and authentication
- **Test both success and failure scenarios** to ensure robust functionality
@@ -50,14 +50,19 @@ func (p *BaseSeasonalProvider) getQueryParams(req *AnomaliesRequest) *anomalyQue
|
||||
|
||||
func (p *BaseSeasonalProvider) toTSResults(ctx context.Context, resp *qbtypes.QueryRangeResponse) []*qbtypes.TimeSeriesData {
|
||||
|
||||
tsData := []*qbtypes.TimeSeriesData{}
|
||||
|
||||
if resp == nil {
|
||||
if resp == nil || resp.Data == nil {
|
||||
p.logger.InfoContext(ctx, "nil response from query range")
|
||||
return tsData
|
||||
}
|
||||
|
||||
for _, item := range resp.Data.Results {
|
||||
data, ok := resp.Data.(struct {
|
||||
Results []any `json:"results"`
|
||||
Warnings []string `json:"warnings"`
|
||||
})
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
tsData := []*qbtypes.TimeSeriesData{}
|
||||
for _, item := range data.Results {
|
||||
if resultData, ok := item.(*qbtypes.TimeSeriesData); ok {
|
||||
tsData = append(tsData, resultData)
|
||||
}
|
||||
@@ -390,11 +395,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
||||
continue
|
||||
}
|
||||
|
||||
// no data;
|
||||
if len(result.Aggregations) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
aggOfInterest := result.Aggregations[0]
|
||||
|
||||
for _, series := range aggOfInterest.Series {
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
module base
|
||||
|
||||
type organisation
|
||||
relations
|
||||
define read: [user, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
|
||||
type user
|
||||
relations
|
||||
define read: [user, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
define delete: [user, role#assignee]
|
||||
|
||||
type anonymous
|
||||
|
||||
type role
|
||||
relations
|
||||
define assignee: [user]
|
||||
|
||||
define read: [user, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
define delete: [user, role#assignee]
|
||||
|
||||
type resources
|
||||
relations
|
||||
define create: [user, role#assignee]
|
||||
define list: [user, role#assignee]
|
||||
|
||||
define read: [user, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
define delete: [user, role#assignee]
|
||||
|
||||
type resource
|
||||
relations
|
||||
define read: [user, anonymous, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
define delete: [user, role#assignee]
|
||||
|
||||
define block: [user, role#assignee]
|
||||
|
||||
|
||||
type telemetry
|
||||
relations
|
||||
define read: [user, anonymous, role#assignee]
|
||||
@@ -1,29 +0,0 @@
|
||||
package openfgaschema
|
||||
|
||||
import (
|
||||
"context"
|
||||
_ "embed"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
|
||||
)
|
||||
|
||||
var (
|
||||
//go:embed base.fga
|
||||
baseDSL string
|
||||
)
|
||||
|
||||
type schema struct{}
|
||||
|
||||
func NewSchema() authz.Schema {
|
||||
return &schema{}
|
||||
}
|
||||
|
||||
func (schema *schema) Get(ctx context.Context) []openfgapkgtransformer.ModuleFile {
|
||||
return []openfgapkgtransformer.ModuleFile{
|
||||
{
|
||||
Name: "base.fga",
|
||||
Contents: baseDSL,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,132 +0,0 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
const (
|
||||
authzDeniedMessage string = "::AUTHZ-DENIED::"
|
||||
)
|
||||
|
||||
type AuthZ struct {
|
||||
logger *slog.Logger
|
||||
authzService authz.AuthZ
|
||||
}
|
||||
|
||||
func NewAuthZ(logger *slog.Logger) *AuthZ {
|
||||
if logger == nil {
|
||||
panic("cannot build authz middleware, logger is empty")
|
||||
}
|
||||
|
||||
return &AuthZ{logger: logger}
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := claims.IsViewer(); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) EditAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := claims.IsEditor(); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) AdminAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := claims.IsAdmin(); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) SelfAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id := mux.Vars(req)["id"]
|
||||
if err := claims.IsSelfAccess(id); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) OpenAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
// Check middleware accepts the relation, typeable, parentTypeable (for direct access + group relations) and a callback function to derive selector and parentSelectors on per request basis.
|
||||
func (middleware *AuthZ) Check(next http.HandlerFunc, relation authtypes.Relation, translation authtypes.Relation, typeable authtypes.Typeable, parentTypeable authtypes.Typeable, cb authtypes.SelectorCallbackFn) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
selector, parentSelectors, err := cb(req)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, relation, typeable, selector, parentTypeable, parentSelectors...)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
@@ -113,8 +113,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// v5
|
||||
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
|
||||
|
||||
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
|
||||
|
||||
// Gateway
|
||||
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
|
||||
|
||||
|
||||
@@ -13,11 +13,11 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
@@ -192,14 +192,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||
))
|
||||
}
|
||||
|
||||
password := types.MustGenerateFactorPassword(newUser.ID.StringValue())
|
||||
password, err := types.NewFactorPassword(uuid.NewString())
|
||||
|
||||
err = ah.Signoz.Modules.User.CreateUser(ctx, newUser, user.WithFactorPassword(password))
|
||||
integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
|
||||
}
|
||||
|
||||
return newUser, nil
|
||||
return integrationUser, nil
|
||||
}
|
||||
|
||||
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
|
||||
|
||||
@@ -260,9 +260,11 @@ func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
|
||||
finalResp := &qbtypes.QueryRangeResponse{
|
||||
Type: queryRangeRequest.RequestType,
|
||||
Data: struct {
|
||||
Results []any `json:"results"`
|
||||
Results []any `json:"results"`
|
||||
Warnings []string `json:"warnings"`
|
||||
}{
|
||||
Results: results,
|
||||
Results: results,
|
||||
Warnings: make([]string, 0), // TODO(srikanthccv): will there be any warnings here?
|
||||
},
|
||||
Meta: struct {
|
||||
RowsScanned uint64 `json:"rowsScanned"`
|
||||
|
||||
@@ -8,8 +8,6 @@ import (
|
||||
"net/http"
|
||||
_ "net/http/pprof" // http profiler
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
|
||||
"github.com/gorilla/handlers"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/app/api"
|
||||
@@ -46,6 +44,19 @@ import (
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type ServerOptions struct {
|
||||
Config signoz.Config
|
||||
SigNoz *signoz.SigNoz
|
||||
HTTPHostPort string
|
||||
PrivateHostPort string
|
||||
PreferSpanMetrics bool
|
||||
FluxInterval string
|
||||
FluxIntervalForTraceDetail string
|
||||
Cluster string
|
||||
GatewayUrl string
|
||||
Jwt *authtypes.JWT
|
||||
}
|
||||
|
||||
// Server runs HTTP, Mux and a grpc server
|
||||
type Server struct {
|
||||
config signoz.Config
|
||||
@@ -58,6 +69,11 @@ type Server struct {
|
||||
httpServer *http.Server
|
||||
httpHostPort string
|
||||
|
||||
// private http
|
||||
privateConn net.Listener
|
||||
privateHTTP *http.Server
|
||||
privateHostPort string
|
||||
|
||||
opampServer *opamp.Server
|
||||
|
||||
// Usage manager
|
||||
@@ -167,6 +183,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
|
||||
jwt: jwt,
|
||||
ruleManager: rm,
|
||||
httpHostPort: baseconst.HTTPHostPort,
|
||||
privateHostPort: baseconst.PrivateHostPort,
|
||||
unavailableChannel: make(chan healthcheck.Status),
|
||||
usageManager: usageManager,
|
||||
}
|
||||
@@ -179,6 +196,13 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
|
||||
|
||||
s.httpServer = httpServer
|
||||
|
||||
privateServer, err := s.createPrivateServer(apiHandler)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s.privateHTTP = privateServer
|
||||
|
||||
s.opampServer = opamp.InitializeServer(
|
||||
&opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation,
|
||||
)
|
||||
@@ -191,6 +215,36 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
|
||||
return s.unavailableChannel
|
||||
}
|
||||
|
||||
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
|
||||
r := baseapp.NewRouter()
|
||||
|
||||
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
|
||||
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
|
||||
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
|
||||
s.config.APIServer.Timeout.ExcludedRoutes,
|
||||
s.config.APIServer.Timeout.Default,
|
||||
s.config.APIServer.Timeout.Max,
|
||||
).Wrap)
|
||||
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
|
||||
|
||||
apiHandler.RegisterPrivateRoutes(r)
|
||||
|
||||
c := cors.New(cors.Options{
|
||||
//todo(amol): find out a way to add exact domain or
|
||||
// ip here for alert manager
|
||||
AllowedOrigins: []string{"*"},
|
||||
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"},
|
||||
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"},
|
||||
})
|
||||
|
||||
handler := c.Handler(r)
|
||||
handler = handlers.CompressHandler(handler)
|
||||
|
||||
return &http.Server{
|
||||
Handler: handler,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
|
||||
r := baseapp.NewRouter()
|
||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
|
||||
@@ -203,7 +257,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
s.config.APIServer.Timeout.Max,
|
||||
).Wrap)
|
||||
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
|
||||
r.Use(middleware.NewComment().Wrap)
|
||||
|
||||
apiHandler.RegisterRoutes(r, am)
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
@@ -256,6 +309,19 @@ func (s *Server) initListeners() error {
|
||||
|
||||
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
|
||||
|
||||
// listen on private port to support internal services
|
||||
privateHostPort := s.privateHostPort
|
||||
|
||||
if privateHostPort == "" {
|
||||
return fmt.Errorf("baseconst.PrivateHostPort is required")
|
||||
}
|
||||
|
||||
s.privateConn, err = net.Listen("tcp", privateHostPort)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -294,6 +360,26 @@ func (s *Server) Start(ctx context.Context) error {
|
||||
}
|
||||
}()
|
||||
|
||||
var privatePort int
|
||||
if port, err := utils.GetPort(s.privateConn.Addr()); err == nil {
|
||||
privatePort = port
|
||||
}
|
||||
|
||||
go func() {
|
||||
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort))
|
||||
|
||||
switch err := s.privateHTTP.Serve(s.privateConn); err {
|
||||
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
|
||||
// normal exit, nothing to do
|
||||
zap.L().Info("private http server closed")
|
||||
default:
|
||||
zap.L().Error("Could not start private HTTP server", zap.Error(err))
|
||||
}
|
||||
|
||||
s.unavailableChannel <- healthcheck.Unavailable
|
||||
|
||||
}()
|
||||
|
||||
go func() {
|
||||
zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
|
||||
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
|
||||
@@ -313,6 +399,12 @@ func (s *Server) Stop(ctx context.Context) error {
|
||||
}
|
||||
}
|
||||
|
||||
if s.privateHTTP != nil {
|
||||
if err := s.privateHTTP.Shutdown(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
s.opampServer.Stop()
|
||||
|
||||
if s.ruleManager != nil {
|
||||
@@ -336,8 +428,6 @@ func makeRulesManager(
|
||||
querier querier.Querier,
|
||||
logger *slog.Logger,
|
||||
) (*baserules.Manager, error) {
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlstore)
|
||||
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
|
||||
// create manager opts
|
||||
managerOpts := &baserules.ManagerOptions{
|
||||
TelemetryStore: telemetryStore,
|
||||
@@ -352,10 +442,8 @@ func makeRulesManager(
|
||||
PrepareTaskFunc: rules.PrepareTaskFunc,
|
||||
PrepareTestRuleFunc: rules.TestNotification,
|
||||
Alertmanager: alertmanager,
|
||||
SQLStore: sqlstore,
|
||||
OrgGetter: orgGetter,
|
||||
RuleStore: ruleStore,
|
||||
MaintenanceStore: maintenanceStore,
|
||||
SqlStore: sqlstore,
|
||||
}
|
||||
|
||||
// create Manager
|
||||
|
||||
@@ -40,7 +40,7 @@ var IsDotMetricsEnabled = false
var IsPreferSpanMetrics = false

func init() {
    if GetOrDefaultEnv(DotMetricsEnabled, "true") == "true" {
    if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
        IsDotMetricsEnabled = true
    }

@@ -1,7 +1,7 @@
package model

import (
    "errors"
    "fmt"

    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
@@ -57,7 +57,7 @@ func Unauthorized(err error) *ApiError {
func BadRequestStr(s string) *ApiError {
    return &ApiError{
        Typ: basemodel.ErrorBadData,
        Err: errors.New(s),
        Err: fmt.Errorf(s),
    }
}

@@ -73,7 +73,7 @@ func InternalError(err error) *ApiError {
func InternalErrorStr(s string) *ApiError {
    return &ApiError{
        Typ: basemodel.ErrorInternal,
        Err: errors.New(s),
        Err: fmt.Errorf(s),
    }
}

@@ -35,6 +35,7 @@ import (
|
||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
yaml "gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -166,9 +167,16 @@ func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.
|
||||
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
|
||||
)
|
||||
|
||||
st, en := r.Timestamps(ts)
|
||||
start := st.UnixMilli()
|
||||
end := en.UnixMilli()
|
||||
start := ts.Add(-time.Duration(r.EvalWindow())).UnixMilli()
|
||||
end := ts.UnixMilli()
|
||||
|
||||
if r.EvalDelay() > 0 {
|
||||
start = start - int64(r.EvalDelay().Milliseconds())
|
||||
end = end - int64(r.EvalDelay().Milliseconds())
|
||||
}
|
||||
// round to minute otherwise we could potentially miss data
|
||||
start = start - (start % (60 * 1000))
|
||||
end = end - (end % (60 * 1000))
|
||||
|
||||
compositeQuery := r.Condition().CompositeQuery
|
||||
|
||||
@@ -203,8 +211,7 @@ func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*q
|
||||
},
|
||||
NoCache: true,
|
||||
}
|
||||
req.CompositeQuery.Queries = make([]qbtypes.QueryEnvelope, len(r.Condition().CompositeQuery.Queries))
|
||||
copy(req.CompositeQuery.Queries, r.Condition().CompositeQuery.Queries)
|
||||
copy(r.Condition().CompositeQuery.Queries, req.CompositeQuery.Queries)
|
||||
return req, nil
|
||||
}
|
||||
|
||||
@@ -245,17 +252,10 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
|
||||
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
|
||||
|
||||
for _, series := range queryResult.AnomalyScores {
|
||||
if r.Condition() != nil && r.Condition().RequireMinPoints {
|
||||
if len(series.Points) < r.Condition().RequiredNumPoints {
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
continue
|
||||
}
|
||||
smpl, shouldAlert := r.ShouldAlert(*series)
|
||||
if shouldAlert {
|
||||
resultVector = append(resultVector, smpl)
|
||||
}
|
||||
results, err := r.Threshold.ShouldAlert(*series)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resultVector = append(resultVector, results...)
|
||||
}
|
||||
return resultVector, nil
|
||||
}
|
||||
@@ -295,17 +295,10 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
|
||||
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
|
||||
|
||||
for _, series := range queryResult.AnomalyScores {
|
||||
if r.Condition().RequireMinPoints {
|
||||
if len(series.Points) < r.Condition().RequiredNumPoints {
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
continue
|
||||
}
|
||||
smpl, shouldAlert := r.ShouldAlert(*series)
|
||||
if shouldAlert {
|
||||
resultVector = append(resultVector, smpl)
|
||||
}
|
||||
results, err := r.Threshold.ShouldAlert(*series)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resultVector = append(resultVector, results...)
|
||||
}
|
||||
return resultVector, nil
|
||||
}
|
||||
@@ -505,7 +498,7 @@ func (r *AnomalyRule) String() string {
|
||||
PreferredChannels: r.PreferredChannels(),
|
||||
}
|
||||
|
||||
byt, err := json.Marshal(ar)
|
||||
byt, err := yaml.Marshal(ar)
|
||||
if err != nil {
|
||||
return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
|
||||
}
|
||||
|
||||
@@ -3,10 +3,8 @@ package rules
import (
    "context"
    "fmt"

    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
    baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
@@ -22,10 +20,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
    var task baserules.Task

    ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
    evaluation, err := opts.Rule.Evaluation.GetEvaluation()
    if err != nil {
        return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err)
    }
    if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
        // create a threshold rule
        tr, err := baserules.NewThresholdRule(
@@ -46,7 +40,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        rules = append(rules, tr)

        // create ch rule task for evaluation
        task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
        task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

    } else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

@@ -68,7 +62,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        rules = append(rules, pr)

        // create promql rule task for evaluation
        task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
        task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

    } else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
        // create anomaly rule
@@ -90,7 +84,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        rules = append(rules, ar)

        // create anomaly rule task for evaluation
        task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
        task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

    } else {
        return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)

@@ -1,484 +0,0 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.

# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.

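A minimal sketch of that detection, assuming Node's `fs` module and a `package.json` at the project root (the helper name is hypothetical, not part of the repo):
```ts
import { existsSync, readFileSync } from 'fs';

// Hypothetical helper: true when the project appears to use TypeScript.
function projectUsesTypeScript(root = '.'): boolean {
  if (existsSync(`${root}/tsconfig.json`)) return true;
  const pkg = JSON.parse(readFileSync(`${root}/package.json`, 'utf-8'));
  const deps = { ...pkg.dependencies, ...pkg.devDependencies };
  return 'typescript' in deps;
}
```
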
# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.

**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid `any` type – use proper typing instead

# Unit Testing Focus
Focus on critical functionality (business logic, utility functions, component behavior)
Mock dependencies (API calls, external modules) before imports
Test multiple data scenarios (valid inputs, invalid inputs, edge cases) (see the sketch below)
Write maintainable tests with descriptive names grouped in describe blocks

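A hedged sketch of those two habits together; `api/userService` and `formatUserLabel` are hypothetical names used for illustration, not modules in this repo:
```ts
// Mock the external module that '../formatUserLabel' imports, before importing it
// (jest.mock calls are hoisted above the imports).
jest.mock('api/userService', () => ({ fetchUser: jest.fn() }));

import { formatUserLabel } from '../formatUserLabel';

describe('formatUserLabel', () => {
  // Cover valid, invalid, and edge-case inputs in one table.
  it.each<[string, { id: number; name: string }, string]>([
    ['a valid user', { id: 1, name: 'Ada' }, 'Ada (#1)'],
    ['an empty name', { id: 2, name: '' }, 'Unknown (#2)'],
  ])('formats %s', (_label, user, expected) => {
    expect(formatUserLabel(user)).toBe(expected);
  });
});
```
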
# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)

**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases

**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts` (wiring shown below)

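For orientation, the wiring behind that global mock is a `moduleNameMapper` entry; the mapping below mirrors this repo's jest.config.ts, while the surrounding config object is only an excerpt:
```ts
import type { Config } from '@jest/types';

// jest.config.ts (excerpt): every `import uPlot from 'uplot'` in a test
// resolves to the shared mock, so individual tests never stub the chart library.
const config: Config.InitialOptions = {
  moduleNameMapper: {
    '^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
  },
};

export default config;
```
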
# Repo-specific Testing Conventions

## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.

## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.

## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */ as any);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
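For example, a hook mock built on those helpers might read as follows; the import path and the exact result shape of `rqSuccess`/`rqLoading` are assumptions about the shared test utilities, not a documented API:
```ts
import useFoo from 'hooks/useFoo';
// Assumed location of the helpers; adjust to wherever the repo exports them.
import { rqLoading, rqSuccess } from 'tests/test-utils';

jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);

// rqSuccess(data) is assumed to build a react-query style result
// ({ data, isLoading: false, isError: false, ... }); rqLoading/rqError are analogous.
mockUseFoo.mockReturnValueOnce(rqLoading());
mockUseFoo.mockReturnValue(rqSuccess({ items: [] }));
```
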
## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
  rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata_`.

## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```

```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```

## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```

## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`. An example of this order of preference follows.
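A short illustration of that preference order; the roles, texts, and test id below are hypothetical, and the lines are meant to sit inside an `it('...', async () => { ... })` body:
```ts
import { screen, userEvent } from 'tests/test-utils';

const user = userEvent.setup({ pointerEventsCheck: 0 });

// 1. Accessible query first - resilient to markup changes.
await user.click(screen.getByRole('button', { name: /apply filters/i }));

// 2. Visible text when there is no meaningful role or label.
expect(await screen.findByText(/no results found/i)).toBeInTheDocument();

// 3. data-testid only as a last resort, for purely structural nodes.
expect(screen.getByTestId('results-table-wrapper')).toBeInTheDocument();
```
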
# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';

describe('MyComponent', () => {
  it('renders and interacts', async () => {
    const user = userEvent.setup({ pointerEventsCheck: 0 });

    server.use(
      rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
    );

    render(<MyComponent />, undefined, { initialRoute: '/foo' });

    expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
    await user.click(screen.getByRole('button', { name: /refresh/i }));
    await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
  });
});
```

# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates

# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3–5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc. (see the sketch after this list)
- **No Any**: Enforce type safety
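The `makeUser` helper mentioned above is a data factory; a minimal sketch of that pattern looks like the following, with the field names being an assumption rather than the repo's actual user shape:
```ts
interface TestUser {
  id: number;
  name: string;
  email: string;
}

// Factory with sensible defaults; each test overrides only the fields it cares about.
export const makeUser = (overrides: Partial<TestUser> = {}): TestUser => ({
  id: 1,
  name: 'Ada Lovelace',
  email: 'ada@example.com',
  ...overrides,
});

// Usage: const admin = makeUser({ name: 'Admin' });
```
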
# TypeScript Type Safety Examples
|
||||
|
||||
## Proper Mock Typing
|
||||
```ts
|
||||
// ✅ GOOD - Properly typed mocks
|
||||
interface User {
|
||||
id: number;
|
||||
name: string;
|
||||
email: string;
|
||||
}
|
||||
|
||||
interface ApiResponse<T> {
|
||||
data: T;
|
||||
status: number;
|
||||
message: string;
|
||||
}
|
||||
|
||||
// Type the mock functions
|
||||
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
|
||||
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;
|
||||
|
||||
// Mock implementation with proper typing
|
||||
mockFetchUser.mockResolvedValue({
|
||||
data: { id: 1, name: 'John Doe', email: 'john@example.com' },
|
||||
status: 200,
|
||||
message: 'Success'
|
||||
});
|
||||
|
||||
// ❌ BAD - Using any type
|
||||
const mockFetchUser = jest.fn() as any; // Don't do this
|
||||
```
|
||||
|
||||
## React Component Testing with Types
|
||||
```ts
|
||||
// ✅ GOOD - Properly typed component testing
|
||||
interface ComponentProps {
|
||||
title: string;
|
||||
data: User[];
|
||||
onUserSelect: (user: User) => void;
|
||||
isLoading?: boolean;
|
||||
}
|
||||
|
||||
const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
|
||||
// Component implementation
|
||||
};
|
||||
|
||||
describe('TestComponent', () => {
|
||||
it('should render with proper props', () => {
|
||||
// Arrange - Type the props properly
|
||||
const mockProps: ComponentProps = {
|
||||
title: 'Test Title',
|
||||
data: [{ id: 1, name: 'John', email: 'john@example.com' }],
|
||||
onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
|
||||
isLoading: false
|
||||
};
|
||||
|
||||
// Act
|
||||
render(<TestComponent {...mockProps} />);
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('Test Title')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Hook Testing with Types
|
||||
```ts
|
||||
// ✅ GOOD - Properly typed hook testing
|
||||
interface UseUserDataReturn {
|
||||
user: User | null;
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
refetch: () => void;
|
||||
}
|
||||
|
||||
const useUserData = (id: number): UseUserDataReturn => {
|
||||
// Hook implementation
|
||||
};
|
||||
|
||||
describe('useUserData', () => {
|
||||
it('should return user data with proper typing', () => {
|
||||
// Arrange
|
||||
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
|
||||
mockFetchUser.mockResolvedValue({
|
||||
data: mockUser,
|
||||
status: 200,
|
||||
message: 'Success'
|
||||
});
|
||||
|
||||
// Act
|
||||
const { result } = renderHook(() => useUserData(1));
|
||||
|
||||
// Assert
|
||||
expect(result.current.user).toEqual(mockUser);
|
||||
expect(result.current.loading).toBe(false);
|
||||
expect(result.current.error).toBeNull();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Global Mock Type Safety
|
||||
```ts
|
||||
// ✅ GOOD - Type-safe global mocks
|
||||
// In __mocks__/routerMock.ts
|
||||
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
|
||||
pathname: '/traces',
|
||||
search: '',
|
||||
hash: '',
|
||||
state: null,
|
||||
key: 'test-key',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
// In test files
|
||||
const location = useLocation(); // Properly typed from global mock
|
||||
expect(location.pathname).toBe('/traces');
|
||||
```
|
||||
|
||||
# TypeScript Configuration for Jest
|
||||
|
||||
## Required Jest Configuration
|
||||
```json
|
||||
// jest.config.ts
|
||||
{
|
||||
"preset": "ts-jest/presets/js-with-ts-esm",
|
||||
"globals": {
|
||||
"ts-jest": {
|
||||
"useESM": true,
|
||||
"isolatedModules": true,
|
||||
"tsconfig": "<rootDir>/tsconfig.jest.json"
|
||||
}
|
||||
},
|
||||
"extensionsToTreatAsEsm": [".ts", ".tsx"],
|
||||
"moduleFileExtensions": ["ts", "tsx", "js", "json"]
|
||||
}
|
||||
```
|
||||
|
||||
## TypeScript Jest Configuration
|
||||
```json
|
||||
// tsconfig.jest.json
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"types": ["jest", "@testing-library/jest-dom"],
|
||||
"esModuleInterop": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"moduleResolution": "node"
|
||||
},
|
||||
"include": [
|
||||
"src/**/*",
|
||||
"**/*.test.ts",
|
||||
"**/*.test.tsx",
|
||||
"__mocks__/**/*"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Common Type Safety Patterns
|
||||
|
||||
### Mock Function Typing
|
||||
```ts
|
||||
// ✅ GOOD - Proper mock function typing
|
||||
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
|
||||
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;
|
||||
|
||||
// ❌ BAD - Using any
|
||||
const mockApiCall = jest.fn() as any;
|
||||
```
|
||||
|
||||
### Generic Mock Typing
|
||||
```ts
|
||||
// ✅ GOOD - Generic mock typing
|
||||
interface MockApiResponse<T> {
|
||||
data: T;
|
||||
status: number;
|
||||
}
|
||||
|
||||
const mockFetchData = jest.fn() as jest.MockedFunction<
|
||||
<T>(endpoint: string) => Promise<MockApiResponse<T>>
|
||||
>;
|
||||
|
||||
// Usage
|
||||
mockFetchData<User>('/users').mockResolvedValue({
|
||||
data: { id: 1, name: 'John' },
|
||||
status: 200
|
||||
});
|
||||
```
|
||||
|
||||
### React Testing Library with Types
|
||||
```ts
|
||||
// ✅ GOOD - Typed testing utilities
|
||||
import { render, screen, RenderResult } from '@testing-library/react';
|
||||
import { ComponentProps } from 'react';
|
||||
|
||||
type TestComponentProps = ComponentProps<typeof TestComponent>;
|
||||
|
||||
const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
|
||||
const defaultProps: TestComponentProps = {
|
||||
title: 'Test',
|
||||
data: [],
|
||||
onSelect: jest.fn(),
|
||||
...props
|
||||
};
|
||||
|
||||
return render(<TestComponent {...defaultProps} />);
|
||||
};
|
||||
```
|
||||
|
||||
### Error Handling with Types
|
||||
```ts
|
||||
// ✅ GOOD - Typed error handling
|
||||
interface ApiError {
|
||||
message: string;
|
||||
code: number;
|
||||
details?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
const mockApiError: ApiError = {
|
||||
message: 'API Error',
|
||||
code: 500,
|
||||
details: { endpoint: '/users' }
|
||||
};
|
||||
|
||||
mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
|
||||
```
|
||||
|
||||
## Type Safety Checklist
|
||||
- [ ] All mock functions use `jest.MockedFunction<T>`
|
||||
- [ ] All mock data has proper interfaces
|
||||
- [ ] No `any` types in test files
|
||||
- [ ] Generic types are used where appropriate
|
||||
- [ ] Error types are properly defined
|
||||
- [ ] Component props are typed
|
||||
- [ ] Hook return types are defined
|
||||
- [ ] API response types are defined
|
||||
- [ ] Global mocks are type-safe
|
||||
- [ ] Test utilities are properly typed
|
||||
|
||||
# Mock Decision Tree
|
||||
```
|
||||
Is it used in 20+ test files?
|
||||
├─ YES → Use Global Mock
|
||||
│ ├─ react-router-dom
|
||||
│ ├─ react-query
|
||||
│ ├─ antd components
|
||||
│ └─ browser APIs
|
||||
│
|
||||
└─ NO → Is it business logic?
|
||||
├─ YES → Use Local Mock
|
||||
│ ├─ API endpoints
|
||||
│ ├─ Custom hooks
|
||||
│ └─ Domain components
|
||||
│
|
||||
└─ NO → Is it test-specific?
|
||||
├─ YES → Use Local Mock
|
||||
│ ├─ Error scenarios
|
||||
│ ├─ Loading states
|
||||
│ └─ Specific data
|
||||
│
|
||||
└─ NO → Consider Global Mock
|
||||
└─ If it becomes frequently used
|
||||
```
|
||||
|
||||
# Common Anti-Patterns to Avoid
|
||||
|
||||
❌ **Don't mock global dependencies locally:**
|
||||
```js
|
||||
// BAD - This is already globally mocked
|
||||
jest.mock('react-router-dom', () => ({ ... }));
|
||||
```
|
||||
|
||||
❌ **Don't create global mocks for test-specific data:**
|
||||
```js
|
||||
// BAD - This should be local
|
||||
jest.mock('../api/tracesService', () => ({
|
||||
getTraces: jest.fn(() => specificTestData)
|
||||
}));
|
||||
```
|
||||
|
||||
✅ **Do use global mocks for infrastructure:**
|
||||
```js
|
||||
// GOOD - Use global mock
|
||||
import { useLocation } from 'react-router-dom';
|
||||
```
|
||||
|
||||
✅ **Do create local mocks for business logic:**
|
||||
```js
|
||||
// GOOD - Local mock for specific test needs
|
||||
jest.mock('../api/tracesService', () => ({
|
||||
getTraces: jest.fn(() => mockTracesData)
|
||||
}));
|
||||
```
|
||||
@@ -1,5 +1,4 @@
|
||||
node_modules
|
||||
build
|
||||
*.typegen.ts
|
||||
i18-generate-hash.js
|
||||
src/parser/TraceOperatorParser/**
|
||||
i18-generate-hash.js
|
||||
@@ -10,6 +10,4 @@ public/
|
||||
**/*.json
|
||||
|
||||
# Ignore all files in parser folder:
|
||||
src/parser/**
|
||||
|
||||
src/TraceOperator/parser/**
|
||||
src/parser/**
|
||||
@@ -1,51 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
|
||||
// Mock for uplot library used in tests
|
||||
export interface MockUPlotInstance {
|
||||
setData: jest.Mock;
|
||||
setSize: jest.Mock;
|
||||
destroy: jest.Mock;
|
||||
redraw: jest.Mock;
|
||||
setSeries: jest.Mock;
|
||||
}
|
||||
|
||||
export interface MockUPlotPaths {
|
||||
spline: jest.Mock;
|
||||
bars: jest.Mock;
|
||||
}
|
||||
|
||||
// Create mock instance methods
|
||||
const createMockUPlotInstance = (): MockUPlotInstance => ({
|
||||
setData: jest.fn(),
|
||||
setSize: jest.fn(),
|
||||
destroy: jest.fn(),
|
||||
redraw: jest.fn(),
|
||||
setSeries: jest.fn(),
|
||||
});
|
||||
|
||||
// Create mock paths
|
||||
const mockPaths: MockUPlotPaths = {
|
||||
spline: jest.fn(),
|
||||
bars: jest.fn(),
|
||||
};
|
||||
|
||||
// Mock static methods
|
||||
const mockTzDate = jest.fn(
|
||||
(date: Date, _timezone: string) => new Date(date.getTime()),
|
||||
);
|
||||
|
||||
// Mock uPlot constructor - this needs to be a proper constructor function
|
||||
function MockUPlot(
|
||||
_options: unknown,
|
||||
_data: unknown,
|
||||
_target: HTMLElement,
|
||||
): MockUPlotInstance {
|
||||
return createMockUPlotInstance();
|
||||
}
|
||||
|
||||
// Add static methods to the constructor
|
||||
MockUPlot.tzDate = mockTzDate;
|
||||
MockUPlot.paths = mockPaths;
|
||||
|
||||
// Export the constructor as default
|
||||
export default MockUPlot;
|
||||
@@ -1,29 +0,0 @@
|
||||
// Mock for useSafeNavigate hook to avoid React Router version conflicts in tests
|
||||
interface SafeNavigateOptions {
|
||||
replace?: boolean;
|
||||
state?: unknown;
|
||||
}
|
||||
|
||||
interface SafeNavigateTo {
|
||||
pathname?: string;
|
||||
search?: string;
|
||||
hash?: string;
|
||||
}
|
||||
|
||||
type SafeNavigateToType = string | SafeNavigateTo;
|
||||
|
||||
interface UseSafeNavigateReturn {
|
||||
safeNavigate: jest.MockedFunction<
|
||||
(to: SafeNavigateToType, options?: SafeNavigateOptions) => void
|
||||
>;
|
||||
}
|
||||
|
||||
export const useSafeNavigate = (): UseSafeNavigateReturn => ({
|
||||
safeNavigate: jest.fn(
|
||||
(to: SafeNavigateToType, options?: SafeNavigateOptions) => {
|
||||
console.log(`Mock safeNavigate called with:`, to, options);
|
||||
},
|
||||
) as jest.MockedFunction<
|
||||
(to: SafeNavigateToType, options?: SafeNavigateOptions) => void
|
||||
>,
|
||||
});
|
||||
@@ -1,7 +1,5 @@
|
||||
import type { Config } from '@jest/types';
|
||||
|
||||
const USE_SAFE_NAVIGATE_MOCK_PATH = '<rootDir>/__mocks__/useSafeNavigate.ts';
|
||||
|
||||
const config: Config.InitialOptions = {
|
||||
clearMocks: true,
|
||||
coverageDirectory: 'coverage',
|
||||
@@ -12,17 +10,12 @@ const config: Config.InitialOptions = {
|
||||
moduleNameMapper: {
|
||||
'\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
|
||||
'\\.md$': '<rootDir>/__mocks__/cssMock.ts',
|
||||
'^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
|
||||
'^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
||||
'^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
||||
'^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
||||
},
|
||||
globals: {
|
||||
extensionsToTreatAsEsm: ['.ts'],
|
||||
'ts-jest': {
|
||||
useESM: true,
|
||||
isolatedModules: true,
|
||||
tsconfig: '<rootDir>/tsconfig.jest.json',
|
||||
},
|
||||
},
|
||||
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
|
||||
@@ -32,7 +25,7 @@ const config: Config.InitialOptions = {
|
||||
'^.+\\.(js|jsx)$': 'babel-jest',
|
||||
},
|
||||
transformIgnorePatterns: [
|
||||
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|@signozhq/table|@signozhq/calendar|@signozhq/input|@signozhq/popover|@signozhq/button|@signozhq/sonner|@signozhq/*|date-fns|d3-interpolate|d3-color|api|@codemirror|@lezer|@marijn)/)',
|
||||
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color|api|@codemirror|@lezer|@marijn)/)',
|
||||
],
|
||||
setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
|
||||
testPathIgnorePatterns: ['/node_modules/', '/public/'],
|
||||
|
||||
@@ -43,21 +43,11 @@
|
||||
"@radix-ui/react-tooltip": "1.0.7",
|
||||
"@sentry/react": "8.41.0",
|
||||
"@sentry/webpack-plugin": "2.22.6",
|
||||
"@signozhq/badge": "0.0.2",
|
||||
"@signozhq/button": "0.0.2",
|
||||
"@signozhq/calendar": "0.0.0",
|
||||
"@signozhq/callout": "0.0.2",
|
||||
"@signozhq/design-tokens": "1.1.4",
|
||||
"@signozhq/input": "0.0.2",
|
||||
"@signozhq/popover": "0.0.0",
|
||||
"@signozhq/resizable": "0.0.0",
|
||||
"@signozhq/sonner": "0.1.0",
|
||||
"@signozhq/table": "0.3.7",
|
||||
"@signozhq/tooltip": "0.0.2",
|
||||
"@tanstack/react-table": "8.20.6",
|
||||
"@tanstack/react-virtual": "3.11.2",
|
||||
"@uiw/codemirror-theme-copilot": "4.23.11",
|
||||
"@uiw/codemirror-theme-github": "4.24.1",
|
||||
"@uiw/codemirror-theme-copilot": "4.23.11",
|
||||
"@uiw/react-codemirror": "4.23.10",
|
||||
"@uiw/react-md-editor": "3.23.5",
|
||||
"@visx/group": "3.3.0",
|
||||
@@ -102,7 +92,6 @@
|
||||
"i18next-http-backend": "^1.3.2",
|
||||
"jest": "^27.5.1",
|
||||
"js-base64": "^3.7.2",
|
||||
"kbar": "0.1.0-beta.48",
|
||||
"less": "^4.1.2",
|
||||
"less-loader": "^10.2.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
@@ -139,7 +128,6 @@
|
||||
"redux": "^4.0.5",
|
||||
"redux-thunk": "^2.3.0",
|
||||
"rehype-raw": "7.0.0",
|
||||
"rrule": "2.8.1",
|
||||
"stream": "^0.0.2",
|
||||
"style-loader": "1.3.0",
|
||||
"styled-components": "^5.3.11",
|
||||
@@ -278,7 +266,6 @@
|
||||
"serialize-javascript": "6.0.2",
|
||||
"prismjs": "1.30.0",
|
||||
"got": "11.8.5",
|
||||
"form-data": "4.0.4",
|
||||
"brace-expansion": "^2.0.2"
|
||||
"form-data": "4.0.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,8 +46,5 @@
|
||||
"ALERT_HISTORY": "SigNoz | Alert Rule History",
|
||||
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
|
||||
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
|
||||
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
|
||||
"METER_EXPLORER": "SigNoz | Meter Explorer",
|
||||
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
|
||||
"METER": "SigNoz | Meter"
|
||||
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring"
|
||||
}
|
||||
|
||||
@@ -69,8 +69,5 @@
|
||||
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
|
||||
"METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
|
||||
"API_MONITORING": "SigNoz | External APIs",
|
||||
"METER_EXPLORER": "SigNoz | Meter Explorer",
|
||||
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
|
||||
"METER": "SigNoz | Meter"
|
||||
"API_MONITORING": "SigNoz | External APIs"
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import getLocalStorageApi from 'api/browser/localstorage/get';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import AppLoading from 'components/AppLoading/AppLoading';
|
||||
import KBarCommandPalette from 'components/KBarCommandPalette/KBarCommandPalette';
|
||||
import NotFound from 'components/NotFound';
|
||||
import Spinner from 'components/Spinner';
|
||||
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
|
||||
@@ -26,7 +25,6 @@ import { useAppContext } from 'providers/App/App';
|
||||
import { IUser } from 'providers/App/types';
|
||||
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
|
||||
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
|
||||
import { KBarCommandPaletteProvider } from 'providers/KBarCommandPaletteProvider';
|
||||
import { QueryBuilderProvider } from 'providers/QueryBuilder';
|
||||
import { Suspense, useCallback, useEffect, useState } from 'react';
|
||||
import { Route, Router, Switch } from 'react-router-dom';
|
||||
@@ -370,42 +368,39 @@ function App(): JSX.Element {
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<CompatRouter>
|
||||
<KBarCommandPaletteProvider>
|
||||
<UserpilotRouteTracker />
|
||||
<KBarCommandPalette />
|
||||
<NotificationProvider>
|
||||
<ErrorModalProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</ErrorModalProvider>
|
||||
</NotificationProvider>
|
||||
</KBarCommandPaletteProvider>
|
||||
<UserpilotRouteTracker />
|
||||
<NotificationProvider>
|
||||
<ErrorModalProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<Route exact path="/" component={Home} />
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</ErrorModalProvider>
|
||||
</NotificationProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
</ConfigProvider>
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import ROUTES from 'constants/routes';
|
||||
import MessagingQueues from 'pages/MessagingQueues';
|
||||
import MeterExplorer from 'pages/MeterExplorer';
|
||||
import { RouteProps } from 'react-router-dom';
|
||||
|
||||
import {
|
||||
@@ -435,28 +434,6 @@ const routes: AppRoutes[] = [
|
||||
key: 'METRICS_EXPLORER_VIEWS',
|
||||
isPrivate: true,
|
||||
},
|
||||
|
||||
{
|
||||
path: ROUTES.METER,
|
||||
exact: true,
|
||||
component: MeterExplorer,
|
||||
key: 'METER',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.METER_EXPLORER,
|
||||
exact: true,
|
||||
component: MeterExplorer,
|
||||
key: 'METER_EXPLORER',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.METER_EXPLORER_VIEWS,
|
||||
exact: true,
|
||||
component: MeterExplorer,
|
||||
key: 'METER_EXPLORER_VIEWS',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.API_MONITORING,
|
||||
exact: true,
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
import { ApiV5Instance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { QueryRangePayloadV5 } from 'api/v5/v5';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery';
|
||||
|
||||
interface ISubstituteVars {
|
||||
compositeQuery: ICompositeMetricQuery;
|
||||
}
|
||||
|
||||
export const getSubstituteVars = async (
|
||||
props?: Partial<QueryRangePayloadV5>,
|
||||
signal?: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<SuccessResponseV2<ISubstituteVars>> => {
|
||||
try {
|
||||
const response = await ApiV5Instance.post<{ data: ISubstituteVars }>(
|
||||
'/substitute_vars',
|
||||
props,
|
||||
{
|
||||
signal,
|
||||
headers,
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
@@ -1,115 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { ApiBaseInstance } from 'api';
|
||||
|
||||
import { getFieldKeys } from '../getFieldKeys';
|
||||
|
||||
// Mock the API instance
|
||||
jest.mock('api', () => ({
|
||||
ApiBaseInstance: {
|
||||
get: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('getFieldKeys API', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
const mockSuccessResponse = {
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
keys: {
|
||||
'service.name': [],
|
||||
'http.status_code': [],
|
||||
},
|
||||
complete: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
it('should call API with correct parameters when no args provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
|
||||
// Call function with no parameters
|
||||
await getFieldKeys();
|
||||
|
||||
// Verify API was called correctly with empty params object
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: {},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call API with signal parameter when provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
|
||||
// Call function with signal parameter
|
||||
await getFieldKeys('traces');
|
||||
|
||||
// Verify API was called with signal parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: { signal: 'traces' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call API with name parameter when provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
keys: { service: [] },
|
||||
complete: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Call function with name parameter
|
||||
await getFieldKeys(undefined, 'service');
|
||||
|
||||
// Verify API was called with name parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: { name: 'service' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call API with both signal and name when provided', async () => {
|
||||
// Mock successful API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
keys: { service: [] },
|
||||
complete: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Call function with both parameters
|
||||
await getFieldKeys('logs', 'service');
|
||||
|
||||
// Verify API was called with both parameters
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
|
||||
params: { signal: 'logs', name: 'service' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should return properly formatted response', async () => {
|
||||
// Mock API to return our response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldKeys('traces');
|
||||
|
||||
// Verify the returned structure matches SuccessResponseV2 format
|
||||
expect(result).toEqual({
|
||||
httpStatusCode: 200,
|
||||
data: mockSuccessResponse.data.data,
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,214 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { ApiBaseInstance } from 'api';
|
||||
|
||||
import { getFieldValues } from '../getFieldValues';
|
||||
|
||||
// Mock the API instance
|
||||
jest.mock('api', () => ({
|
||||
ApiBaseInstance: {
|
||||
get: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('getFieldValues API', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should call the API with correct parameters (no options)', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
values: {
|
||||
stringValues: ['frontend', 'backend'],
|
||||
},
|
||||
complete: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Call function without parameters
|
||||
await getFieldValues();
|
||||
|
||||
// Verify API was called correctly with empty params
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: {},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with signal parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
values: {
|
||||
stringValues: ['frontend', 'backend'],
|
||||
},
|
||||
complete: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Call function with signal parameter
|
||||
await getFieldValues('traces');
|
||||
|
||||
// Verify API was called with signal parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { signal: 'traces' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with name parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
values: {
|
||||
stringValues: ['frontend', 'backend'],
|
||||
},
|
||||
complete: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Call function with name parameter
|
||||
await getFieldValues(undefined, 'service.name');
|
||||
|
||||
// Verify API was called with name parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { name: 'service.name' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with value parameter', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
values: {
|
||||
stringValues: ['frontend'],
|
||||
},
|
||||
complete: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Call function with value parameter
|
||||
await getFieldValues(undefined, 'service.name', 'front');
|
||||
|
||||
// Verify API was called with value parameter
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: { name: 'service.name', searchText: 'front' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call the API with time range parameters', async () => {
|
||||
// Mock API response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
values: {
|
||||
stringValues: ['frontend', 'backend'],
|
||||
},
|
||||
complete: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Call function with time range parameters
|
||||
const startUnixMilli = 1625097600000000; // Note: nanoseconds
|
||||
const endUnixMilli = 1625184000000000;
|
||||
await getFieldValues(
|
||||
'logs',
|
||||
'service.name',
|
||||
undefined,
|
||||
startUnixMilli,
|
||||
endUnixMilli,
|
||||
);
|
||||
|
||||
// Verify API was called with time range parameters (converted to milliseconds)
|
||||
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
|
||||
params: {
|
||||
signal: 'logs',
|
||||
name: 'service.name',
|
||||
startUnixMilli: '1625097600', // Should be converted to seconds (divided by 1000000)
|
||||
endUnixMilli: '1625184000', // Should be converted to seconds (divided by 1000000)
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should normalize the response values', async () => {
|
||||
// Mock API response with multiple value types
|
||||
const mockResponse = {
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
values: {
|
||||
stringValues: ['frontend', 'backend'],
|
||||
numberValues: [200, 404],
|
||||
boolValues: [true, false],
|
||||
},
|
||||
complete: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldValues('traces', 'mixed.values');
|
||||
|
||||
// Verify the response has normalized values array
|
||||
expect(result.data?.normalizedValues).toContain('frontend');
|
||||
expect(result.data?.normalizedValues).toContain('backend');
|
||||
expect(result.data?.normalizedValues).toContain('200');
|
||||
expect(result.data?.normalizedValues).toContain('404');
|
||||
expect(result.data?.normalizedValues).toContain('true');
|
||||
expect(result.data?.normalizedValues).toContain('false');
|
||||
expect(result.data?.normalizedValues?.length).toBe(6);
|
||||
});
|
||||
|
||||
it('should return a properly formatted success response', async () => {
|
||||
// Create mock response
|
||||
const mockApiResponse = {
|
||||
status: 200,
|
||||
data: {
|
||||
status: 'success',
|
||||
data: {
|
||||
values: {
|
||||
stringValues: ['frontend', 'backend'],
|
||||
},
|
||||
complete: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Mock API to return our response
|
||||
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
|
||||
|
||||
// Call the function
|
||||
const result = await getFieldValues('traces', 'service.name');
|
||||
|
||||
// Verify the returned structure matches SuccessResponseV2 format
|
||||
expect(result).toEqual({
|
||||
httpStatusCode: 200,
|
||||
data: expect.objectContaining({
|
||||
values: expect.any(Object),
|
||||
normalizedValues: expect.any(Array),
|
||||
complete: true,
|
||||
}),
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,38 +0,0 @@
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { FieldKeyResponse } from 'types/api/dynamicVariables/getFieldKeys';
|
||||
|
||||
/**
|
||||
* Get field keys for a given signal type
|
||||
* @param signal Type of signal (traces, logs, metrics)
|
||||
* @param name Optional search text
|
||||
*/
|
||||
export const getFieldKeys = async (
|
||||
signal?: 'traces' | 'logs' | 'metrics',
|
||||
name?: string,
|
||||
): Promise<SuccessResponseV2<FieldKeyResponse>> => {
|
||||
const params: Record<string, string> = {};
|
||||
|
||||
if (signal) {
|
||||
params.signal = encodeURIComponent(signal);
|
||||
}
|
||||
|
||||
if (name) {
|
||||
params.name = encodeURIComponent(name);
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await ApiBaseInstance.get('/fields/keys', { params });
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getFieldKeys;
|
||||
@@ -1,87 +0,0 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import { ApiBaseInstance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
|
||||
|
||||
/**
|
||||
* Get field values for a given signal type and field name
|
||||
* @param signal Type of signal (traces, logs, metrics)
|
||||
* @param name Name of the attribute for which values are being fetched
|
||||
* @param value Optional search text
|
||||
* @param existingQuery Optional existing query - across all present dynamic variables
|
||||
*/
|
||||
export const getFieldValues = async (
|
||||
signal?: 'traces' | 'logs' | 'metrics',
|
||||
name?: string,
|
||||
searchText?: string,
|
||||
startUnixMilli?: number,
|
||||
endUnixMilli?: number,
|
||||
existingQuery?: string,
|
||||
): Promise<SuccessResponseV2<FieldValueResponse>> => {
|
||||
const params: Record<string, string> = {};
|
||||
|
||||
if (signal) {
|
||||
params.signal = encodeURIComponent(signal);
|
||||
}
|
||||
|
||||
if (name) {
|
||||
params.name = encodeURIComponent(name);
|
||||
}
|
||||
|
||||
if (searchText) {
|
||||
params.searchText = encodeURIComponent(searchText);
|
||||
}
|
||||
|
||||
if (startUnixMilli) {
|
||||
params.startUnixMilli = Math.floor(startUnixMilli / 1000000).toString();
|
||||
}
|
||||
|
||||
if (endUnixMilli) {
|
||||
params.endUnixMilli = Math.floor(endUnixMilli / 1000000).toString();
|
||||
}
|
||||
|
||||
if (existingQuery) {
|
||||
params.existingQuery = existingQuery;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await ApiBaseInstance.get('/fields/values', { params });
|
||||
|
||||
// Normalize values from different types (stringValues, boolValues, etc.)
|
||||
if (response.data?.data?.values) {
|
||||
const allValues: string[] = [];
|
||||
Object.entries(response.data?.data?.values).forEach(
|
||||
([key, valueArray]: [string, any]) => {
|
||||
// Skip RelatedValues as they should be kept separate
|
||||
if (key === 'relatedValues') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (Array.isArray(valueArray)) {
|
||||
allValues.push(...valueArray.map(String));
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Add a normalized values array to the response
|
||||
response.data.data.normalizedValues = allValues;
|
||||
|
||||
// Add relatedValues to the response as per FieldValueResponse
|
||||
if (response.data?.data?.values?.relatedValues) {
|
||||
response.data.data.relatedValues =
|
||||
response.data?.data?.values?.relatedValues;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getFieldValues;
|
||||
@@ -2,7 +2,7 @@ import { ApiV3Instance, ApiV4Instance } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ENTITY_VERSION_V4 } from 'constants/app';
|
||||
import { ErrorResponse, SuccessResponse, Warning } from 'types/api';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
MetricRangePayloadV3,
|
||||
QueryRangePayload,
|
||||
@@ -13,9 +13,7 @@ export const getMetricsQueryRange = async (
|
||||
version: string,
|
||||
signal: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<
|
||||
(SuccessResponse<MetricRangePayloadV3> & { warning?: Warning }) | ErrorResponse
|
||||
> => {
|
||||
): Promise<SuccessResponse<MetricRangePayloadV3> | ErrorResponse> => {
|
||||
try {
|
||||
if (version && version === ENTITY_VERSION_V4) {
|
||||
const response = await ApiV4Instance.post('/query_range', props, {
|
||||
|
||||
@@ -17,7 +17,6 @@ export const getAggregateAttribute = async ({
|
||||
aggregateOperator,
|
||||
searchText,
|
||||
dataSource,
|
||||
source,
|
||||
}: IGetAggregateAttributePayload): Promise<
|
||||
SuccessResponse<IQueryAutocompleteResponse> | ErrorResponse
|
||||
> => {
|
||||
@@ -28,7 +27,7 @@ export const getAggregateAttribute = async ({
|
||||
`/autocomplete/aggregate_attributes?${createQueryParams({
|
||||
aggregateOperator,
|
||||
searchText,
|
||||
dataSource: source === 'meter' ? 'meter' : dataSource,
|
||||
dataSource,
|
||||
})}`,
|
||||
);
|
||||
|
||||
|
||||
@@ -14,7 +14,6 @@ export const getKeySuggestions = (
|
||||
metricName = '',
|
||||
fieldContext = '',
|
||||
fieldDataType = '',
|
||||
signalSource = '',
|
||||
} = props;
|
||||
|
||||
const encodedSignal = encodeURIComponent(signal);
|
||||
@@ -22,9 +21,8 @@ export const getKeySuggestions = (
|
||||
const encodedMetricName = encodeURIComponent(metricName);
|
||||
const encodedFieldContext = encodeURIComponent(fieldContext);
|
||||
const encodedFieldDataType = encodeURIComponent(fieldDataType);
|
||||
const encodedSource = encodeURIComponent(signalSource);
|
||||
|
||||
return axios.get(
|
||||
`/fields/keys?signal=${encodedSignal}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&fieldContext=${encodedFieldContext}&fieldDataType=${encodedFieldDataType}&source=${encodedSource}`,
|
||||
`/fields/keys?signal=${encodedSignal}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&fieldContext=${encodedFieldContext}&fieldDataType=${encodedFieldDataType}`,
|
||||
);
|
||||
};
|
||||
|
||||
@@ -8,15 +8,13 @@ import {
|
||||
export const getValueSuggestions = (
|
||||
props: QueryKeyValueRequestProps,
|
||||
): Promise<AxiosResponse<QueryKeyValueSuggestionsResponseProps>> => {
|
||||
const { signal, key, searchText, signalSource, metricName } = props;
|
||||
const { signal, key, searchText } = props;
|
||||
|
||||
const encodedSignal = encodeURIComponent(signal);
|
||||
const encodedKey = encodeURIComponent(key);
|
||||
const encodedMetricName = encodeURIComponent(metricName || '');
|
||||
const encodedSearchText = encodeURIComponent(searchText);
|
||||
const encodedSource = encodeURIComponent(signalSource || '');
|
||||
|
||||
return axios.get(
|
||||
`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&source=${encodedSource}`,
|
||||
`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}`,
|
||||
);
|
||||
};
|
||||
|
||||
@@ -4,6 +4,6 @@ import { AllViewsProps } from 'types/api/saveViews/types';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
export const getAllViews = (
|
||||
sourcepage: DataSource | 'meter',
|
||||
sourcepage: DataSource,
|
||||
): Promise<AxiosResponse<AllViewsProps>> =>
|
||||
axios.get(`/explorer/views?sourcePage=${sourcepage}`);
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
import { ApiV2Instance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadProps } from 'types/api/settings/getRetention';
|
||||
|
||||
// Only works for logs
|
||||
const getRetentionV2 = async (): Promise<
|
||||
SuccessResponseV2<PayloadProps<'logs'>>
|
||||
> => {
|
||||
try {
|
||||
const response = await ApiV2Instance.get<PayloadProps<'logs'>>(
|
||||
`/settings/ttl`,
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getRetentionV2;
|
||||
@@ -1,14 +1,14 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import { PayloadPropsV2, Props } from 'types/api/settings/setRetention';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { PayloadProps, Props } from 'types/api/settings/setRetention';
|
||||
|
||||
const setRetention = async (
|
||||
props: Props,
|
||||
): Promise<SuccessResponseV2<PayloadPropsV2>> => {
|
||||
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await axios.post<PayloadPropsV2>(
|
||||
const response = await axios.post<PayloadProps>(
|
||||
`/settings/ttl?duration=${props.totalDuration}&type=${props.type}${
|
||||
props.coldStorage
|
||||
? `&coldStorage=${props.coldStorage}&toColdDuration=${props.toColdDuration}`
|
||||
@@ -17,11 +17,13 @@ const setRetention = async (
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: 'Success',
|
||||
payload: response.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,32 +0,0 @@
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadPropsV2, PropsV2 } from 'types/api/settings/setRetention';

const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDuration,
ttlConditions,
}: PropsV2): Promise<SuccessResponseV2<PayloadPropsV2>> => {
try {
const response = await ApiV2Instance.post<PayloadPropsV2>(`/settings/ttl`, {
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDuration,
ttlConditions,
});

return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default setRetentionV2;
@@ -1,31 +0,0 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/thirdPartyApis/listOverview';

const listOverview = async (
props: Props,
): Promise<SuccessResponseV2<PayloadProps>> => {
const { start, end, show_ip: showIp, filter } = props;
try {
const response = await ApiBaseInstance.post(
`/third-party-apis/overview/list`,
{
start,
end,
show_ip: showIp,
filter,
},
);

return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default listOverview;
@@ -1,64 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import { ExportRawDataProps } from 'types/api/exportRawData/getExportRawData';

export const downloadExportData = async (
props: ExportRawDataProps,
): Promise<void> => {
try {
const queryParams = new URLSearchParams();

queryParams.append('start', String(props.start));
queryParams.append('end', String(props.end));
queryParams.append('filter', props.filter);
props.columns.forEach((col) => {
queryParams.append('columns', col);
});
queryParams.append('order_by', props.orderBy);
queryParams.append('limit', String(props.limit));
queryParams.append('format', props.format);

const response = await axios.get<Blob>(`export_raw_data?${queryParams}`, {
responseType: 'blob', // Important: tell axios to handle response as blob
decompress: true, // Enable automatic decompression
headers: {
Accept: 'application/octet-stream', // Tell server we expect binary data
},
timeout: 0,
});

// Only proceed if the response status is 200
if (response.status !== 200) {
throw new Error(
`Failed to download data: server returned status ${response.status}`,
);
}
// Create blob URL from response data
const blob = new Blob([response.data], { type: 'application/octet-stream' });
const url = window.URL.createObjectURL(blob);

// Create and configure download link
const link = document.createElement('a');
link.href = url;

// Get filename from Content-Disposition header or generate timestamped default
const filename =
response.headers['content-disposition']
?.split('filename=')[1]
?.replace(/["']/g, '') || `exported_data.${props.format || 'txt'}`;

link.setAttribute('download', filename);

// Trigger download
document.body.appendChild(link);
link.click();
link.remove();
URL.revokeObjectURL(url);
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default downloadExportData;
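Usage sketch for the removed downloadExportData helper; the property names follow the queryParams.append calls in the deleted file, and the concrete values are illustrative only:

// Illustrative props for the removed helper. Field names come from the
// deleted code above; the values (columns, filter, range) are made up.
const exportProps = {
  start: 1754623641000,
  end: 1754645241000,
  filter: "service.name = 'adservice'",
  columns: ['timestamp', 'body', 'severity_text'],
  orderBy: 'timestamp',
  limit: 10000,
  format: 'csv',
};

// The helper builds this query string, fetches the blob, and triggers an
// <a download> click, so a caller only awaits completion:
// await downloadExportData(exportProps);
const queryParams = new URLSearchParams();
queryParams.append('start', String(exportProps.start));
queryParams.append('format', exportProps.format);
console.log(`export_raw_data?${queryParams}`); // export_raw_data?start=1754623641000&format=csv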
@@ -2,7 +2,7 @@ import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { Props, Signup as PayloadProps } from 'types/api/user/loginPrecheck';
import { PayloadProps, Props } from 'types/api/user/loginPrecheck';

const loginPrecheck = async (
props: Props,

@@ -1,21 +1,25 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Signup } from 'types/api/user/loginPrecheck';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/user/loginPrecheck';
import { Props } from 'types/api/user/signup';

const signup = async (props: Props): Promise<SuccessResponseV2<Signup>> => {
const signup = async (
props: Props,
): Promise<SuccessResponse<null | PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post<PayloadProps>(`/register`, {
const response = await axios.post(`/register`, {
...props,
});
return {
httpStatusCode: response.status,
data: response.data.data,
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data?.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
return ErrorResponseHandler(error as AxiosError);
}
};

@@ -124,7 +124,7 @@ export const FUNCTION_NAMES: Record<string, FunctionName> = {
RUNNING_DIFF: 'runningDiff',
LOG2: 'log2',
LOG10: 'log10',
CUM_SUM: 'cumulativeSum',
CUM_SUM: 'cumSum',
EWMA3: 'ewma3',
EWMA5: 'ewma5',
EWMA7: 'ewma7',
@@ -1,284 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import {
|
||||
MetricRangePayloadV5,
|
||||
QueryBuilderFormula,
|
||||
QueryRangeRequestV5,
|
||||
QueryRangeResponseV5,
|
||||
RequestType,
|
||||
ScalarData,
|
||||
TelemetryFieldKey,
|
||||
TimeSeries,
|
||||
TimeSeriesData,
|
||||
TimeSeriesValue,
|
||||
} from 'types/api/v5/queryRange';
|
||||
|
||||
import { convertV5ResponseToLegacy } from './convertV5Response';
|
||||
|
||||
describe('convertV5ResponseToLegacy', () => {
|
||||
function makeBaseSuccess<T>(
|
||||
payload: T,
|
||||
params: QueryRangeRequestV5,
|
||||
): SuccessResponse<T, QueryRangeRequestV5> {
|
||||
return {
|
||||
statusCode: 200,
|
||||
message: 'success',
|
||||
payload,
|
||||
error: null,
|
||||
params,
|
||||
};
|
||||
}
|
||||
|
||||
function makeBaseParams(
|
||||
requestType: RequestType,
|
||||
queries: QueryRangeRequestV5['compositeQuery']['queries'],
|
||||
): QueryRangeRequestV5 {
|
||||
return {
|
||||
schemaVersion: 'v1',
|
||||
start: 1,
|
||||
end: 2,
|
||||
requestType,
|
||||
compositeQuery: { queries },
|
||||
variables: {},
|
||||
formatOptions: { formatTableResultForUI: false, fillGaps: false },
|
||||
};
|
||||
}
|
||||
|
||||
it('converts time_series response into legacy series structure', () => {
|
||||
const timeSeries: TimeSeriesData = {
|
||||
queryName: 'A',
|
||||
aggregations: [
|
||||
{
|
||||
index: 0,
|
||||
alias: '__result_0',
|
||||
meta: {},
|
||||
series: [
|
||||
({
|
||||
labels: [
|
||||
{
|
||||
key: ({ name: 'service.name' } as unknown) as TelemetryFieldKey,
|
||||
value: 'adservice',
|
||||
},
|
||||
],
|
||||
values: [
|
||||
({ timestamp: 1000, value: 10 } as unknown) as TimeSeriesValue,
|
||||
({ timestamp: 2000, value: 12 } as unknown) as TimeSeriesValue,
|
||||
],
|
||||
} as unknown) as TimeSeries,
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const v5Data: QueryRangeResponseV5 = {
|
||||
type: 'time_series',
|
||||
data: { results: [timeSeries] },
|
||||
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
|
||||
};
|
||||
|
||||
const params = makeBaseParams('time_series', [
|
||||
{
|
||||
type: 'builder_query',
|
||||
spec: {
|
||||
name: 'A',
|
||||
signal: 'traces',
|
||||
stepInterval: 60,
|
||||
disabled: false,
|
||||
aggregations: [{ expression: 'count()' }],
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const input: SuccessResponse<
|
||||
MetricRangePayloadV5,
|
||||
QueryRangeRequestV5
|
||||
> = makeBaseSuccess({ data: v5Data }, params);
|
||||
|
||||
const legendMap = { A: '{{service.name}}' };
|
||||
const result = convertV5ResponseToLegacy(input, legendMap, false);
|
||||
|
||||
expect(result.payload.data.resultType).toBe('time_series');
|
||||
expect(result.payload.data.result).toHaveLength(1);
|
||||
const q = result.payload.data.result[0];
|
||||
expect(q.queryName).toBe('A');
|
||||
expect(q.legend).toBe('{{service.name}}');
|
||||
expect(q.series?.[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
labels: { 'service.name': 'adservice' },
|
||||
values: [
|
||||
{ timestamp: 1000, value: '10' },
|
||||
{ timestamp: 2000, value: '12' },
|
||||
],
|
||||
metaData: expect.objectContaining({
|
||||
alias: '__result_0',
|
||||
index: 0,
|
||||
queryName: 'A',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('converts scalar to legacy table (formatForWeb=false) with names/ids resolved from aggregations', () => {
|
||||
const scalar: ScalarData = {
|
||||
columns: [
|
||||
// group column
|
||||
({
|
||||
name: 'service.name',
|
||||
queryName: 'A',
|
||||
aggregationIndex: 0,
|
||||
columnType: 'group',
|
||||
} as unknown) as ScalarData['columns'][number],
|
||||
// aggregation 0
|
||||
({
|
||||
name: '__result_0',
|
||||
queryName: 'A',
|
||||
aggregationIndex: 0,
|
||||
columnType: 'aggregation',
|
||||
} as unknown) as ScalarData['columns'][number],
|
||||
// aggregation 1
|
||||
({
|
||||
name: '__result_1',
|
||||
queryName: 'A',
|
||||
aggregationIndex: 1,
|
||||
columnType: 'aggregation',
|
||||
} as unknown) as ScalarData['columns'][number],
|
||||
// formula F1
|
||||
({
|
||||
name: '__result',
|
||||
queryName: 'F1',
|
||||
aggregationIndex: 0,
|
||||
columnType: 'aggregation',
|
||||
} as unknown) as ScalarData['columns'][number],
|
||||
],
|
||||
data: [['adservice', 606, 1.452, 151.5]],
|
||||
};
|
||||
|
||||
const v5Data: QueryRangeResponseV5 = {
|
||||
type: 'scalar',
|
||||
data: { results: [scalar] },
|
||||
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
|
||||
};
|
||||
|
||||
const params = makeBaseParams('scalar', [
|
||||
{
|
||||
type: 'builder_query',
|
||||
spec: {
|
||||
name: 'A',
|
||||
signal: 'traces',
|
||||
stepInterval: 60,
|
||||
disabled: false,
|
||||
aggregations: [
|
||||
{ expression: 'count()' },
|
||||
{ expression: 'avg(app.ads.count)', alias: 'avg' },
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'builder_formula',
|
||||
spec: ({
|
||||
name: 'F1',
|
||||
expression: 'A * 0.25',
|
||||
} as unknown) as QueryBuilderFormula,
|
||||
},
|
||||
]);
|
||||
|
||||
const input: SuccessResponse<
|
||||
MetricRangePayloadV5,
|
||||
QueryRangeRequestV5
|
||||
> = makeBaseSuccess({ data: v5Data }, params);
|
||||
const legendMap = { A: '{{service.name}}', F1: '' };
|
||||
const result = convertV5ResponseToLegacy(input, legendMap, false);
|
||||
|
||||
expect(result.payload.data.resultType).toBe('scalar');
|
||||
const [tableEntry] = result.payload.data.result;
|
||||
expect(tableEntry.table?.columns).toEqual([
|
||||
{
|
||||
name: 'service.name',
|
||||
queryName: 'A',
|
||||
isValueColumn: false,
|
||||
id: 'service.name',
|
||||
},
|
||||
{ name: 'count()', queryName: 'A', isValueColumn: true, id: 'A.count()' },
|
||||
{
|
||||
name: 'avg',
|
||||
queryName: 'A',
|
||||
isValueColumn: true,
|
||||
id: 'A.avg(app.ads.count)',
|
||||
},
|
||||
{ name: 'F1', queryName: 'F1', isValueColumn: true, id: 'F1' },
|
||||
]);
|
||||
expect(tableEntry.table?.rows?.[0]).toEqual({
|
||||
data: {
|
||||
'service.name': 'adservice',
|
||||
'A.count()': 606,
|
||||
'A.avg(app.ads.count)': 1.452,
|
||||
F1: 151.5,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('converts scalar with formatForWeb=true to UI-friendly table', () => {
|
||||
const scalar: ScalarData = {
|
||||
columns: [
|
||||
{
|
||||
name: 'service.name',
|
||||
queryName: 'A',
|
||||
aggregationIndex: 0,
|
||||
columnType: 'group',
|
||||
} as any,
|
||||
{
|
||||
name: '__result_0',
|
||||
queryName: 'A',
|
||||
aggregationIndex: 0,
|
||||
columnType: 'aggregation',
|
||||
} as any,
|
||||
],
|
||||
data: [['adservice', 580]],
|
||||
};
|
||||
|
||||
const v5Data: QueryRangeResponseV5 = {
|
||||
type: 'scalar',
|
||||
data: { results: [scalar] },
|
||||
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
|
||||
};
|
||||
|
||||
const params = makeBaseParams('scalar', [
|
||||
{
|
||||
type: 'builder_query',
|
||||
spec: {
|
||||
name: 'A',
|
||||
signal: 'traces',
|
||||
stepInterval: 60,
|
||||
disabled: false,
|
||||
aggregations: [{ expression: 'count()' }],
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const input: SuccessResponse<
|
||||
MetricRangePayloadV5,
|
||||
QueryRangeRequestV5
|
||||
> = makeBaseSuccess({ data: v5Data }, params);
|
||||
const legendMap = { A: '{{service.name}}' };
|
||||
const result = convertV5ResponseToLegacy(input, legendMap, true);
|
||||
|
||||
expect(result.payload.data.resultType).toBe('scalar');
|
||||
const [tableEntry] = result.payload.data.result;
|
||||
expect(tableEntry.table?.columns).toEqual([
|
||||
{
|
||||
name: 'service.name',
|
||||
queryName: 'A',
|
||||
isValueColumn: false,
|
||||
id: 'service.name',
|
||||
},
|
||||
// Single aggregation: name resolves to legend, id resolves to queryName
|
||||
{ name: '{{service.name}}', queryName: 'A', isValueColumn: true, id: 'A' },
|
||||
]);
|
||||
expect(tableEntry.table?.rows?.[0]).toEqual({
|
||||
data: {
|
||||
'service.name': 'adservice',
|
||||
A: 580,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,5 @@
import { cloneDeep, isEmpty } from 'lodash-es';
import { SuccessResponse, Warning } from 'types/api';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadV3 } from 'types/api/metrics/getQueryRange';
import {
DistributionData,
@@ -28,18 +28,14 @@ function getColName(
const aggregationsCount = aggregationPerQuery[col.queryName]?.length || 0;
const isSingleAggregation = aggregationsCount === 1;

if (aggregationsCount > 0) {
// Single aggregation: Priority is alias > legend > expression
if (isSingleAggregation) {
return alias || legend || expression || col.queryName;
}

// Multiple aggregations: Each follows single rules BUT never shows legend
// Priority: alias > expression (legend is ignored for multiple aggregations)
return alias || expression || col.queryName;
// Single aggregation: Priority is alias > legend > expression
if (isSingleAggregation) {
return alias || legend || expression;
}

return legend || col.queryName;
// Multiple aggregations: Each follows single rules BUT never shows legend
// Priority: alias > expression (legend is ignored for multiple aggregations)
return alias || expression;
}

function getColId(
@@ -52,14 +48,7 @@ function getColId(
const aggregation =
aggregationPerQuery?.[col.queryName]?.[col.aggregationIndex];
const expression = aggregation?.expression || '';
const aggregationsCount = aggregationPerQuery[col.queryName]?.length || 0;
const isMultipleAggregations = aggregationsCount > 1;

if (isMultipleAggregations && expression) {
return `${col.queryName}.${expression}`;
}

return col.queryName;
return `${col.queryName}.${expression}`;
}
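A standalone sketch of the column naming and id rules described in the comments above, following the variant that falls back to col.queryName; the types are simplified local stand-ins, and the expected outputs mirror the removed convertV5Response test:

// Simplified stand-ins for the project's column/aggregation types.
type Agg = { expression: string; alias?: string };

function colName(
  queryName: string,
  agg: Agg,
  legend: string,
  aggregationsCount: number,
): string {
  if (aggregationsCount === 1) {
    // Single aggregation: alias > legend > expression
    return agg.alias || legend || agg.expression || queryName;
  }
  // Multiple aggregations: legend is ignored
  return agg.alias || agg.expression || queryName;
}

function colId(queryName: string, agg: Agg, aggregationsCount: number): string {
  // Multiple aggregations get "<queryName>.<expression>" ids; a single one keeps the query name.
  return aggregationsCount > 1 && agg.expression
    ? `${queryName}.${agg.expression}`
    : queryName;
}

// Mirrors the expectations in the removed test: two aggregations on query A
// resolve to id "A.count()", an aliased aggregation shows its alias, and a
// single aggregation picks up the legend.
console.log(colId('A', { expression: 'count()' }, 2)); // A.count()
console.log(colName('A', { expression: 'avg(app.ads.count)', alias: 'avg' }, '{{service.name}}', 2)); // avg
console.log(colName('A', { expression: 'count()' }, '{{service.name}}', 1)); // {{service.name}}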
/**
@@ -352,7 +341,7 @@ export function convertV5ResponseToLegacy(
v5Response: SuccessResponse<MetricRangePayloadV5>,
legendMap: Record<string, string>,
formatForWeb?: boolean,
): SuccessResponse<MetricRangePayloadV3> & { warning?: Warning } {
): SuccessResponse<MetricRangePayloadV3> {
const { payload, params } = v5Response;
const v5Data = payload?.data;

@@ -378,18 +367,14 @@ export function convertV5ResponseToLegacy(
legendMap,
aggregationPerQuery,
);

return {
...v5Response,
payload: {
data: {
resultType: 'scalar',
result: webTables,
warnings: v5Data?.data?.warning || [],
},
warning: v5Data?.warning || undefined,
},
warning: v5Data?.warning || undefined,
};
}

@@ -405,7 +390,6 @@ export function convertV5ResponseToLegacy(
...v5Response,
payload: {
data: convertedData,
warning: v5Response.payload?.data?.warning || undefined,
},
};
|
||||
|
||||
@@ -1,637 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string, simple-import-sort/imports, @typescript-eslint/indent, no-mixed-spaces-and-tabs */
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import {
|
||||
ClickHouseQuery,
|
||||
LogAggregation,
|
||||
LogBuilderQuery,
|
||||
MetricBuilderQuery,
|
||||
PromQuery,
|
||||
QueryBuilderFormula as V5QueryBuilderFormula,
|
||||
QueryEnvelope,
|
||||
QueryRangePayloadV5,
|
||||
} from 'types/api/v5/queryRange';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import { prepareQueryRangePayloadV5 } from './prepareQueryRangePayloadV5';
|
||||
|
||||
jest.mock('lib/getStartEndRangeTime', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(() => ({ start: '100', end: '200' })),
|
||||
}));
|
||||
|
||||
describe('prepareQueryRangePayloadV5', () => {
|
||||
const start = 1_710_000_000; // seconds
|
||||
const end = 1_710_000_600; // seconds
|
||||
|
||||
const baseBuilderQuery = (
|
||||
overrides?: Partial<IBuilderQuery>,
|
||||
): IBuilderQuery => ({
|
||||
queryName: 'A',
|
||||
dataSource: DataSource.METRICS,
|
||||
aggregations: [
|
||||
{
|
||||
metricName: 'cpu_usage',
|
||||
temporality: '',
|
||||
timeAggregation: 'sum',
|
||||
spaceAggregation: 'avg',
|
||||
reduceTo: 'avg',
|
||||
},
|
||||
],
|
||||
timeAggregation: 'sum',
|
||||
spaceAggregation: 'avg',
|
||||
temporality: '',
|
||||
functions: [
|
||||
{
|
||||
name: 'timeShift',
|
||||
args: [{ value: '5m' }],
|
||||
},
|
||||
],
|
||||
filter: { expression: '' },
|
||||
filters: { items: [], op: 'AND' },
|
||||
groupBy: [],
|
||||
expression: 'A',
|
||||
disabled: false,
|
||||
having: [],
|
||||
limit: null,
|
||||
stepInterval: 600,
|
||||
orderBy: [],
|
||||
reduceTo: 'avg',
|
||||
legend: 'Legend A',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const baseFormula = (
|
||||
overrides?: Partial<IBuilderFormula>,
|
||||
): IBuilderFormula => ({
|
||||
expression: 'A + 1',
|
||||
disabled: false,
|
||||
queryName: 'F1',
|
||||
legend: 'Formula Legend',
|
||||
limit: undefined,
|
||||
having: [],
|
||||
stepInterval: undefined,
|
||||
orderBy: [],
|
||||
...overrides,
|
||||
});
|
||||
|
||||
it('builds payload for builder queries with formulas and variables', () => {
|
||||
const props: GetQueryResultsProps = {
|
||||
query: {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
id: 'q1',
|
||||
unit: undefined,
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
builder: {
|
||||
queryData: [baseBuilderQuery()],
|
||||
queryFormulas: [baseFormula()],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
},
|
||||
graphType: PANEL_TYPES.TIME_SERIES,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
start,
|
||||
end,
|
||||
variables: { svc: 'api', count: 5, flag: true },
|
||||
fillGaps: true,
|
||||
};
|
||||
|
||||
const result = prepareQueryRangePayloadV5(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
legendMap: { A: 'Legend A', F1: 'Formula Legend' },
|
||||
queryPayload: expect.objectContaining({
|
||||
compositeQuery: expect.objectContaining({
|
||||
queries: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
type: 'builder_query',
|
||||
spec: expect.objectContaining({
|
||||
name: 'A',
|
||||
signal: 'metrics',
|
||||
stepInterval: 600,
|
||||
functions: [{ name: 'timeShift', args: [{ value: '5m' }] }],
|
||||
aggregations: [
|
||||
expect.objectContaining({
|
||||
metricName: 'cpu_usage',
|
||||
timeAggregation: 'sum',
|
||||
spaceAggregation: 'avg',
|
||||
reduceTo: undefined,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
}),
|
||||
expect.objectContaining({
|
||||
type: 'builder_formula',
|
||||
spec: expect.objectContaining({
|
||||
name: 'F1',
|
||||
expression: 'A + 1',
|
||||
legend: 'Formula Legend',
|
||||
}),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
requestType: 'time_series',
|
||||
formatOptions: expect.objectContaining({
|
||||
formatTableResultForUI: false,
|
||||
fillGaps: true,
|
||||
}),
|
||||
start: start * 1000,
|
||||
end: end * 1000,
|
||||
variables: expect.objectContaining({
|
||||
svc: { value: 'api' },
|
||||
count: { value: 5 },
|
||||
flag: { value: true },
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
// Legend map combines builder and formulas
|
||||
expect(result.legendMap).toEqual({ A: 'Legend A', F1: 'Formula Legend' });
|
||||
|
||||
const payload: QueryRangePayloadV5 = result.queryPayload;
|
||||
|
||||
expect(payload.schemaVersion).toBe('v1');
|
||||
expect(payload.start).toBe(start * 1000);
|
||||
expect(payload.end).toBe(end * 1000);
|
||||
expect(payload.requestType).toBe('time_series');
|
||||
expect(payload.formatOptions?.formatTableResultForUI).toBe(false);
|
||||
expect(payload.formatOptions?.fillGaps).toBe(true);
|
||||
|
||||
// Variables mapped as { key: { value } }
|
||||
expect(payload.variables).toEqual({
|
||||
svc: { value: 'api' },
|
||||
count: { value: 5 },
|
||||
flag: { value: true },
|
||||
});
|
||||
|
||||
// Queries include one builder_query and one builder_formula
|
||||
expect(payload.compositeQuery.queries).toHaveLength(2);
|
||||
|
||||
const builderQuery = payload.compositeQuery.queries.find(
|
||||
(q) => q.type === 'builder_query',
|
||||
) as QueryEnvelope;
|
||||
const builderSpec = builderQuery.spec as MetricBuilderQuery;
|
||||
expect(builderSpec.name).toBe('A');
|
||||
expect(builderSpec.signal).toBe('metrics');
|
||||
expect(builderSpec.aggregations?.[0]).toMatchObject({
|
||||
metricName: 'cpu_usage',
|
||||
timeAggregation: 'sum',
|
||||
spaceAggregation: 'avg',
|
||||
});
|
||||
// reduceTo should not be present for non-scalar panels
|
||||
expect(builderSpec.aggregations?.[0].reduceTo).toBeUndefined();
|
||||
// functions should be preserved/normalized
|
||||
expect(builderSpec.functions?.[0]?.name).toBe('timeShift');
|
||||
|
||||
const formulaQuery = payload.compositeQuery.queries.find(
|
||||
(q) => q.type === 'builder_formula',
|
||||
) as QueryEnvelope;
|
||||
const formulaSpec = formulaQuery.spec as V5QueryBuilderFormula;
|
||||
expect(formulaSpec.name).toBe('F1');
|
||||
expect(formulaSpec.expression).toBe('A + 1');
|
||||
expect(formulaSpec.legend).toBe('Formula Legend');
|
||||
});
|
||||
|
||||
it('builds payload for PromQL queries and respects originalGraphType for formatting', () => {
|
||||
const props: GetQueryResultsProps = {
|
||||
query: {
|
||||
queryType: EQueryType.PROM,
|
||||
id: 'q2',
|
||||
unit: undefined,
|
||||
promql: [
|
||||
{
|
||||
name: 'A',
|
||||
query: 'up',
|
||||
disabled: false,
|
||||
legend: 'LP',
|
||||
},
|
||||
],
|
||||
clickhouse_sql: [],
|
||||
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
|
||||
},
|
||||
graphType: PANEL_TYPES.TIME_SERIES,
|
||||
originalGraphType: PANEL_TYPES.TABLE,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
start,
|
||||
end,
|
||||
};
|
||||
|
||||
const result = prepareQueryRangePayloadV5(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
legendMap: { A: 'LP' },
|
||||
queryPayload: expect.objectContaining({
|
||||
compositeQuery: expect.objectContaining({
|
||||
queries: [
|
||||
{
|
||||
type: 'promql',
|
||||
spec: expect.objectContaining({
|
||||
name: 'A',
|
||||
query: 'up',
|
||||
legend: 'LP',
|
||||
stats: false,
|
||||
}),
|
||||
},
|
||||
],
|
||||
}),
|
||||
requestType: 'time_series',
|
||||
formatOptions: expect.objectContaining({
|
||||
formatTableResultForUI: true,
|
||||
fillGaps: false,
|
||||
}),
|
||||
start: start * 1000,
|
||||
end: end * 1000,
|
||||
variables: {},
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
expect(result.legendMap).toEqual({ A: 'LP' });
|
||||
|
||||
const payload: QueryRangePayloadV5 = result.queryPayload;
|
||||
expect(payload.requestType).toBe('time_series');
|
||||
expect(payload.formatOptions?.formatTableResultForUI).toBe(true);
|
||||
expect(payload.compositeQuery.queries).toHaveLength(1);
|
||||
|
||||
const prom = payload.compositeQuery.queries[0];
|
||||
expect(prom.type).toBe('promql');
|
||||
const promSpec = prom.spec as PromQuery;
|
||||
expect(promSpec.name).toBe('A');
|
||||
expect(promSpec.query).toBe('up');
|
||||
expect(promSpec.legend).toBe('LP');
|
||||
expect(promSpec.stats).toBe(false);
|
||||
});
|
||||
|
||||
it('builds payload for ClickHouse queries and maps requestType from panel', () => {
|
||||
const props: GetQueryResultsProps = {
|
||||
query: {
|
||||
queryType: EQueryType.CLICKHOUSE,
|
||||
id: 'q3',
|
||||
unit: undefined,
|
||||
promql: [],
|
||||
clickhouse_sql: [
|
||||
{
|
||||
name: 'Q',
|
||||
query: 'SELECT 1',
|
||||
disabled: false,
|
||||
legend: 'LC',
|
||||
},
|
||||
],
|
||||
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
|
||||
},
|
||||
graphType: PANEL_TYPES.TABLE,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
start,
|
||||
end,
|
||||
};
|
||||
|
||||
const result = prepareQueryRangePayloadV5(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
legendMap: { Q: 'LC' },
|
||||
queryPayload: expect.objectContaining({
|
||||
compositeQuery: expect.objectContaining({
|
||||
queries: [
|
||||
{
|
||||
type: 'clickhouse_sql',
|
||||
spec: expect.objectContaining({
|
||||
name: 'Q',
|
||||
query: 'SELECT 1',
|
||||
legend: 'LC',
|
||||
}),
|
||||
},
|
||||
],
|
||||
}),
|
||||
requestType: 'scalar',
|
||||
formatOptions: expect.objectContaining({
|
||||
formatTableResultForUI: true,
|
||||
fillGaps: false,
|
||||
}),
|
||||
start: start * 1000,
|
||||
end: end * 1000,
|
||||
variables: {},
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
expect(result.legendMap).toEqual({ Q: 'LC' });
|
||||
|
||||
const payload: QueryRangePayloadV5 = result.queryPayload;
|
||||
expect(payload.requestType).toBe('scalar');
|
||||
expect(payload.compositeQuery.queries).toHaveLength(1);
|
||||
const ch = payload.compositeQuery.queries[0];
|
||||
expect(ch.type).toBe('clickhouse_sql');
|
||||
const chSpec = ch.spec as ClickHouseQuery;
|
||||
expect(chSpec.name).toBe('Q');
|
||||
expect(chSpec.query).toBe('SELECT 1');
|
||||
expect(chSpec.legend).toBe('LC');
|
||||
});
|
||||
|
||||
it('uses getStartEndRangeTime when start/end are not provided', () => {
|
||||
const props: GetQueryResultsProps = {
|
||||
query: {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
id: 'q4',
|
||||
unit: undefined,
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
|
||||
},
|
||||
graphType: PANEL_TYPES.TIME_SERIES,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
};
|
||||
|
||||
const result = prepareQueryRangePayloadV5(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
legendMap: {},
|
||||
queryPayload: expect.objectContaining({
|
||||
compositeQuery: { queries: [] },
|
||||
requestType: 'time_series',
|
||||
formatOptions: expect.objectContaining({
|
||||
formatTableResultForUI: false,
|
||||
fillGaps: false,
|
||||
}),
|
||||
start: 100 * 1000,
|
||||
end: 200 * 1000,
|
||||
variables: {},
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const payload: QueryRangePayloadV5 = result.queryPayload;
|
||||
expect(payload.start).toBe(100 * 1000);
|
||||
expect(payload.end).toBe(200 * 1000);
|
||||
});
|
||||
|
||||
it('includes reduceTo for metrics in scalar panels (TABLE)', () => {
|
||||
const props: GetQueryResultsProps = {
|
||||
query: {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
id: 'q5',
|
||||
unit: undefined,
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
builder: {
|
||||
queryData: [baseBuilderQuery()],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
},
|
||||
graphType: PANEL_TYPES.TABLE,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
start,
|
||||
end,
|
||||
};
|
||||
|
||||
const result = prepareQueryRangePayloadV5(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
legendMap: { A: 'Legend A' },
|
||||
queryPayload: expect.objectContaining({
|
||||
compositeQuery: expect.objectContaining({
|
||||
queries: [
|
||||
{
|
||||
type: 'builder_query',
|
||||
spec: expect.objectContaining({
|
||||
name: 'A',
|
||||
signal: 'metrics',
|
||||
stepInterval: 600,
|
||||
functions: [{ name: 'timeShift', args: [{ value: '5m' }] }],
|
||||
aggregations: [
|
||||
expect.objectContaining({
|
||||
metricName: 'cpu_usage',
|
||||
timeAggregation: 'sum',
|
||||
spaceAggregation: 'avg',
|
||||
reduceTo: 'avg',
|
||||
temporality: undefined,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
],
|
||||
}),
|
||||
requestType: 'scalar',
|
||||
formatOptions: expect.objectContaining({
|
||||
formatTableResultForUI: true,
|
||||
fillGaps: false,
|
||||
}),
|
||||
start: start * 1000,
|
||||
end: end * 1000,
|
||||
variables: {},
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const payload: QueryRangePayloadV5 = result.queryPayload;
|
||||
const builderQuery = payload.compositeQuery.queries.find(
|
||||
(q) => q.type === 'builder_query',
|
||||
) as QueryEnvelope;
|
||||
const builderSpec = builderQuery.spec as MetricBuilderQuery;
|
||||
expect(builderSpec.aggregations?.[0].reduceTo).toBe('avg');
|
||||
});
|
||||
|
||||
it('omits aggregations for raw request type (LIST panel)', () => {
|
||||
const logAgg: LogAggregation[] = [{ expression: 'count()' }];
|
||||
const logsQuery = baseBuilderQuery({
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregations: logAgg,
|
||||
} as Partial<IBuilderQuery>);
|
||||
|
||||
const props: GetQueryResultsProps = {
|
||||
query: {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
id: 'q6',
|
||||
unit: undefined,
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
builder: {
|
||||
queryData: [logsQuery],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
},
|
||||
graphType: PANEL_TYPES.LIST,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
start,
|
||||
end,
|
||||
};
|
||||
|
||||
const result = prepareQueryRangePayloadV5(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
legendMap: { A: 'Legend A' },
|
||||
queryPayload: expect.objectContaining({
|
||||
compositeQuery: expect.objectContaining({
|
||||
queries: [
|
||||
{
|
||||
type: 'builder_query',
|
||||
spec: expect.objectContaining({
|
||||
name: 'A',
|
||||
signal: 'logs',
|
||||
stepInterval: 600,
|
||||
functions: [{ name: 'timeShift', args: [{ value: '5m' }] }],
|
||||
aggregations: undefined,
|
||||
}),
|
||||
},
|
||||
],
|
||||
}),
|
||||
requestType: 'raw',
|
||||
formatOptions: expect.objectContaining({
|
||||
formatTableResultForUI: false,
|
||||
fillGaps: false,
|
||||
}),
|
||||
start: start * 1000,
|
||||
end: end * 1000,
|
||||
variables: {},
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const payload: QueryRangePayloadV5 = result.queryPayload;
|
||||
expect(payload.requestType).toBe('raw');
|
||||
const builderQuery = payload.compositeQuery.queries.find(
|
||||
(q) => q.type === 'builder_query',
|
||||
) as QueryEnvelope;
|
||||
// For RAW request type, aggregations should be omitted
|
||||
const logSpec = builderQuery.spec as LogBuilderQuery;
|
||||
expect(logSpec.aggregations).toBeUndefined();
|
||||
});
|
||||
|
||||
it('maps groupBy, order, having, aggregations and filter for logs builder query', () => {
|
||||
const getStartEndRangeTime = jest.requireMock('lib/getStartEndRangeTime')
|
||||
.default as jest.Mock;
|
||||
getStartEndRangeTime.mockReturnValueOnce({
|
||||
start: '1754623641',
|
||||
end: '1754645241',
|
||||
});
|
||||
|
||||
const props: GetQueryResultsProps = {
|
||||
query: {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
id: 'e643e387-1996-4449-97b6-9ef4498a0573',
|
||||
unit: undefined,
|
||||
promql: [{ name: 'A', query: '', legend: '', disabled: false }],
|
||||
clickhouse_sql: [{ name: 'A', legend: '', disabled: false, query: '' }],
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
dataSource: DataSource.LOGS,
|
||||
queryName: 'A',
|
||||
aggregateOperator: 'count',
|
||||
aggregateAttribute: {
|
||||
key: '',
|
||||
dataType: DataTypes.EMPTY,
|
||||
type: '',
|
||||
},
|
||||
timeAggregation: 'rate',
|
||||
spaceAggregation: 'sum',
|
||||
filter: { expression: "service.name = 'adservice'" },
|
||||
aggregations: [
|
||||
{ expression: 'count() as cnt avg(code.lineno) ' } as LogAggregation,
|
||||
],
|
||||
functions: [],
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
id: '14c790ec-54d1-42f0-a889-3b4f0fb79852',
|
||||
op: '=',
|
||||
key: { id: 'service.name', key: 'service.name', type: '' },
|
||||
value: 'adservice',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
expression: 'A',
|
||||
disabled: false,
|
||||
stepInterval: 80,
|
||||
having: { expression: 'count() > 0' },
|
||||
limit: 600,
|
||||
orderBy: [{ columnName: 'service.name', order: 'desc' }],
|
||||
groupBy: [
|
||||
{
|
||||
key: 'service.name',
|
||||
type: '',
|
||||
},
|
||||
],
|
||||
legend: '{{service.name}}',
|
||||
reduceTo: 'avg',
|
||||
offset: 0,
|
||||
pageSize: 100,
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
},
|
||||
graphType: PANEL_TYPES.TIME_SERIES,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
globalSelectedInterval: 'custom' as never,
|
||||
variables: {},
|
||||
};
|
||||
|
||||
const result = prepareQueryRangePayloadV5(props);
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
legendMap: { A: '{{service.name}}' },
|
||||
queryPayload: expect.objectContaining({
|
||||
schemaVersion: 'v1',
|
||||
start: 1754623641000,
|
||||
end: 1754645241000,
|
||||
requestType: 'time_series',
|
||||
compositeQuery: expect.objectContaining({
|
||||
queries: [
|
||||
{
|
||||
type: 'builder_query',
|
||||
spec: expect.objectContaining({
|
||||
name: 'A',
|
||||
signal: 'logs',
|
||||
stepInterval: 80,
|
||||
disabled: false,
|
||||
filter: { expression: "service.name = 'adservice'" },
|
||||
groupBy: [
|
||||
{
|
||||
name: 'service.name',
|
||||
fieldDataType: '',
|
||||
fieldContext: '',
|
||||
},
|
||||
],
|
||||
limit: 600,
|
||||
order: [
|
||||
{
|
||||
key: { name: 'service.name' },
|
||||
direction: 'desc',
|
||||
},
|
||||
],
|
||||
legend: '{{service.name}}',
|
||||
having: { expression: 'count() > 0' },
|
||||
aggregations: [
|
||||
{ expression: 'count()', alias: 'cnt' },
|
||||
{ expression: 'avg(code.lineno)' },
|
||||
],
|
||||
}),
|
||||
},
|
||||
],
|
||||
}),
|
||||
formatOptions: { formatTableResultForUI: false, fillGaps: false },
|
||||
variables: {},
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,4 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable sonarjs/no-identical-functions */
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
|
||||
@@ -8,7 +7,7 @@ import { isEmpty } from 'lodash-es';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
IBuilderTraceOperator,
|
||||
QueryFunctionProps,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
BaseBuilderQuery,
|
||||
@@ -28,11 +27,9 @@ import {
|
||||
TelemetryFieldKey,
|
||||
TraceAggregation,
|
||||
VariableItem,
|
||||
VariableType,
|
||||
} from 'types/api/v5/queryRange';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { normalizeFunctionName } from 'utils/functionNameNormalizer';
|
||||
|
||||
type PrepareQueryRangePayloadV5Result = {
|
||||
queryPayload: QueryRangePayloadV5;
|
||||
@@ -71,46 +68,9 @@ function getSignalType(dataSource: string): 'traces' | 'logs' | 'metrics' {
|
||||
return 'metrics';
|
||||
}
|
||||
|
||||
function isDeprecatedField(fieldName: string): boolean {
|
||||
const deprecatedIntrinsicFields = [
|
||||
'traceID',
|
||||
'spanID',
|
||||
'parentSpanID',
|
||||
'spanKind',
|
||||
'durationNano',
|
||||
'statusCode',
|
||||
'statusMessage',
|
||||
'statusCodeString',
|
||||
];
|
||||
|
||||
const deprecatedCalculatedFields = [
|
||||
'responseStatusCode',
|
||||
'externalHttpUrl',
|
||||
'httpUrl',
|
||||
'externalHttpMethod',
|
||||
'httpMethod',
|
||||
'httpHost',
|
||||
'dbName',
|
||||
'dbOperation',
|
||||
'hasError',
|
||||
'isRemote',
|
||||
'serviceName',
|
||||
'httpRoute',
|
||||
'msgSystem',
|
||||
'msgOperation',
|
||||
'dbSystem',
|
||||
'rpcSystem',
|
||||
'rpcService',
|
||||
'rpcMethod',
|
||||
'peerService',
|
||||
];
|
||||
|
||||
return (
|
||||
deprecatedIntrinsicFields.includes(fieldName) ||
|
||||
deprecatedCalculatedFields.includes(fieldName)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates base spec for builder queries
|
||||
*/
|
||||
function createBaseSpec(
|
||||
queryData: IBuilderQuery,
|
||||
requestType: RequestType,
|
||||
@@ -122,7 +82,7 @@ function createBaseSpec(
|
||||
)[])?.filter((c) => ('key' in c ? c?.key : c?.name));
|
||||
|
||||
return {
|
||||
stepInterval: queryData?.stepInterval || null,
|
||||
stepInterval: queryData?.stepInterval || undefined,
|
||||
disabled: queryData.disabled,
|
||||
filter: queryData?.filter?.expression ? queryData.filter : undefined,
|
||||
groupBy:
|
||||
@@ -130,8 +90,8 @@ function createBaseSpec(
|
||||
? queryData.groupBy.map(
|
||||
(item: any): GroupByKey => ({
|
||||
name: item.key,
|
||||
fieldDataType: item?.dataType || '',
|
||||
fieldContext: item?.type || '',
|
||||
fieldDataType: item?.dataType,
|
||||
fieldContext: item?.type,
|
||||
description: item?.description,
|
||||
unit: item?.unit,
|
||||
signal: item?.signal,
|
||||
@@ -163,52 +123,34 @@ function createBaseSpec(
|
||||
functions: isEmpty(queryData.functions)
|
||||
? undefined
|
||||
: queryData.functions.map(
|
||||
(func: QueryFunction): QueryFunction => {
|
||||
// Normalize function name to handle case sensitivity
|
||||
const normalizedName = normalizeFunctionName(func?.name);
|
||||
return {
|
||||
name: normalizedName as FunctionName,
|
||||
args: isEmpty(func.namedArgs)
|
||||
? func.args?.map((arg) => ({
|
||||
value: arg?.value,
|
||||
}))
|
||||
: Object.entries(func?.namedArgs || {}).map(([name, value]) => ({
|
||||
name,
|
||||
value,
|
||||
})),
|
||||
};
|
||||
},
|
||||
(func: QueryFunctionProps): QueryFunction => ({
|
||||
name: func.name as FunctionName,
|
||||
args: isEmpty(func.namedArgs)
|
||||
? func.args.map((arg) => ({
|
||||
value: arg,
|
||||
}))
|
||||
: Object.entries(func.namedArgs).map(([name, value]) => ({
|
||||
name,
|
||||
value,
|
||||
})),
|
||||
}),
|
||||
),
|
||||
selectFields: isEmpty(nonEmptySelectColumns)
|
||||
? undefined
|
||||
: nonEmptySelectColumns?.map(
|
||||
(column: any): TelemetryFieldKey => {
|
||||
const fieldName = column.name ?? column.key;
|
||||
const isDeprecated = isDeprecatedField(fieldName);
|
||||
|
||||
const fieldObj: TelemetryFieldKey = {
|
||||
name: fieldName,
|
||||
fieldDataType:
|
||||
column?.fieldDataType ?? (column?.dataType as FieldDataType),
|
||||
signal: column?.signal ?? undefined,
|
||||
};
|
||||
|
||||
// Only add fieldContext if the field is NOT deprecated
|
||||
if (!isDeprecated && fieldName !== 'name') {
|
||||
fieldObj.fieldContext =
|
||||
column?.fieldContext ?? (column?.type as FieldContext);
|
||||
}
|
||||
|
||||
return fieldObj;
|
||||
},
|
||||
(column: any): TelemetryFieldKey => ({
|
||||
name: column.name ?? column.key,
|
||||
fieldDataType:
|
||||
column?.fieldDataType ?? (column?.dataType as FieldDataType),
|
||||
fieldContext: column?.fieldContext ?? (column?.type as FieldContext),
|
||||
signal: column?.signal ?? undefined,
|
||||
}),
|
||||
),
|
||||
};
|
||||
}
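A hedged sketch of the function-argument mapping shown in the hunk above: named arguments become { name, value } pairs while positional arguments become { value } entries. The types are simplified local stand-ins, and the ewma alpha parameter is purely illustrative:

// Simplified stand-ins for the V5 function types used above.
type V5FunctionArg = { name?: string; value: unknown };
type V5Function = { name: string; args: V5FunctionArg[] };

function toV5Function(
  name: string,
  args: unknown[],
  namedArgs?: Record<string, unknown>,
): V5Function {
  const mappedArgs =
    namedArgs && Object.keys(namedArgs).length > 0
      ? Object.entries(namedArgs).map(([argName, value]) => ({ name: argName, value }))
      : args.map((value) => ({ value }));
  return { name, args: mappedArgs };
}

// Positional form, as exercised by the removed test: timeShift('5m')
console.log(toV5Function('timeShift', ['5m'])); // { name: 'timeShift', args: [ { value: '5m' } ] }

// Named form with an illustrative smoothing parameter (not taken from this PR)
console.log(toV5Function('ewma3', [], { alpha: 0.3 })); // args: [ { name: 'alpha', value: 0.3 } ]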

// Utility to parse aggregation expressions with optional alias
export function parseAggregations(
expression: string,
availableAlias?: string,
): { expression: string; alias?: string }[] {
const result: { expression: string; alias?: string }[] = [];
// Matches function calls like "count()" or "sum(field)" with optional alias like "as 'alias'"
@@ -217,7 +159,7 @@ export function parseAggregations(
let match = regex.exec(expression);
while (match !== null) {
const expr = match[1];
let alias = match[2] || availableAlias; // Use provided alias or availableAlias if not matched
let alias = match[2];
if (alias) {
// Remove quotes if present
alias = alias.replace(/^['"]|['"]$/g, '');
@@ -268,14 +210,9 @@ export function createAggregation(
}

if (queryData.aggregations?.length > 0) {
return queryData.aggregations.flatMap(
(agg: { expression: string; alias?: string }) => {
const parsedAggregations = parseAggregations(agg.expression, agg?.alias);
return isEmpty(parsedAggregations)
? [{ expression: 'count()' }]
: parsedAggregations;
},
);
return isEmpty(parseAggregations(queryData.aggregations?.[0].expression))
? [{ expression: 'count()' }]
: parseAggregations(queryData.aggregations?.[0].expression);
}

return [{ expression: 'count()' }];
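Usage sketch for the parseAggregations helper above, based on the behaviour exercised by the removed prepareQueryRangePayloadV5 test (the import path is assumed):

// The removed test feeds this expression through the aggregation parsing path
// and expects two entries back: an aliased count() and a plain avg().
import { parseAggregations } from './prepareQueryRangePayloadV5';

const parsed = parseAggregations('count() as cnt avg(code.lineno) ');
console.log(parsed);
// Expected (per the removed test):
// [ { expression: 'count()', alias: 'cnt' }, { expression: 'avg(code.lineno)' } ]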
@@ -321,7 +258,6 @@ export function convertBuilderQueriesToV5(
|
||||
spec = {
|
||||
name: queryName,
|
||||
signal: 'metrics' as const,
|
||||
source: queryData.source || '',
|
||||
...baseSpec,
|
||||
aggregations: aggregations as MetricAggregation[],
|
||||
// reduceTo: queryData.reduceTo,
|
||||
@@ -337,109 +273,6 @@ export function convertBuilderQueriesToV5(
|
||||
);
|
||||
}
|
||||
|
||||
function createTraceOperatorBaseSpec(
|
||||
queryData: IBuilderTraceOperator,
|
||||
requestType: RequestType,
|
||||
panelType?: PANEL_TYPES,
|
||||
): BaseBuilderQuery {
|
||||
const nonEmptySelectColumns = (queryData.selectColumns as (
|
||||
| BaseAutocompleteData
|
||||
| TelemetryFieldKey
|
||||
)[])?.filter((c) => ('key' in c ? c?.key : c?.name));
|
||||
|
||||
const {
|
||||
stepInterval,
|
||||
groupBy,
|
||||
limit,
|
||||
offset,
|
||||
legend,
|
||||
having,
|
||||
orderBy,
|
||||
pageSize,
|
||||
} = queryData;
|
||||
|
||||
return {
|
||||
stepInterval: stepInterval || undefined,
|
||||
groupBy:
|
||||
groupBy?.length > 0
|
||||
? groupBy.map(
|
||||
(item: any): GroupByKey => ({
|
||||
name: item.key,
|
||||
fieldDataType: item?.dataType,
|
||||
fieldContext: item?.type,
|
||||
description: item?.description,
|
||||
unit: item?.unit,
|
||||
signal: item?.signal,
|
||||
materialized: item?.materialized,
|
||||
}),
|
||||
)
|
||||
: undefined,
|
||||
limit:
|
||||
panelType === PANEL_TYPES.TABLE || panelType === PANEL_TYPES.LIST
|
||||
? limit || pageSize || undefined
|
||||
: limit || undefined,
|
||||
offset: requestType === 'raw' || requestType === 'trace' ? offset : undefined,
|
||||
order:
|
||||
orderBy?.length > 0
|
||||
? orderBy.map(
|
||||
(order: any): OrderBy => ({
|
||||
key: {
|
||||
name: order.columnName,
|
||||
},
|
||||
direction: order.order,
|
||||
}),
|
||||
)
|
||||
: undefined,
|
||||
legend: isEmpty(legend) ? undefined : legend,
|
||||
having: isEmpty(having) ? undefined : (having as Having),
|
||||
selectFields: isEmpty(nonEmptySelectColumns)
|
||||
? undefined
|
||||
: nonEmptySelectColumns?.map(
|
||||
(column: any): TelemetryFieldKey => ({
|
||||
name: column.name ?? column.key,
|
||||
fieldDataType:
|
||||
column?.fieldDataType ?? (column?.dataType as FieldDataType),
|
||||
fieldContext: column?.fieldContext ?? (column?.type as FieldContext),
|
||||
signal: column?.signal ?? undefined,
|
||||
}),
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
export function convertTraceOperatorToV5(
|
||||
traceOperator: Record<string, IBuilderTraceOperator>,
|
||||
requestType: RequestType,
|
||||
panelType?: PANEL_TYPES,
|
||||
): QueryEnvelope[] {
|
||||
return Object.entries(traceOperator).map(
|
||||
([queryName, traceOperatorData]): QueryEnvelope => {
|
||||
const baseSpec = createTraceOperatorBaseSpec(
|
||||
traceOperatorData,
|
||||
requestType,
|
||||
panelType,
|
||||
);
|
||||
|
||||
// Skip aggregation for raw request type
|
||||
const aggregations =
|
||||
requestType === 'raw'
|
||||
? undefined
|
||||
: createAggregation(traceOperatorData, panelType);
|
||||
|
||||
const spec: QueryEnvelope['spec'] = {
|
||||
name: queryName,
|
||||
...baseSpec,
|
||||
expression: traceOperatorData.expression || '',
|
||||
aggregations: aggregations as TraceAggregation[],
|
||||
};
|
||||
|
||||
return {
|
||||
type: 'builder_trace_operator' as QueryType,
|
||||
spec,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts PromQL queries to V5 format
|
||||
*/
|
||||
@@ -514,7 +347,6 @@ export const prepareQueryRangePayloadV5 = ({
|
||||
formatForWeb,
|
||||
originalGraphType,
|
||||
fillGaps,
|
||||
dynamicVariables,
|
||||
}: GetQueryResultsProps): PrepareQueryRangePayloadV5Result => {
|
||||
let legendMap: Record<string, string> = {};
|
||||
const requestType = mapPanelTypeToRequestType(graphType);
|
||||
@@ -522,28 +354,14 @@ export const prepareQueryRangePayloadV5 = ({
|
||||
|
||||
switch (query.queryType) {
|
||||
case EQueryType.QUERY_BUILDER: {
|
||||
const { queryData: data, queryFormulas, queryTraceOperator } = query.builder;
|
||||
const { queryData: data, queryFormulas } = query.builder;
|
||||
const currentQueryData = mapQueryDataToApi(data, 'queryName', tableParams);
|
||||
const currentFormulas = mapQueryDataToApi(queryFormulas, 'queryName');
|
||||
|
||||
const filteredTraceOperator =
|
||||
queryTraceOperator && queryTraceOperator.length > 0
|
||||
? queryTraceOperator.filter((traceOperator) =>
|
||||
Boolean(traceOperator.expression.trim()),
|
||||
)
|
||||
: [];
|
||||
|
||||
const currentTraceOperator = mapQueryDataToApi(
|
||||
filteredTraceOperator,
|
||||
'queryName',
|
||||
tableParams,
|
||||
);
|
||||
|
||||
// Combine legend maps
|
||||
legendMap = {
|
||||
...currentQueryData.newLegendMap,
|
||||
...currentFormulas.newLegendMap,
|
||||
...currentTraceOperator.newLegendMap,
|
||||
};
|
||||
|
||||
// Convert builder queries
|
||||
@@ -576,14 +394,8 @@ export const prepareQueryRangePayloadV5 = ({
|
||||
}),
|
||||
);
|
||||
|
||||
const traceOperatorQueries = convertTraceOperatorToV5(
|
||||
currentTraceOperator.data,
|
||||
requestType,
|
||||
graphType,
|
||||
);
|
||||
|
||||
// Combine all query types
|
||||
queries = [...builderQueries, ...formulaQueries, ...traceOperatorQueries];
|
||||
// Combine both types
|
||||
queries = [...builderQueries, ...formulaQueries];
|
||||
break;
|
||||
}
|
||||
case EQueryType.PROM: {
|
||||
@@ -626,12 +438,7 @@ export const prepareQueryRangePayloadV5 = ({
fillGaps: fillGaps || false,
},
variables: Object.entries(variables).reduce((acc, [key, value]) => {
acc[key] = {
value,
type: dynamicVariables
?.find((v) => v.name === key)
?.type?.toLowerCase() as VariableType,
};
acc[key] = { value };
return acc;
}, {} as Record<string, VariableItem>),
};
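A tiny sketch of the variables wrapping in this hunk, using the sample values from the removed test; only the plain { value } form is shown, while the other side of the diff also attaches a type resolved from dynamicVariables:

// Sample dashboard variables taken from the removed test.
const variables: Record<string, unknown> = { svc: 'api', count: 5, flag: true };

// Each entry is wrapped as { value } before being sent in the V5 payload.
const wrapped = Object.entries(variables).reduce((acc, [key, value]) => {
  acc[key] = { value };
  return acc;
}, {} as Record<string, { value: unknown }>);

console.log(wrapped); // { svc: { value: 'api' }, count: { value: 5 }, flag: { value: true } }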
@@ -1,16 +1,14 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
|
||||
import getLocal from '../../../api/browser/localstorage/get';
|
||||
import AppLoading from '../AppLoading';
|
||||
|
||||
jest.mock('../../../api/browser/localstorage/get', () => ({
|
||||
// Mock the localStorage API
|
||||
const mockGet = jest.fn();
|
||||
jest.mock('api/browser/localstorage/get', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(),
|
||||
default: mockGet,
|
||||
}));
|
||||
|
||||
// Access the mocked function
|
||||
const mockGet = (getLocal as unknown) as jest.Mock;
|
||||
|
||||
describe('AppLoading', () => {
|
||||
const SIGNOZ_TEXT = 'SigNoz';
|
||||
const TAGLINE_TEXT =
|
||||
|
||||
@@ -20,15 +20,13 @@
|
||||
.ant-card-body {
|
||||
height: calc(100% - 18px);
|
||||
|
||||
.widget-graph-component-container {
|
||||
.widget-graph-container {
|
||||
&.bar-panel-container {
|
||||
height: calc(100% - 110px);
|
||||
}
|
||||
.widget-graph-container {
|
||||
&.bar {
|
||||
height: calc(100% - 110px);
|
||||
}
|
||||
|
||||
&.graph-panel-container {
|
||||
height: calc(100% - 80px);
|
||||
}
|
||||
&.graph {
|
||||
height: calc(100% - 80px);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -84,11 +82,9 @@
|
||||
.ant-card-body {
|
||||
height: calc(100% - 18px);
|
||||
|
||||
.widget-graph-component-container {
|
||||
.widget-graph-container {
|
||||
&.bar-panel-container {
|
||||
height: calc(100% - 110px);
|
||||
}
|
||||
.widget-graph-container {
|
||||
&.bar {
|
||||
height: calc(100% - 110px);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +32,8 @@ export const celeryAllStateWidgetData = (
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
@@ -48,6 +50,8 @@ export const celeryAllStateWidgetData = (
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'celery.state--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'celery.state',
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -84,6 +88,7 @@ export const celeryRetryStateWidgetData = (
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
|
||||
isColumn: false,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
@@ -98,6 +103,8 @@ export const celeryRetryStateWidgetData = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'celery.state--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'celery.state',
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -112,6 +119,8 @@ export const celeryRetryStateWidgetData = (
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'celery.hostname--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'celery.hostname',
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -144,6 +153,8 @@ export const celeryFailedStateWidgetData = (
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
@@ -158,6 +169,8 @@ export const celeryFailedStateWidgetData = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'celery.state--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'celery.state',
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -172,6 +185,8 @@ export const celeryFailedStateWidgetData = (
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'celery.hostname--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'celery.hostname',
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -204,6 +219,8 @@ export const celerySuccessStateWidgetData = (
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: '',
|
||||
type: '',
|
||||
},
|
||||
@@ -218,6 +235,8 @@ export const celerySuccessStateWidgetData = (
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
id: 'celery.state--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'celery.state',
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -232,6 +251,8 @@ export const celerySuccessStateWidgetData = (
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
id: 'celery.hostname--string--tag--false',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'celery.hostname',
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -263,6 +284,7 @@ export const celeryTasksByWorkerWidgetData = (
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.String,
|
||||
id: '------false',
isColumn: false,
key: '',
type: '',
},
@@ -279,6 +301,8 @@ export const celeryTasksByWorkerWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.hostname--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
},
@@ -314,6 +338,8 @@ export const celeryErrorByWorkerWidgetData = (
aggregateAttribute: {
dataType: 'string',
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -327,6 +353,8 @@ export const celeryErrorByWorkerWidgetData = (
key: {
dataType: DataTypes.bool,
id: 'has_error--bool----true',
isColumn: true,
isJSON: false,
key: 'has_error',
type: '',
},
@@ -345,6 +373,8 @@ export const celeryErrorByWorkerWidgetData = (
groupBy: [
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
id: 'celery.hostname--string--tag--false',
@@ -360,6 +390,8 @@ export const celeryErrorByWorkerWidgetData = (
aggregateAttribute: {
dataType: 'string',
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -379,6 +411,8 @@ export const celeryErrorByWorkerWidgetData = (
groupBy: [
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
id: 'celery.hostname--string--tag--false',
@@ -411,6 +445,8 @@ export const celeryLatencyByWorkerWidgetData = (
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -427,6 +463,8 @@ export const celeryLatencyByWorkerWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.hostname--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
},
@@ -460,6 +498,8 @@ export const celeryActiveTasksWidgetData = (
dataType: DataTypes.Float64,
id:
'flower_worker_number_of_currently_executing_tasks--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: 'flower_worker_number_of_currently_executing_tasks',
type: 'Gauge',
},
@@ -476,6 +516,8 @@ export const celeryActiveTasksWidgetData = (
{
dataType: DataTypes.String,
id: 'worker--string--tag--false',
isColumn: false,
isJSON: false,
key: 'worker',
type: 'tag',
},
@@ -509,6 +551,8 @@ export const celeryTaskLatencyWidgetData = (
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -525,6 +569,8 @@ export const celeryTaskLatencyWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -560,6 +606,8 @@ export const celerySlowestTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -576,6 +624,8 @@ export const celerySlowestTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -610,6 +660,8 @@ export const celeryRetryTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -624,6 +676,8 @@ export const celeryRetryTasksTableWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -638,6 +692,8 @@ export const celeryRetryTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -673,6 +729,8 @@ export const celeryFailedTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -687,6 +745,8 @@ export const celeryFailedTasksTableWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -701,6 +761,8 @@ export const celeryFailedTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -734,6 +796,8 @@ export const celerySuccessTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -748,6 +812,8 @@ export const celerySuccessTasksTableWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -762,6 +828,8 @@ export const celerySuccessTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -801,6 +869,8 @@ export const celeryTimeSeriesTablesWidgetData = (
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -815,6 +885,8 @@ export const celeryTimeSeriesTablesWidgetData = (
key: {
dataType: DataTypes.String,
id: `${entity}--string--tag--false`,
isColumn: false,
isJSON: false,
key: `${entity}`,
type: 'tag',
},
@@ -829,6 +901,8 @@ export const celeryTimeSeriesTablesWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -859,6 +933,8 @@ export const celeryAllStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -896,6 +972,8 @@ export const celerySuccessStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -910,6 +988,8 @@ export const celerySuccessStateCountWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -945,6 +1025,8 @@ export const celeryFailedStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -959,6 +1041,8 @@ export const celeryFailedStateCountWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -994,6 +1078,7 @@ export const celeryRetryStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
key: 'span_id',
type: '',
},
@@ -1008,6 +1093,8 @@ export const celeryRetryStateCountWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -39,6 +39,8 @@ export function getFiltersFromQueryParams(
key,
dataType: DataTypes.String,
type: 'tag',
isColumn: false,
isJSON: false,
id: `${key}--string--tag--false`,
},
op: '=',
@@ -98,7 +100,8 @@ export const createFiltersFromData = (
key: string;
dataType: DataTypes;
type: string;

isColumn: boolean;
isJSON: boolean;
id: string;
};
op: string;
@@ -116,6 +119,8 @@ export const createFiltersFromData = (
key,
dataType: DataTypes.String,
type: 'tag',
isColumn: false,
isJSON: false,
id: `${key}--string--tag--false`,
},
op: '=',
@@ -19,7 +19,6 @@ export interface NavigateToExplorerProps {
endTime?: number;
sameTab?: boolean;
shouldResolveQuery?: boolean;
widgetQuery?: Query;
}

export function useNavigateToExplorer(): (
@@ -31,34 +30,27 @@ export function useNavigateToExplorer(): (
);

const prepareQuery = useCallback(
(
selectedFilters: TagFilterItem[],
dataSource: DataSource,
query?: Query,
): Query => {
const widgetQuery = query || currentQuery;
return {
...widgetQuery,
builder: {
...widgetQuery.builder,
queryData: widgetQuery.builder.queryData
.map((item) => ({
...item,
dataSource,
aggregateOperator: MetricAggregateOperator.NOOP,
filters: {
...item.filters,
items: [...(item.filters?.items || []), ...selectedFilters],
op: item.filters?.op || 'AND',
},
groupBy: [],
disabled: false,
}))
.slice(0, 1),
queryFormulas: [],
},
};
},
(selectedFilters: TagFilterItem[], dataSource: DataSource): Query => ({
...currentQuery,
builder: {
...currentQuery.builder,
queryData: currentQuery.builder.queryData
.map((item) => ({
...item,
dataSource,
aggregateOperator: MetricAggregateOperator.NOOP,
filters: {
...item.filters,
items: [...(item.filters?.items || []), ...selectedFilters],
op: item.filters?.op || 'AND',
},
groupBy: [],
disabled: false,
}))
.slice(0, 1),
queryFormulas: [],
},
}),
[currentQuery],
);

@@ -75,7 +67,6 @@ export function useNavigateToExplorer(): (
endTime,
sameTab,
shouldResolveQuery,
widgetQuery,
} = props;
const urlParams = new URLSearchParams();
if (startTime && endTime) {
@@ -86,7 +77,7 @@ export function useNavigateToExplorer(): (
urlParams.set(QueryParams.endTime, (maxTime / 1000000).toString());
}

let preparedQuery = prepareQuery(filters, dataSource, widgetQuery);
let preparedQuery = prepareQuery(filters, dataSource);

if (shouldResolveQuery) {
await getUpdatedQuery({
@@ -87,7 +87,7 @@ function ChangelogModal({ changelog, onClose }: Props): JSX.Element {

const onClickUpdateWorkspace = (): void => {
window.open(
'https://signoz.io/upgrade-path',
'https://github.com/SigNoz/signoz/releases',
'_blank',
'noopener,noreferrer',
);

@@ -91,7 +91,7 @@ describe('ChangelogModal', () => {
renderChangelog();
fireEvent.click(screen.getByText('Update my workspace'));
expect(window.open).toHaveBeenCalledWith(
'https://signoz.io/upgrade-path',
'https://github.com/SigNoz/signoz/releases',
'_blank',
'noopener,noreferrer',
);
@@ -2,28 +2,10 @@
position: relative;
padding-left: 20px;

& :is(h1, h2, h3, h4, h5, h6, p, &-section-title) {
margin-bottom: 12px;
}

&-content {
display: flex;
flex-direction: column;
gap: 32px;
}

&-section-title {
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-400, #c0c1c3);
}

.changelog-release-date {
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-400, #c0c1c3);
display: block;
margin-bottom: 12px;
}

&-list {
@@ -99,7 +81,12 @@
}
}

& :is(h1, h2, h3, h4, h5, h6, p, &-section-title) {
h1,
h2,
h3,
h4,
h5,
h6 {
font-weight: 600;
color: var(--text-vanilla-100, #fff);
}
@@ -109,8 +96,7 @@
line-height: 32px;
}

h2,
&-section-title {
h2 {
font-size: 20px;
line-height: 28px;
}
@@ -122,7 +108,6 @@
overflow: hidden;
border-radius: 4px;
border: 1px solid var(--bg-slate-400, #1d212d);
margin-bottom: 28px;
}

.changelog-media-video {
@@ -139,14 +124,17 @@
&-line {
background-color: var(--bg-vanilla-300);
}

& :is(h1, h2, h3, h4, h5, h6, p, li, &-section-title) {
li,
p {
color: var(--text-ink-500);
}

code {
background-color: var(--bg-vanilla-300);
border: 1px solid var(--bg-vanilla-300);
h1,
h2,
h3,
h4,
h5,
h6 {
color: var(--text-ink-500);
}
}
@@ -55,35 +55,33 @@ function ChangelogRenderer({ changelog }: Props): JSX.Element {
<div className="inner-ball" />
</div>
<span className="changelog-release-date">{formattedReleaseDate}</span>
<div className="changelog-renderer-content">
{changelog.features && changelog.features.length > 0 && (
<div className="changelog-renderer-list">
{changelog.features.map((feature) => (
<div key={feature.id}>
<div className="changelog-renderer-section-title">{feature.title}</div>
{feature.media && renderMedia(feature.media)}
<ReactMarkdown>{feature.description}</ReactMarkdown>
</div>
))}
</div>
)}
{changelog.bug_fixes && changelog.bug_fixes.length > 0 && (
<div className="changelog-renderer-bug-fixes">
<div className="changelog-renderer-section-title">Bug Fixes</div>
{changelog.bug_fixes && (
<ReactMarkdown>{changelog.bug_fixes}</ReactMarkdown>
)}
</div>
)}
{changelog.maintenance && changelog.maintenance.length > 0 && (
<div className="changelog-renderer-maintenance">
<div className="changelog-renderer-section-title">Maintenance</div>
{changelog.maintenance && (
<ReactMarkdown>{changelog.maintenance}</ReactMarkdown>
)}
</div>
)}
</div>
{changelog.features && changelog.features.length > 0 && (
<div className="changelog-renderer-list">
{changelog.features.map((feature) => (
<div key={feature.id}>
<h2>{feature.title}</h2>
{feature.media && renderMedia(feature.media)}
<ReactMarkdown>{feature.description}</ReactMarkdown>
</div>
))}
</div>
)}
{changelog.bug_fixes && changelog.bug_fixes.length > 0 && (
<div>
<h2>Bug Fixes</h2>
{changelog.bug_fixes && (
<ReactMarkdown>{changelog.bug_fixes}</ReactMarkdown>
)}
</div>
)}
{changelog.maintenance && changelog.maintenance.length > 0 && (
<div>
<h2>Maintenance</h2>
{changelog.maintenance && (
<ReactMarkdown>{changelog.maintenance}</ReactMarkdown>
)}
</div>
)}
</div>
);
}
@@ -241,6 +241,8 @@ function ClientSideQBSearch(
key: 'body',
dataType: DataTypes.String,
type: '',
isColumn: true,
isJSON: false,
id: 'body--string----true',
},
op: OPERATORS.CONTAINS,
@@ -1,33 +0,0 @@
.error-state-container {
height: 240px;
display: flex;
justify-content: center;
align-items: center;
flex-direction: column;

border-radius: 3px;

.error-state-container-content {
display: flex;
flex-direction: column;
gap: 8px;

.error-state-text {
font-size: 14px;
font-weight: 500;
}

.error-state-additional-messages {
margin-top: 8px;
display: flex;
flex-direction: column;
gap: 4px;

.error-state-additional-text {
font-size: 12px;
font-weight: 400;
margin-left: 8px;
}
}
}
}
@@ -1,59 +0,0 @@
import './Common.styles.scss';

import { Typography } from 'antd';

import APIError from '../../types/api/error';

interface ErrorStateComponentProps {
message?: string;
error?: APIError;
}

const defaultProps: Partial<ErrorStateComponentProps> = {
message: undefined,
error: undefined,
};

function ErrorStateComponent({
message,
error,
}: ErrorStateComponentProps): JSX.Element {
// Handle API Error object
if (error) {
const mainMessage = error.getErrorMessage();
const additionalErrors = error.getErrorDetails().error.errors || [];

return (
<div className="error-state-container">
<div className="error-state-container-content">
<Typography className="error-state-text">{mainMessage}</Typography>
{additionalErrors.length > 0 && (
<div className="error-state-additional-messages">
{additionalErrors.map((additionalError) => (
<Typography
key={`error-${additionalError.message}`}
className="error-state-additional-text"
>
• {additionalError.message}
</Typography>
))}
</div>
)}
</div>
</div>
);
}

// Handle simple string message (backwards compatibility)
return (
<div className="error-state-container">
<div className="error-state-container-content">
<Typography className="error-state-text">{message}</Typography>
</div>
</div>
);
}

ErrorStateComponent.defaultProps = defaultProps;

export default ErrorStateComponent;
@@ -1,16 +1,6 @@
|
||||
.custom-time-picker {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
.timeSelection-input {
|
||||
&:hover {
|
||||
border-color: #1d212d !important;
|
||||
}
|
||||
}
|
||||
|
||||
.time-input-suffix {
|
||||
display: flex;
|
||||
}
|
||||
}
|
||||
|
||||
.time-options-container {
|
||||
@@ -145,7 +135,6 @@
|
||||
align-items: center;
|
||||
color: var(--bg-vanilla-400);
|
||||
gap: 6px;
|
||||
|
||||
.timezone {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@@ -174,52 +163,6 @@
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.time-input-prefix {
|
||||
.live-dot-icon {
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
border-radius: 50%;
|
||||
background-color: var(--bg-forest-500);
|
||||
animation: ripple 1s infinite;
|
||||
|
||||
margin-right: 4px;
|
||||
margin-left: 4px;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes ripple {
|
||||
0% {
|
||||
box-shadow: 0 0 0 0 rgba(245, 158, 11, 0.4);
|
||||
}
|
||||
70% {
|
||||
box-shadow: 0 0 0 6px rgba(245, 158, 11, 0);
|
||||
}
|
||||
100% {
|
||||
box-shadow: 0 0 0 0 rgba(245, 158, 11, 0);
|
||||
}
|
||||
}
|
||||
|
||||
.time-input-suffix-icon-badge {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 0 4px;
|
||||
border-radius: 2px;
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
color: var(--bg-vanilla-100);
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
line-height: 16px;
|
||||
letter-spacing: -0.06px;
|
||||
cursor: pointer;
|
||||
height: 20px;
|
||||
width: 20px;
|
||||
|
||||
&:hover {
|
||||
background: rgba(171, 189, 255, 0.08);
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.date-time-popover__footer {
|
||||
border-color: var(--bg-vanilla-400);
|
||||
@@ -237,26 +180,8 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-time-picker {
|
||||
.timeSelection-input {
|
||||
&:hover {
|
||||
border-color: var(--bg-vanilla-300) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.timezone-badge {
|
||||
color: var(--bg-ink-100);
|
||||
background: rgb(179 179 179 / 15%);
|
||||
}
|
||||
|
||||
.time-input-suffix-icon-badge {
|
||||
color: var(--bg-ink-100);
|
||||
background: rgb(179 179 179 / 15%);
|
||||
|
||||
&:hover {
|
||||
background: rgb(179 179 179 / 20%);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,12 +5,13 @@ import './CustomTimePicker.styles.scss';
|
||||
import { Input, Popover, Tooltip, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
|
||||
import {
|
||||
CustomTimeType,
|
||||
FixedDurationSuggestionOptions,
|
||||
Options,
|
||||
RelativeDurationSuggestionOptions,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import dayjs from 'dayjs';
|
||||
import { isValidTimeFormat } from 'lib/getMinMax';
|
||||
@@ -27,10 +28,7 @@ import {
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import CustomTimePickerPopoverContent from './CustomTimePickerPopoverContent';
|
||||
@@ -59,9 +57,11 @@ interface CustomTimePickerProps {
|
||||
customDateTimeVisible?: boolean;
|
||||
setCustomDTPickerVisible?: Dispatch<SetStateAction<boolean>>;
|
||||
onCustomDateHandler?: (dateTimeRange: DateTimeRangeType) => void;
|
||||
showLiveLogs?: boolean;
|
||||
onGoLive?: () => void;
|
||||
onExitLiveLogs?: () => void;
|
||||
handleGoLive?: () => void;
|
||||
onTimeChange?: (
|
||||
interval: Time | CustomTimeType,
|
||||
dateTimeRange?: [number, number],
|
||||
) => void;
|
||||
}
|
||||
|
||||
function CustomTimePicker({
|
||||
@@ -78,19 +78,14 @@ function CustomTimePicker({
|
||||
customDateTimeVisible,
|
||||
setCustomDTPickerVisible,
|
||||
onCustomDateHandler,
|
||||
onGoLive,
|
||||
onExitLiveLogs,
|
||||
showLiveLogs,
|
||||
handleGoLive,
|
||||
onTimeChange,
|
||||
}: CustomTimePickerProps): JSX.Element {
|
||||
const [
|
||||
selectedTimePlaceholderValue,
|
||||
setSelectedTimePlaceholderValue,
|
||||
] = useState('Select / Enter Time Range');
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const [inputValue, setInputValue] = useState('');
|
||||
const [inputStatus, setInputStatus] = useState<'' | 'error' | 'success'>('');
|
||||
const [inputErrorMessage, setInputErrorMessage] = useState<string | null>(
|
||||
@@ -169,13 +164,9 @@ function CustomTimePicker({
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (showLiveLogs) {
|
||||
setSelectedTimePlaceholderValue('Live');
|
||||
} else {
|
||||
const value = getSelectedTimeRangeLabel(selectedTime, selectedValue);
|
||||
setSelectedTimePlaceholderValue(value);
|
||||
}
|
||||
}, [selectedTime, selectedValue, showLiveLogs]);
|
||||
const value = getSelectedTimeRangeLabel(selectedTime, selectedValue);
|
||||
setSelectedTimePlaceholderValue(value);
|
||||
}, [selectedTime, selectedValue]);
|
||||
|
||||
const hide = (): void => {
|
||||
setOpen(false);
|
||||
@@ -265,11 +256,6 @@ function CustomTimePicker({
|
||||
};
|
||||
|
||||
const handleSelect = (label: string, value: string): void => {
|
||||
if (label === 'Custom') {
|
||||
setCustomDTPickerVisible?.(true);
|
||||
return;
|
||||
}
|
||||
|
||||
onSelect(value);
|
||||
setSelectedTimePlaceholderValue(label);
|
||||
setInputStatus('');
|
||||
@@ -332,118 +318,84 @@ function CustomTimePicker({
|
||||
);
|
||||
};
|
||||
|
||||
const getTooltipTitle = (): string => {
|
||||
if (selectedTime === 'custom' && inputValue === '' && !open) {
|
||||
return `${dayjs(minTime / 1000_000)
|
||||
.tz(timezone.value)
|
||||
.format(DATE_TIME_FORMATS.DD_MMM_YYYY_HH_MM_SS)} - ${dayjs(
|
||||
maxTime / 1000_000,
|
||||
)
|
||||
.tz(timezone.value)
|
||||
.format(DATE_TIME_FORMATS.DD_MMM_YYYY_HH_MM_SS)}`;
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
|
||||
const getInputPrefix = (): JSX.Element => {
|
||||
if (showLiveLogs) {
|
||||
return (
|
||||
<div className="time-input-prefix">
|
||||
<div className="live-dot-icon" />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="time-input-prefix">
|
||||
{inputValue && inputStatus === 'success' ? (
|
||||
<CheckCircle size={14} color="#51E7A8" />
|
||||
) : (
|
||||
<Tooltip title="Enter time in format (e.g., 1m, 2h, 3d, 4w)">
|
||||
<Clock size={14} className="cursor-pointer" />
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="custom-time-picker">
|
||||
<Tooltip title={getTooltipTitle()} placement="top">
|
||||
<Popover
|
||||
className={cx(
|
||||
'timeSelection-input-container',
|
||||
selectedTime === 'custom' && inputValue === '' ? 'custom-time' : '',
|
||||
)}
|
||||
placement="bottomRight"
|
||||
getPopupContainer={popupContainer}
|
||||
rootClassName="date-time-root"
|
||||
content={
|
||||
newPopover ? (
|
||||
<CustomTimePickerPopoverContent
|
||||
setIsOpen={setOpen}
|
||||
customDateTimeVisible={defaultTo(customDateTimeVisible, false)}
|
||||
setCustomDTPickerVisible={defaultTo(setCustomDTPickerVisible, noop)}
|
||||
onCustomDateHandler={defaultTo(onCustomDateHandler, noop)}
|
||||
onSelectHandler={handleSelect}
|
||||
onGoLive={defaultTo(onGoLive, noop)}
|
||||
onExitLiveLogs={defaultTo(onExitLiveLogs, noop)}
|
||||
options={items}
|
||||
selectedTime={selectedTime}
|
||||
activeView={activeView}
|
||||
setActiveView={setActiveView}
|
||||
setIsOpenedFromFooter={setIsOpenedFromFooter}
|
||||
isOpenedFromFooter={isOpenedFromFooter}
|
||||
/>
|
||||
<Popover
|
||||
className={cx(
|
||||
'timeSelection-input-container',
|
||||
selectedTime === 'custom' && inputValue === '' ? 'custom-time' : '',
|
||||
)}
|
||||
placement="bottomRight"
|
||||
getPopupContainer={popupContainer}
|
||||
rootClassName="date-time-root"
|
||||
content={
|
||||
newPopover ? (
|
||||
<CustomTimePickerPopoverContent
|
||||
setIsOpen={setOpen}
|
||||
customDateTimeVisible={defaultTo(customDateTimeVisible, false)}
|
||||
setCustomDTPickerVisible={defaultTo(setCustomDTPickerVisible, noop)}
|
||||
onCustomDateHandler={defaultTo(onCustomDateHandler, noop)}
|
||||
onSelectHandler={handleSelect}
|
||||
handleGoLive={defaultTo(handleGoLive, noop)}
|
||||
options={items}
|
||||
selectedTime={selectedTime}
|
||||
activeView={activeView}
|
||||
setActiveView={setActiveView}
|
||||
setIsOpenedFromFooter={setIsOpenedFromFooter}
|
||||
isOpenedFromFooter={isOpenedFromFooter}
|
||||
onTimeChange={onTimeChange}
|
||||
/>
|
||||
) : (
|
||||
content
|
||||
)
|
||||
}
|
||||
arrow={false}
|
||||
trigger="click"
|
||||
open={open}
|
||||
onOpenChange={handleOpenChange}
|
||||
style={{
|
||||
padding: 0,
|
||||
}}
|
||||
>
|
||||
<Input
|
||||
className="timeSelection-input"
|
||||
type="text"
|
||||
status={inputValue && inputStatus === 'error' ? 'error' : ''}
|
||||
placeholder={
|
||||
isInputFocused
|
||||
? 'Time Format (1m or 2h or 3d or 4w)'
|
||||
: selectedTimePlaceholderValue
|
||||
}
|
||||
value={inputValue}
|
||||
onFocus={handleFocus}
|
||||
onBlur={handleBlur}
|
||||
onChange={handleInputChange}
|
||||
data-1p-ignore
|
||||
prefix={
|
||||
inputValue && inputStatus === 'success' ? (
|
||||
<CheckCircle size={14} color="#51E7A8" />
|
||||
) : (
|
||||
content
|
||||
<Tooltip title="Enter time in format (e.g., 1m, 2h, 3d, 4w)">
|
||||
<Clock size={14} />
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
arrow={false}
|
||||
trigger="click"
|
||||
open={open}
|
||||
onOpenChange={handleOpenChange}
|
||||
style={{
|
||||
padding: 0,
|
||||
}}
|
||||
>
|
||||
<Input
|
||||
className="timeSelection-input"
|
||||
type="text"
|
||||
status={inputValue && inputStatus === 'error' ? 'error' : ''}
|
||||
placeholder={
|
||||
isInputFocused
|
||||
? 'Time Format (1m or 2h or 3d or 4w)'
|
||||
: selectedTimePlaceholderValue
|
||||
}
|
||||
value={inputValue}
|
||||
onFocus={handleFocus}
|
||||
onClick={handleFocus}
|
||||
onBlur={handleBlur}
|
||||
onChange={handleInputChange}
|
||||
data-1p-ignore
|
||||
prefix={getInputPrefix()}
|
||||
suffix={
|
||||
<div className="time-input-suffix">
|
||||
{!!isTimezoneOverridden && activeTimezoneOffset && (
|
||||
<div className="timezone-badge" onClick={handleTimezoneHintClick}>
|
||||
<span>{activeTimezoneOffset}</span>
|
||||
</div>
|
||||
)}
|
||||
<ChevronDown
|
||||
size={14}
|
||||
className="cursor-pointer time-input-suffix-icon-badge"
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
handleViewChange('datetime');
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
}
|
||||
/>
|
||||
</Popover>
|
||||
</Tooltip>
|
||||
suffix={
|
||||
<>
|
||||
{!!isTimezoneOverridden && activeTimezoneOffset && (
|
||||
<div className="timezone-badge" onClick={handleTimezoneHintClick}>
|
||||
<span>{activeTimezoneOffset}</span>
|
||||
</div>
|
||||
)}
|
||||
<ChevronDown
|
||||
size={14}
|
||||
onClick={(): void => handleViewChange('datetime')}
|
||||
/>
|
||||
</>
|
||||
}
|
||||
/>
|
||||
</Popover>
|
||||
|
||||
{inputStatus === 'error' && inputErrorMessage && (
|
||||
<Typography.Title level={5} className="valid-format-error">
|
||||
{inputErrorMessage}
|
||||
@@ -460,8 +412,7 @@ CustomTimePicker.defaultProps = {
|
||||
customDateTimeVisible: false,
|
||||
setCustomDTPickerVisible: noop,
|
||||
onCustomDateHandler: noop,
|
||||
onGoLive: noop,
|
||||
handleGoLive: noop,
|
||||
onCustomTimeStatusUpdate: noop,
|
||||
onExitLiveLogs: noop,
|
||||
showLiveLogs: false,
|
||||
onTimeChange: undefined,
|
||||
};
|
||||
|
||||
@@ -4,30 +4,21 @@ import { Color } from '@signozhq/design-tokens';
|
||||
import { Button } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import DatePickerV2 from 'components/DatePickerV2/DatePickerV2';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
|
||||
import {
|
||||
CustomTimeType,
|
||||
LexicalContext,
|
||||
Option,
|
||||
RelativeDurationSuggestionOptions,
|
||||
Time,
|
||||
} from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import dayjs from 'dayjs';
|
||||
import { Clock, PenLine } from 'lucide-react';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import {
|
||||
Dispatch,
|
||||
SetStateAction,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { Dispatch, SetStateAction, useMemo } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { getCustomTimeRanges } from 'utils/customTimeRangeUtils';
|
||||
|
||||
import RangePickerModal from './RangePickerModal';
|
||||
import TimezonePicker from './TimezonePicker';
|
||||
|
||||
interface CustomTimePickerPopoverContentProps {
|
||||
@@ -40,21 +31,16 @@ interface CustomTimePickerPopoverContentProps {
|
||||
lexicalContext?: LexicalContext,
|
||||
) => void;
|
||||
onSelectHandler: (label: string, value: string) => void;
|
||||
onGoLive: () => void;
|
||||
handleGoLive: () => void;
|
||||
selectedTime: string;
|
||||
activeView: 'datetime' | 'timezone';
|
||||
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
|
||||
isOpenedFromFooter: boolean;
|
||||
setIsOpenedFromFooter: Dispatch<SetStateAction<boolean>>;
|
||||
onExitLiveLogs: () => void;
|
||||
}
|
||||
|
||||
interface RecentlyUsedDateTimeRange {
|
||||
label: string;
|
||||
value: number;
|
||||
timestamp: number;
|
||||
from: string;
|
||||
to: string;
|
||||
onTimeChange?: (
|
||||
interval: Time | CustomTimeType,
|
||||
dateTimeRange?: [number, number],
|
||||
) => void;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
@@ -65,68 +51,22 @@ function CustomTimePickerPopoverContent({
|
||||
setCustomDTPickerVisible,
|
||||
onCustomDateHandler,
|
||||
onSelectHandler,
|
||||
onGoLive,
|
||||
handleGoLive,
|
||||
selectedTime,
|
||||
activeView,
|
||||
setActiveView,
|
||||
isOpenedFromFooter,
|
||||
setIsOpenedFromFooter,
|
||||
onExitLiveLogs,
|
||||
onTimeChange,
|
||||
}: CustomTimePickerPopoverContentProps): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
|
||||
const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [
|
||||
pathname,
|
||||
]);
|
||||
|
||||
const url = new URLSearchParams(window.location.search);
|
||||
|
||||
let panelTypeFromURL = url.get(QueryParams.panelTypes);
|
||||
|
||||
try {
|
||||
panelTypeFromURL = JSON.parse(panelTypeFromURL as string);
|
||||
} catch {
|
||||
// fallback → leave as-is
|
||||
}
|
||||
|
||||
const isLogsListView =
|
||||
panelTypeFromURL !== 'table' && panelTypeFromURL !== 'graph'; // we do not select list view in the url
|
||||
|
||||
const { timezone } = useTimezone();
|
||||
const activeTimezoneOffset = timezone.offset;
|
||||
|
||||
const [recentlyUsedTimeRanges, setRecentlyUsedTimeRanges] = useState<
|
||||
RecentlyUsedDateTimeRange[]
|
||||
>([]);
|
||||
|
||||
const handleExitLiveLogs = useCallback((): void => {
|
||||
if (isLogsExplorerPage) {
|
||||
onExitLiveLogs();
|
||||
}
|
||||
}, [isLogsExplorerPage, onExitLiveLogs]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!customDateTimeVisible) {
|
||||
const customTimeRanges = getCustomTimeRanges();
|
||||
|
||||
const formattedCustomTimeRanges: RecentlyUsedDateTimeRange[] = customTimeRanges.map(
|
||||
(range) => ({
|
||||
label: `${dayjs(range.from)
|
||||
.tz(timezone.value)
|
||||
.format(DATE_TIME_FORMATS.DD_MMM_YYYY_HH_MM_SS)} - ${dayjs(range.to)
|
||||
.tz(timezone.value)
|
||||
.format(DATE_TIME_FORMATS.DD_MMM_YYYY_HH_MM_SS)}`,
|
||||
from: range.from,
|
||||
to: range.to,
|
||||
value: range.timestamp,
|
||||
timestamp: range.timestamp,
|
||||
}),
|
||||
);
|
||||
|
||||
setRecentlyUsedTimeRanges(formattedCustomTimeRanges);
|
||||
}
|
||||
}, [customDateTimeVisible, timezone.value]);
|
||||
|
||||
function getTimeChips(options: Option[]): JSX.Element {
|
||||
return (
|
||||
<div className="relative-date-time-section">
|
||||
@@ -136,7 +76,6 @@ function CustomTimePickerPopoverContent({
|
||||
className="time-btns"
|
||||
key={option.label + option.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onSelectHandler(option.label, option.value);
|
||||
}}
|
||||
>
|
||||
@@ -170,87 +109,53 @@ function CustomTimePickerPopoverContent({
|
||||
);
|
||||
}
|
||||
|
||||
const handleGoLive = (): void => {
|
||||
onGoLive();
|
||||
setIsOpen(false);
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="date-time-popover">
|
||||
{!customDateTimeVisible && (
|
||||
<div className="date-time-options">
|
||||
{isLogsExplorerPage && isLogsListView && (
|
||||
<Button className="data-time-live" type="text" onClick={handleGoLive}>
|
||||
Live
|
||||
</Button>
|
||||
)}
|
||||
{options.map((option) => (
|
||||
<Button
|
||||
type="text"
|
||||
key={option.label + option.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onSelectHandler(option.label, option.value);
|
||||
}}
|
||||
className={cx(
|
||||
'date-time-options-btn',
|
||||
customDateTimeVisible
|
||||
? option.value === 'custom' && 'active'
|
||||
: selectedTime === option.value && 'active',
|
||||
)}
|
||||
>
|
||||
{option.label}
|
||||
</Button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
<div className="date-time-options">
|
||||
{isLogsExplorerPage && (
|
||||
<Button className="data-time-live" type="text" onClick={handleGoLive}>
|
||||
Live
|
||||
</Button>
|
||||
)}
|
||||
{options.map((option) => (
|
||||
<Button
|
||||
type="text"
|
||||
key={option.label + option.value}
|
||||
onClick={(): void => {
|
||||
onSelectHandler(option.label, option.value);
|
||||
}}
|
||||
className={cx(
|
||||
'date-time-options-btn',
|
||||
customDateTimeVisible
|
||||
? option.value === 'custom' && 'active'
|
||||
: selectedTime === option.value && 'active',
|
||||
)}
|
||||
>
|
||||
{option.label}
|
||||
</Button>
|
||||
))}
|
||||
</div>
|
||||
<div
|
||||
className={cx(
|
||||
'relative-date-time',
|
||||
customDateTimeVisible ? 'date-picker' : 'relative-times',
|
||||
selectedTime === 'custom' || customDateTimeVisible
|
||||
? 'date-picker'
|
||||
: 'relative-times',
|
||||
)}
|
||||
>
|
||||
{customDateTimeVisible ? (
|
||||
<DatePickerV2
|
||||
onSetCustomDTPickerVisible={setCustomDTPickerVisible}
|
||||
{selectedTime === 'custom' || customDateTimeVisible ? (
|
||||
<RangePickerModal
|
||||
setCustomDTPickerVisible={setCustomDTPickerVisible}
|
||||
setIsOpen={setIsOpen}
|
||||
onCustomDateHandler={onCustomDateHandler}
|
||||
selectedTime={selectedTime}
|
||||
onTimeChange={onTimeChange}
|
||||
/>
|
||||
) : (
|
||||
<div className="time-selector-container">
|
||||
<div className="relative-times-container">
|
||||
<div className="time-heading">RELATIVE TIMES</div>
|
||||
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
|
||||
</div>
|
||||
|
||||
<div className="recently-used-container">
|
||||
<div className="time-heading">RECENTLY USED</div>
|
||||
<div className="recently-used-range">
|
||||
{recentlyUsedTimeRanges.map((range: RecentlyUsedDateTimeRange) => (
|
||||
<div
|
||||
className="recently-used-range-item"
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
handleExitLiveLogs();
|
||||
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
|
||||
setIsOpen(false);
|
||||
}
|
||||
}}
|
||||
key={range.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
|
||||
setIsOpen(false);
|
||||
}}
|
||||
>
|
||||
{range.label}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div className="relative-times-container">
|
||||
<div className="time-heading">RELATIVE TIMES</div>
|
||||
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
@@ -284,4 +189,8 @@ function CustomTimePickerPopoverContent({
|
||||
);
|
||||
}
|
||||
|
||||
CustomTimePickerPopoverContent.defaultProps = {
|
||||
onTimeChange: undefined,
|
||||
};
|
||||
|
||||
export default CustomTimePickerPopoverContent;
|
||||
|
||||
@@ -119,9 +119,7 @@ const filterAndSortTimezones = (
return createTimezoneEntry(normalizedTz, offset);
});

export const generateTimezoneData = (
includeEtcTimezones = false,
): Timezone[] => {
const generateTimezoneData = (includeEtcTimezones = false): Timezone[] => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const allTimezones = (Intl as any).supportedValuesOf('timeZone');
const timezones: Timezone[] = [];
@@ -1,114 +0,0 @@
|
||||
.date-picker-v2-container {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
.custom-date-time-picker-v2 {
|
||||
padding: 12px;
|
||||
|
||||
.periscope-calendar {
|
||||
border-radius: 4px;
|
||||
border: none !important;
|
||||
background: none !important;
|
||||
padding: 8px 0 !important;
|
||||
}
|
||||
|
||||
.periscope-calendar-day {
|
||||
background: none !important;
|
||||
|
||||
&.periscope-calendar-today {
|
||||
&.text-accent-foreground {
|
||||
color: var(--bg-vanilla-100) !important;
|
||||
}
|
||||
}
|
||||
|
||||
button {
|
||||
&:hover {
|
||||
background-color: var(--bg-robin-500) !important;
|
||||
color: var(--bg-vanilla-100) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-time-selector {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
gap: 16px;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
|
||||
.time-input {
|
||||
border-radius: 4px;
|
||||
border: none !important;
|
||||
background: none !important;
|
||||
padding: 8px 4px !important;
|
||||
color: var(--bg-vanilla-100) !important;
|
||||
|
||||
&::-webkit-calendar-picker-indicator {
|
||||
display: none !important;
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
}
|
||||
|
||||
&:focus {
|
||||
border: none !important;
|
||||
outline: none !important;
|
||||
box-shadow: none !important;
|
||||
}
|
||||
|
||||
&:focus-visible {
|
||||
border: none !important;
|
||||
outline: none !important;
|
||||
box-shadow: none !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-date-time-picker-footer {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
justify-content: flex-end;
|
||||
margin-top: 16px;
|
||||
|
||||
.next-btn {
|
||||
width: 80px;
|
||||
}
|
||||
|
||||
.clear-btn {
|
||||
width: 80px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.invalid-date-range-tooltip {
|
||||
.ant-tooltip-inner {
|
||||
color: var(--bg-sakura-500) !important;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.custom-date-time-picker-v2 {
|
||||
.periscope-calendar-day {
|
||||
&.periscope-calendar-today {
|
||||
&.text-accent-foreground {
|
||||
color: var(--bg-ink-500) !important;
|
||||
}
|
||||
}
|
||||
|
||||
button {
|
||||
&:hover {
|
||||
background-color: var(--bg-robin-500) !important;
|
||||
color: var(--bg-ink-500) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.custom-time-selector {
|
||||
.time-input {
|
||||
color: var(--bg-ink-500) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,311 +0,0 @@
|
||||
import './DatePickerV2.styles.scss';
|
||||
|
||||
import { Calendar } from '@signozhq/calendar';
|
||||
import { Input } from '@signozhq/input';
|
||||
import { Button, Tooltip } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
|
||||
import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import dayjs, { Dayjs } from 'dayjs';
|
||||
import { CornerUpLeft, MoveRight } from 'lucide-react';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import { useRef, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { addCustomTimeRange } from 'utils/customTimeRangeUtils';
|
||||
|
||||
function DatePickerV2({
|
||||
onSetCustomDTPickerVisible,
|
||||
setIsOpen,
|
||||
onCustomDateHandler,
|
||||
}: {
|
||||
onSetCustomDTPickerVisible: (visible: boolean) => void;
|
||||
setIsOpen: (isOpen: boolean) => void;
|
||||
onCustomDateHandler: (
|
||||
dateTimeRange: DateTimeRangeType,
|
||||
lexicalContext?: LexicalContext,
|
||||
) => void;
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const timeInputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
const { timezone } = useTimezone();
|
||||
|
||||
const [selectedDateTimeFor, setSelectedDateTimeFor] = useState<'to' | 'from'>(
|
||||
'from',
|
||||
);
|
||||
|
||||
const [selectedFromDateTime, setSelectedFromDateTime] = useState<Dayjs | null>(
|
||||
dayjs(minTime / 1000_000).tz(timezone.value),
|
||||
);
|
||||
|
||||
const [selectedToDateTime, setSelectedToDateTime] = useState<Dayjs | null>(
|
||||
dayjs(maxTime / 1000_000).tz(timezone.value),
|
||||
);
|
||||
|
||||
const handleNext = (): void => {
|
||||
if (selectedDateTimeFor === 'to') {
|
||||
onCustomDateHandler([selectedFromDateTime, selectedToDateTime]);
|
||||
|
||||
addCustomTimeRange([selectedFromDateTime, selectedToDateTime]);
|
||||
|
||||
setIsOpen(false);
|
||||
onSetCustomDTPickerVisible(false);
|
||||
setSelectedDateTimeFor('from');
|
||||
} else {
|
||||
setSelectedDateTimeFor('to');
|
||||
}
|
||||
};
|
||||
|
||||
const handleDateChange = (date: Date | undefined): void => {
|
||||
if (!date) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (selectedDateTimeFor === 'from') {
|
||||
const prevFromDateTime = selectedFromDateTime;
|
||||
|
||||
const newDate = dayjs(date);
|
||||
|
||||
const updatedFromDateTime = prevFromDateTime
|
||||
? prevFromDateTime
|
||||
.year(newDate.year())
|
||||
.month(newDate.month())
|
||||
.date(newDate.date())
|
||||
: dayjs(date).tz(timezone.value);
|
||||
|
||||
setSelectedFromDateTime(updatedFromDateTime);
|
||||
} else {
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
setSelectedToDateTime((prev) => {
|
||||
const newDate = dayjs(date);
|
||||
|
||||
// Update only the date part, keeping time from existing state
|
||||
return prev
|
||||
? prev.year(newDate.year()).month(newDate.month()).date(newDate.date())
|
||||
: dayjs(date).tz(timezone.value);
|
||||
});
|
||||
}
|
||||
|
||||
// focus the time input
|
||||
timeInputRef?.current?.focus();
|
||||
};
|
||||
|
||||
const handleTimeChange = (time: string): void => {
|
||||
// time should have format HH:mm:ss
|
||||
if (!/^\d{2}:\d{2}:\d{2}$/.test(time)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (selectedDateTimeFor === 'from') {
|
||||
setSelectedFromDateTime((prev) => {
|
||||
if (prev) {
|
||||
return prev
|
||||
.set('hour', parseInt(time.split(':')[0], 10))
|
||||
.set('minute', parseInt(time.split(':')[1], 10))
|
||||
.set('second', parseInt(time.split(':')[2], 10));
|
||||
}
|
||||
|
||||
return prev;
|
||||
});
|
||||
}
|
||||
if (selectedDateTimeFor === 'to') {
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
setSelectedToDateTime((prev) => {
|
||||
if (prev) {
|
||||
return prev
|
||||
.set('hour', parseInt(time.split(':')[0], 10))
|
||||
.set('minute', parseInt(time.split(':')[1], 10))
|
||||
.set('second', parseInt(time.split(':')[2], 10));
|
||||
}
|
||||
|
||||
return prev;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const getDefaultMonth = (): Date => {
|
||||
let defaultDate = null;
|
||||
|
||||
if (selectedDateTimeFor === 'from') {
|
||||
defaultDate = selectedFromDateTime?.toDate();
|
||||
} else if (selectedDateTimeFor === 'to') {
|
||||
defaultDate = selectedToDateTime?.toDate();
|
||||
}
|
||||
|
||||
return defaultDate ?? new Date();
|
||||
};
|
||||
|
||||
const isValidRange = (): boolean => {
|
||||
if (selectedDateTimeFor === 'to') {
|
||||
return selectedToDateTime?.isAfter(selectedFromDateTime) ?? false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
const handleBack = (): void => {
|
||||
setSelectedDateTimeFor('from');
|
||||
};
|
||||
|
||||
const handleHideCustomDTPicker = (): void => {
|
||||
onSetCustomDTPickerVisible(false);
|
||||
};
|
||||
|
||||
const handleSelectDateTimeFor = (selectedDateTimeFor: 'to' | 'from'): void => {
|
||||
setSelectedDateTimeFor(selectedDateTimeFor);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="date-picker-v2-container">
|
||||
<div className="date-time-custom-options-container">
|
||||
<div
|
||||
className="back-btn"
|
||||
onClick={handleHideCustomDTPicker}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
handleHideCustomDTPicker();
|
||||
}
|
||||
}}
|
||||
>
|
||||
<CornerUpLeft size={16} />
|
||||
<span>Back</span>
|
||||
</div>
|
||||
|
||||
<div className="date-time-custom-options">
|
||||
<div
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
handleSelectDateTimeFor('from');
|
||||
}
|
||||
}}
|
||||
className={cx(
|
||||
'date-time-custom-option-from',
|
||||
selectedDateTimeFor === 'from' && 'active',
|
||||
)}
|
||||
onClick={(): void => {
|
||||
handleSelectDateTimeFor('from');
|
||||
}}
|
||||
>
|
||||
<div className="date-time-custom-option-from-title">FROM</div>
|
||||
<div className="date-time-custom-option-from-value">
|
||||
{selectedFromDateTime?.format('YYYY-MM-DD HH:mm:ss')}
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
handleSelectDateTimeFor('to');
|
||||
}
|
||||
}}
|
||||
className={cx(
|
||||
'date-time-custom-option-to',
|
||||
selectedDateTimeFor === 'to' && 'active',
|
||||
)}
|
||||
onClick={(): void => {
|
||||
handleSelectDateTimeFor('to');
|
||||
}}
|
||||
>
|
||||
<div className="date-time-custom-option-to-title">TO</div>
|
||||
<div className="date-time-custom-option-to-value">
|
||||
{selectedToDateTime?.format('YYYY-MM-DD HH:mm:ss')}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="custom-date-time-picker-v2">
|
||||
<Calendar
|
||||
mode="single"
|
||||
required
|
||||
selected={
|
||||
selectedDateTimeFor === 'from'
|
||||
? selectedFromDateTime?.toDate()
|
||||
: selectedToDateTime?.toDate()
|
||||
}
|
||||
key={selectedDateTimeFor + selectedDateTimeFor}
|
||||
onSelect={handleDateChange}
|
||||
defaultMonth={getDefaultMonth()}
|
||||
disabled={(current): boolean => {
|
||||
if (selectedDateTimeFor === 'to') {
|
||||
// disable dates after today and before selectedFromDateTime
|
||||
const currentDay = dayjs(current);
|
||||
return currentDay.isAfter(dayjs()) || false;
|
||||
}
|
||||
|
||||
if (selectedDateTimeFor === 'from') {
|
||||
// disable dates after selectedToDateTime
|
||||
|
||||
return dayjs(current).isAfter(dayjs()) || false;
|
||||
}
|
||||
|
||||
return false;
|
||||
}}
|
||||
className="rounded-md border"
|
||||
navLayout="after"
|
||||
/>
|
||||
|
||||
<div className="custom-time-selector">
|
||||
<label className="text-xs font-normal block" htmlFor="time-picker">
|
||||
Timestamp
|
||||
</label>
|
||||
|
||||
<MoveRight size={16} />
|
||||
|
||||
<div className="time-input-container">
|
||||
<Input
|
||||
type="time"
|
||||
ref={timeInputRef}
|
||||
className="time-input"
|
||||
value={
|
||||
selectedDateTimeFor === 'from'
|
||||
? selectedFromDateTime?.format('HH:mm:ss')
|
||||
: selectedToDateTime?.format('HH:mm:ss')
|
||||
}
|
||||
onChange={(e): void => handleTimeChange(e.target.value)}
|
||||
step="1"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="custom-date-time-picker-footer">
|
||||
{selectedDateTimeFor === 'to' && (
|
||||
<Button
|
||||
className="periscope-btn secondary clear-btn"
|
||||
type="default"
|
||||
onClick={handleBack}
|
||||
>
|
||||
Back
|
||||
</Button>
|
||||
)}
|
||||
<Tooltip
|
||||
title={
|
||||
!isValidRange() ? 'Invalid range: TO date should be after FROM date' : ''
|
||||
}
|
||||
overlayClassName="invalid-date-range-tooltip"
|
||||
>
|
||||
<Button
|
||||
className="periscope-btn primary next-btn"
|
||||
type="primary"
|
||||
onClick={handleNext}
|
||||
disabled={!isValidRange()}
|
||||
>
|
||||
{selectedDateTimeFor === 'from' ? 'Next' : 'Apply'}
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default DatePickerV2;
|
||||
@@ -19,6 +19,20 @@ beforeAll(() => {
});
});

jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});

jest.mock('react-dnd', () => ({
useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]),
@@ -1,79 +0,0 @@
import ErrorContent from 'components/ErrorModal/components/ErrorContent';
import { ReactNode } from 'react';
import APIError from 'types/api/error';

interface ErrorInPlaceProps {
/** The error object to display */
error: APIError;
/** Custom class name */
className?: string;
/** Custom style */
style?: React.CSSProperties;
/** Whether to show a border */
bordered?: boolean;
/** Background color */
background?: string;
/** Padding */
padding?: string | number;
/** Height - defaults to 100% to take available space */
height?: string | number;
/** Width - defaults to 100% to take available space */
width?: string | number;
/** Custom content instead of ErrorContent */
children?: ReactNode;
}

/**
 * ErrorInPlace - A component that renders error content directly in the available space
 * of its parent container. Perfect for displaying errors in widgets, cards, or any
 * container where you want the error to take up the full available space.
 *
 * @example
 * <ErrorInPlace error={error} />
 *
 * @example
 * <ErrorInPlace error={error} bordered background="#f5f5f5" padding={16} />
 */
function ErrorInPlace({
error,
className = '',
style,
bordered = false,
background,
padding = 16,
height = '100%',
width = '100%',
children,
}: ErrorInPlaceProps): JSX.Element {
const containerStyle: React.CSSProperties = {
display: 'flex',
flexDirection: 'column',
width,
height,
padding: typeof padding === 'number' ? `${padding}px` : padding,
backgroundColor: background,
border: bordered ? '1px solid var(--bg-slate-400, #374151)' : 'none',
borderRadius: bordered ? '4px' : '0',
overflow: 'auto',
...style,
};

return (
<div className={`error-in-place ${className}`.trim()} style={containerStyle}>
{children || <ErrorContent error={error} />}
</div>
);
}

ErrorInPlace.defaultProps = {
className: undefined,
style: undefined,
bordered: undefined,
background: undefined,
padding: undefined,
height: undefined,
width: undefined,
children: undefined,
};

export default ErrorInPlace;
@@ -1,4 +1,4 @@
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
import APIError from 'types/api/error';

import ErrorModal from './ErrorModal';
@@ -56,8 +56,9 @@ describe('ErrorModal Component', () => {

// Click the close button
const closeButton = screen.getByTestId('close-button');
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(closeButton);
act(() => {
fireEvent.click(closeButton);
});

// Check if onClose was called
expect(onCloseMock).toHaveBeenCalledTimes(1);
@@ -148,8 +149,9 @@ it('should open the modal when the trigger component is clicked', async () => {

// Click the trigger component
const triggerButton = screen.getByText('Open Error Modal');
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(triggerButton);
act(() => {
fireEvent.click(triggerButton);
});

// Check if the modal is displayed
expect(screen.getByText('An error occurred')).toBeInTheDocument();
@@ -168,15 +170,18 @@ it('should close the modal when the onCancel event is triggered', async () => {

// Click the trigger component
const triggerButton = screen.getByText('error');
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(triggerButton);
act(() => {
fireEvent.click(triggerButton);
});

await waitFor(() => {
expect(screen.getByText('An error occurred')).toBeInTheDocument();
});

// Trigger the onCancel event
await user.click(screen.getByTestId('close-button'));
act(() => {
fireEvent.click(screen.getByTestId('close-button'));
});

// Check if the modal is closed
expect(onCloseMock).toHaveBeenCalledTimes(1);
@@ -1,33 +0,0 @@
/* eslint-disable react/jsx-props-no-spreading */
import { Popover, PopoverProps } from 'antd';
import { ReactNode } from 'react';

interface ErrorPopoverProps extends Omit<PopoverProps, 'content'> {
/** Content to display in the popover */
content: ReactNode;
/** Element that triggers the popover */
children: ReactNode;
}

/**
 * ErrorPopover - A clean wrapper around Ant Design's Popover
 * that provides a simple interface for displaying content in a popover.
 *
 * @example
 * <ErrorPopover content={<ErrorContent error={error} />}>
 *   <CircleX />
 * </ErrorPopover>
 */
function ErrorPopover({
content,
children,
...popoverProps
}: ErrorPopoverProps): JSX.Element {
return (
<Popover content={content} {...popoverProps}>
{children}
</Popover>
);
}

export default ErrorPopover;
@@ -1,15 +0,0 @@
import { Typography } from 'antd';

function AnnouncementsModal(): JSX.Element {
return (
<div className="announcements-modal-container">
<div className="announcements-modal-container-header">
<Typography.Text className="announcements-modal-title">
Announcements
</Typography.Text>
</div>
</div>
);
}

export default AnnouncementsModal;
@@ -1,160 +0,0 @@
import { toast } from '@signozhq/sonner';
import { Button, Input, Radio, RadioChangeEvent, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { handleContactSupport } from 'pages/Integrations/utils';
import { useCallback, useEffect, useState } from 'react';
import { useLocation } from 'react-router-dom';

function FeedbackModal({ onClose }: { onClose: () => void }): JSX.Element {
  const [activeTab, setActiveTab] = useState('feedback');
  const [feedback, setFeedback] = useState('');
  const location = useLocation();
  const { isCloudUser: isCloudUserVal } = useGetTenantLicense();
  const [isLoading, setIsLoading] = useState(false);

  const handleSubmit = async (): Promise<void> => {
    setIsLoading(true);

    let entityName = 'Feedback';
    if (activeTab === 'reportBug') {
      entityName = 'Bug report';
    } else if (activeTab === 'featureRequest') {
      entityName = 'Feature request';
    }

    logEvent('Feedback: Submitted', {
      data: feedback,
      type: activeTab,
      page: location.pathname,
    })
      .then(() => {
        onClose();

        toast.success(`${entityName} submitted successfully`, {
          position: 'top-right',
        });
      })
      .catch(() => {
        console.error(`Failed to submit ${entityName}`);
        toast.error(`Failed to submit ${entityName}`, {
          position: 'top-right',
        });
      })
      .finally(() => {
        setIsLoading(false);
      });
  };

  useEffect(
    () => (): void => {
      setFeedback('');
      setActiveTab('feedback');
    },
    [],
  );

  const items = [
    {
      label: (
        <div className="feedback-modal-tab-label">
          <div className="tab-icon dot feedback-tab" />
          Feedback
        </div>
      ),
      key: 'feedback',
      value: 'feedback',
    },
    {
      label: (
        <div className="feedback-modal-tab-label">
          <div className="tab-icon dot bug-tab" />
          Report a bug
        </div>
      ),
      key: 'reportBug',
      value: 'reportBug',
    },
    {
      label: (
        <div className="feedback-modal-tab-label">
          <div className="tab-icon dot feature-tab" />
          Feature request
        </div>
      ),
      key: 'featureRequest',
      value: 'featureRequest',
    },
  ];

  const handleFeedbackChange = (
    e: React.ChangeEvent<HTMLTextAreaElement>,
  ): void => {
    setFeedback(e.target.value);
  };

  const handleContactSupportClick = useCallback((): void => {
    handleContactSupport(isCloudUserVal);
  }, [isCloudUserVal]);

  return (
    <div className="feedback-modal-container">
      <div className="feedback-modal-header">
        <Radio.Group
          value={activeTab}
          defaultValue={activeTab}
          optionType="button"
          className="feedback-modal-tabs"
          options={items}
          onChange={(e: RadioChangeEvent): void => setActiveTab(e.target.value)}
        />
      </div>
      <div className="feedback-modal-content">
        <div className="feedback-modal-content-header">
          <Input.TextArea
            placeholder="Write your feedback here..."
            rows={6}
            required
            className="feedback-input"
            value={feedback}
            onChange={handleFeedbackChange}
          />
        </div>
      </div>

      <div className="feedback-modal-content-footer">
        <Button
          className="periscope-btn primary"
          type="primary"
          onClick={handleSubmit}
          loading={isLoading}
          disabled={feedback.length === 0}
        >
          Submit
        </Button>
        <div className="feedback-modal-content-footer-info-text">
          <Typography.Text>
            Have a specific issue?{' '}
            <Typography.Link
              className="contact-support-link"
              onClick={handleContactSupportClick}
            >
              Contact Support{' '}
            </Typography.Link>
            or{' '}
            <a
              href="https://signoz.io/docs/introduction/"
              target="_blank"
              rel="noreferrer"
              className="read-docs-link"
            >
              Read our docs
            </a>
          </Typography.Text>
        </div>
      </div>
    </div>
  );
}

export default FeedbackModal;
@@ -1,253 +0,0 @@
.header-right-section-container {
  display: flex;
  align-items: center;
  gap: 8px;
}

.share-modal-content,
.feedback-modal-container {
  display: flex;
  flex-direction: column;
  gap: 16px;
  padding: 12px;
  width: 460px;

  border-radius: 4px;
  box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
  backdrop-filter: blur(20px);

  .absolute-relative-time-toggler-container {
    display: flex;
    gap: 8px;
    align-items: center;
    justify-content: space-between;

    .absolute-relative-time-toggler-label {
      color: var(--bg-vanilla-100);
      font-size: 13px;
      font-style: normal;
      font-weight: 500;
      line-height: 20px; /* 142.857% */
      letter-spacing: -0.07px;
    }
  }

  .absolute-relative-time-toggler {
    display: flex;
    gap: 4px;
    align-items: center;
  }

  .absolute-relative-time-error {
    font-size: 12px;
    color: var(--bg-amber-600);
  }

  .share-link {
    .url-share-container {
      display: flex;
      align-items: flex-start;
      justify-content: space-between;
      gap: 16px;

      .url-share-container-header {
        display: flex;
        flex-direction: column;
        gap: 4px;

        .url-share-title,
        .url-share-sub-title {
          color: var(--bg-vanilla-100);
          font-size: 13px;
          font-style: normal;
          font-weight: 500;
          line-height: 20px; /* 142.857% */
          letter-spacing: -0.07px;
        }

        .url-share-sub-title {
          font-size: 12px;
          color: var(--bg-vanilla-300);
          font-weight: 400;
          line-height: 18px;
          letter-spacing: -0.06px;
        }
      }
    }
  }
}

.feedback-modal-container {
  .feedback-modal-tabs {
    width: 100%;
    display: flex;

    .ant-radio-button-wrapper {
      flex: 1;
      margin: 0px !important;

      border: 1px solid var(--bg-slate-400);

      &:before {
        display: none;
      }

      .ant-radio-button-checked {
        background-color: var(--bg-slate-400);
      }
    }

    .feedback-modal-tab-label {
      display: flex;
      align-items: center;
      gap: 8px;

      .tab-icon {
        width: 6px;
        height: 6px;
      }

      .feedback-tab {
        background-color: var(--bg-sakura-500);
      }

      .bug-tab {
        background-color: var(--bg-amber-500);
      }

      .feature-tab {
        background-color: var(--bg-robin-500);
      }
    }

    .ant-tabs-nav-list {
      .ant-tabs-tab {
        padding: 6px 16px;

        border-radius: 2px;
        background: var(--bg-ink-400);
        box-shadow: 0 0 8px 0 rgba(0, 0, 0, 0.1);
        border: 1px solid var(--bg-slate-400);

        margin: 0 !important;

        .ant-tabs-tab-btn {
          font-size: 12px;
          font-style: normal;
          font-weight: 400;
          line-height: 20px; /* 166.667% */
          letter-spacing: -0.06px;
        }

        &-active {
          background: var(--bg-slate-400);
          color: var(--bg-vanilla-100);

          border-bottom: none !important;

          .ant-tabs-tab-btn {
            color: var(--bg-vanilla-100);
          }
        }
      }
    }
  }

  .feedback-modal-content {
    display: flex;
    flex-direction: column;
    gap: 16px;

    .feedback-input {
      resize: none;

      text-area {
        resize: none;
      }
    }

    .feedback-content-include-console-logs {
      display: flex;
      align-items: center;
      justify-content: space-between;
      gap: 8px;
    }
  }

  .feedback-modal-content-footer {
    display: flex;
    flex-direction: column;
    gap: 16px;

    .feedback-modal-content-footer-info-text {
      font-size: 12px;
      color: var(--bg-vanilla-400, #c0c1c3);
      text-align: center;

      /* button/ small */
      font-family: Inter;
      font-size: 12px;
      font-style: normal;
      font-weight: 500;
      line-height: 24px; /* 200% */

      .contact-support-link,
      .read-docs-link {
        color: var(--bg-robin-400);
        font-weight: 500;
        font-size: 12px;
      }
    }
  }
}

.lightMode {
  .share-modal-content,
  .feedback-modal-container {
    .absolute-relative-time-toggler-container {
      .absolute-relative-time-toggler-label {
        color: var(--bg-ink-400);
      }
    }

    .share-link {
      .url-share-container {
        .url-share-container-header {
          .url-share-title,
          .url-share-sub-title {
            color: var(--bg-ink-400);
          }

          .url-share-sub-title {
            color: var(--bg-ink-300);
          }
        }
      }
    }
  }

  .feedback-modal-container {
    .feedback-modal-tabs {
      .ant-radio-button-wrapper {
        flex: 1;
        margin: 0px !important;

        border: 1px solid var(--bg-vanilla-300);

        &:before {
          display: none;
        }

        .ant-radio-button-checked {
          background-color: var(--bg-vanilla-300);
        }
      }
    }

    .feedback-modal-content-footer {
      .feedback-modal-content-footer-info-text {
        color: var(--bg-slate-400);
      }
    }
  }
}
@@ -1,142 +0,0 @@
import './HeaderRightSection.styles.scss';

import { Button, Popover } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { Globe, Inbox, SquarePen } from 'lucide-react';
import { useCallback, useState } from 'react';
import { useLocation } from 'react-router-dom';

import AnnouncementsModal from './AnnouncementsModal';
import FeedbackModal from './FeedbackModal';
import ShareURLModal from './ShareURLModal';

interface HeaderRightSectionProps {
  enableAnnouncements: boolean;
  enableShare: boolean;
  enableFeedback: boolean;
}

function HeaderRightSection({
  enableAnnouncements,
  enableShare,
  enableFeedback,
}: HeaderRightSectionProps): JSX.Element | null {
  const location = useLocation();

  const [openFeedbackModal, setOpenFeedbackModal] = useState(false);
  const [openShareURLModal, setOpenShareURLModal] = useState(false);
  const [openAnnouncementsModal, setOpenAnnouncementsModal] = useState(false);

  const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense();

  const handleOpenFeedbackModal = useCallback((): void => {
    logEvent('Feedback: Clicked', {
      page: location.pathname,
    });

    setOpenFeedbackModal(true);
    setOpenShareURLModal(false);
    setOpenAnnouncementsModal(false);
  }, [location.pathname]);

  const handleOpenShareURLModal = useCallback((): void => {
    logEvent('Share: Clicked', {
      page: location.pathname,
    });

    setOpenShareURLModal(true);
    setOpenFeedbackModal(false);
    setOpenAnnouncementsModal(false);
  }, [location.pathname]);

  const handleCloseFeedbackModal = (): void => {
    setOpenFeedbackModal(false);
  };

  const handleOpenFeedbackModalChange = (open: boolean): void => {
    setOpenFeedbackModal(open);
  };

  const handleOpenAnnouncementsModalChange = (open: boolean): void => {
    setOpenAnnouncementsModal(open);
  };

  const handleOpenShareURLModalChange = (open: boolean): void => {
    setOpenShareURLModal(open);
  };

  const isLicenseEnabled = isEnterpriseSelfHostedUser || isCloudUser;

  return (
    <div className="header-right-section-container">
      {enableFeedback && isLicenseEnabled && (
        <Popover
          rootClassName="header-section-popover-root"
          className="shareable-link-popover"
          placement="bottomRight"
          content={<FeedbackModal onClose={handleCloseFeedbackModal} />}
          destroyTooltipOnHide
          arrow={false}
          trigger="click"
          open={openFeedbackModal}
          onOpenChange={handleOpenFeedbackModalChange}
        >
          <Button
            className="share-feedback-btn periscope-btn ghost"
            icon={<SquarePen size={14} />}
            onClick={handleOpenFeedbackModal}
          />
        </Popover>
      )}

      {enableAnnouncements && (
        <Popover
          rootClassName="header-section-popover-root"
          className="shareable-link-popover"
          placement="bottomRight"
          content={<AnnouncementsModal />}
          arrow={false}
          destroyTooltipOnHide
          trigger="click"
          open={openAnnouncementsModal}
          onOpenChange={handleOpenAnnouncementsModalChange}
        >
          <Button
            icon={<Inbox size={14} />}
            className="periscope-btn ghost announcements-btn"
            onClick={(): void => {
              logEvent('Announcements: Clicked', {
                page: location.pathname,
              });
            }}
          />
        </Popover>
      )}

      {enableShare && (
        <Popover
          rootClassName="header-section-popover-root"
          className="shareable-link-popover"
          placement="bottomRight"
          content={<ShareURLModal />}
          open={openShareURLModal}
          destroyTooltipOnHide
          arrow={false}
          trigger="click"
          onOpenChange={handleOpenShareURLModalChange}
        >
          <Button
            className="share-link-btn periscope-btn ghost"
            icon={<Globe size={14} />}
            onClick={handleOpenShareURLModal}
          >
            Share
          </Button>
        </Popover>
      )}
    </div>
  );
}

export default HeaderRightSection;
@@ -1,171 +0,0 @@
import { Color } from '@signozhq/design-tokens';
import { Button, Switch, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import useUrlQuery from 'hooks/useUrlQuery';
import GetMinMax from 'lib/getMinMax';
import { Check, Info, Link2 } from 'lucide-react';
import { useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { matchPath, useLocation } from 'react-router-dom';
import { useCopyToClipboard } from 'react-use';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';

const routesToBeSharedWithTime = [
  ROUTES.LOGS_EXPLORER,
  ROUTES.TRACES_EXPLORER,
  ROUTES.METRICS_EXPLORER_EXPLORER,
  ROUTES.METER_EXPLORER,
];

function ShareURLModal(): JSX.Element {
  const urlQuery = useUrlQuery();
  const location = useLocation();
  const { selectedTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );

  const [enableAbsoluteTime, setEnableAbsoluteTime] = useState(
    selectedTime !== 'custom',
  );

  const startTime = urlQuery.get(QueryParams.startTime);
  const endTime = urlQuery.get(QueryParams.endTime);
  const relativeTime = urlQuery.get(QueryParams.relativeTime);

  const [isURLCopied, setIsURLCopied] = useState(false);
  const [, handleCopyToClipboard] = useCopyToClipboard();

  const isValidateRelativeTime = useMemo(
    () =>
      selectedTime !== 'custom' ||
      (startTime && endTime && selectedTime === 'custom'),
    [startTime, endTime, selectedTime],
  );

  const shareURLWithTime = useMemo(
    () => relativeTime || (startTime && endTime),
    [relativeTime, startTime, endTime],
  );

  const isRouteToBeSharedWithTime = useMemo(
    () =>
      routesToBeSharedWithTime.some((route) =>
        matchPath(location.pathname, { path: route, exact: true }),
      ),
    [location.pathname],
  );

  // eslint-disable-next-line sonarjs/cognitive-complexity
  const processURL = (): string => {
    let currentUrl = window.location.href;
    const isCustomTime = !!(startTime && endTime && selectedTime === 'custom');

    if (shareURLWithTime || isRouteToBeSharedWithTime) {
      if (enableAbsoluteTime || isCustomTime) {
        if (selectedTime === 'custom') {
          if (startTime && endTime) {
            urlQuery.set(QueryParams.startTime, startTime.toString());
            urlQuery.set(QueryParams.endTime, endTime.toString());
          }
        } else {
          const { minTime, maxTime } = GetMinMax(selectedTime);

          urlQuery.set(QueryParams.startTime, minTime.toString());
          urlQuery.set(QueryParams.endTime, maxTime.toString());
        }

        urlQuery.delete(QueryParams.relativeTime);

        currentUrl = `${window.location.origin}${
          location.pathname
        }?${urlQuery.toString()}`;
      } else {
        urlQuery.delete(QueryParams.startTime);
        urlQuery.delete(QueryParams.endTime);

        urlQuery.set(QueryParams.relativeTime, selectedTime);
        currentUrl = `${window.location.origin}${
          location.pathname
        }?${urlQuery.toString()}`;
      }
    }

    return currentUrl;
  };

  const handleCopyURL = (): void => {
    const URL = processURL();

    handleCopyToClipboard(URL);
    setIsURLCopied(true);

    logEvent('Share: Copy link clicked', {
      page: location.pathname,
      URL,
    });

    setTimeout(() => {
      setIsURLCopied(false);
    }, 1000);
  };

  return (
    <div className="share-modal-content">
      {(shareURLWithTime || isRouteToBeSharedWithTime) && (
        <>
          <div className="absolute-relative-time-toggler-container">
            <Typography.Text className="absolute-relative-time-toggler-label">
              Enable absolute time
            </Typography.Text>

            <div className="absolute-relative-time-toggler">
              {!isValidateRelativeTime && (
                <Info size={14} color={Color.BG_AMBER_600} />
              )}
              <Switch
                checked={enableAbsoluteTime}
                disabled={!isValidateRelativeTime}
                size="small"
                onChange={(): void => {
                  setEnableAbsoluteTime((prev) => !prev);
                }}
              />
            </div>
          </div>

          {!isValidateRelativeTime && (
            <div className="absolute-relative-time-error">
              Please select / enter valid relative time to toggle.
            </div>
          )}
        </>
      )}

      <div className="share-link">
        <div className="url-share-container">
          <div className="url-share-container-header">
            <Typography.Text className="url-share-title">
              Share page link
            </Typography.Text>
            <Typography.Text className="url-share-sub-title">
              Share the current page link with your team member
            </Typography.Text>
          </div>

          <Button
            className="periscope-btn secondary"
            onClick={handleCopyURL}
            icon={isURLCopied ? <Check size={14} /> : <Link2 size={14} />}
          >
            Copy page link
          </Button>
        </div>
      </div>
    </div>
  );
}

export default ShareURLModal;
@@ -1,29 +0,0 @@
import { render, screen } from '@testing-library/react';

import AnnouncementsModal from '../AnnouncementsModal';

describe('AnnouncementsModal', () => {
  it('should render announcements modal with title', () => {
    render(<AnnouncementsModal />);

    expect(screen.getByText('Announcements')).toBeInTheDocument();
  });

  it('should have proper structure and classes', () => {
    render(<AnnouncementsModal />);

    const container = screen
      .getByText('Announcements')
      .closest('.announcements-modal-container');
    expect(container).toBeInTheDocument();

    const headerContainer = screen
      .getByText('Announcements')
      .closest('.announcements-modal-container-header');
    expect(headerContainer).toBeInTheDocument();
  });

  it('should render without any errors', () => {
    expect(() => render(<AnnouncementsModal />)).not.toThrow();
  });
});
@@ -1,274 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
// Mock dependencies before imports
import { toast } from '@signozhq/sonner';
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import logEvent from 'api/common/logEvent';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { handleContactSupport } from 'pages/Integrations/utils';
import { useLocation } from 'react-router-dom';

import FeedbackModal from '../FeedbackModal';

jest.mock('api/common/logEvent', () => ({
  __esModule: true,
  default: jest.fn(() => Promise.resolve()),
}));

jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useLocation: jest.fn(),
}));

jest.mock('@signozhq/sonner', () => ({
  toast: {
    success: jest.fn(),
    error: jest.fn(),
  },
}));

jest.mock('hooks/useGetTenantLicense', () => ({
  useGetTenantLicense: jest.fn(),
}));

jest.mock('pages/Integrations/utils', () => ({
  handleContactSupport: jest.fn(),
}));

const mockLogEvent = logEvent as jest.MockedFunction<typeof logEvent>;
const mockUseLocation = useLocation as jest.Mock;
const mockUseGetTenantLicense = useGetTenantLicense as jest.Mock;
const mockHandleContactSupport = handleContactSupport as jest.Mock;
const mockToast = toast as jest.Mocked<typeof toast>;

const mockOnClose = jest.fn();

const mockLocation = {
  pathname: '/test-path',
};

describe('FeedbackModal', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    mockUseLocation.mockReturnValue(mockLocation);
    mockUseGetTenantLicense.mockReturnValue({
      isCloudUser: false,
    });
    mockToast.success.mockClear();
    mockToast.error.mockClear();
  });

  it('should render feedback modal with all tabs', () => {
    render(<FeedbackModal onClose={mockOnClose} />);

    expect(screen.getByText('Feedback')).toBeInTheDocument();
    expect(screen.getByText('Report a bug')).toBeInTheDocument();
    expect(screen.getByText('Feature request')).toBeInTheDocument();
    expect(
      screen.getByPlaceholderText('Write your feedback here...'),
    ).toBeInTheDocument();
    expect(screen.getByRole('button', { name: /submit/i })).toBeInTheDocument();
  });

  it('should switch between tabs when clicked', async () => {
    const user = userEvent.setup();
    render(<FeedbackModal onClose={mockOnClose} />);

    // Initially, feedback radio should be active
    const feedbackRadio = screen.getByRole('radio', { name: 'Feedback' });
    expect(feedbackRadio).toBeChecked();

    const bugTab = screen.getByText('Report a bug');
    await user.click(bugTab);

    // Bug radio should now be active
    const bugRadio = screen.getByRole('radio', { name: 'Report a bug' });
    expect(bugRadio).toBeChecked();

    const featureTab = screen.getByText('Feature request');
    await user.click(featureTab);

    // Feature radio should now be active
    const featureRadio = screen.getByRole('radio', { name: 'Feature request' });
    expect(featureRadio).toBeChecked();
  });

  it('should update feedback text when typing in textarea', async () => {
    const user = userEvent.setup();
    render(<FeedbackModal onClose={mockOnClose} />);

    const textarea = screen.getByPlaceholderText('Write your feedback here...');
    const testFeedback = 'This is my feedback';

    await user.type(textarea, testFeedback);

    expect(textarea).toHaveValue(testFeedback);
  });

  it('should submit feedback and log event when submit button is clicked', async () => {
    const user = userEvent.setup();
    render(<FeedbackModal onClose={mockOnClose} />);

    const textarea = screen.getByPlaceholderText('Write your feedback here...');
    const submitButton = screen.getByRole('button', { name: /submit/i });
    const testFeedback = 'Test feedback content';

    await user.type(textarea, testFeedback);
    await user.click(submitButton);

    expect(mockLogEvent).toHaveBeenCalledWith('Feedback: Submitted', {
      data: testFeedback,
      type: 'feedback',
      page: mockLocation.pathname,
    });
    expect(mockOnClose).toHaveBeenCalled();
    expect(mockToast.success).toHaveBeenCalledWith(
      'Feedback submitted successfully',
      {
        position: 'top-right',
      },
    );
  });

  it('should submit bug report with correct type', async () => {
    const user = userEvent.setup();
    render(<FeedbackModal onClose={mockOnClose} />);

    // Switch to bug report tab
    const bugTab = screen.getByText('Report a bug');
    await user.click(bugTab);

    // Verify bug report radio is now active
    const bugRadio = screen.getByRole('radio', { name: 'Report a bug' });
    expect(bugRadio).toBeChecked();

    const textarea = screen.getByPlaceholderText('Write your feedback here...');
    const submitButton = screen.getByRole('button', { name: /submit/i });
    const testFeedback = 'This is a bug report';

    await user.type(textarea, testFeedback);
    await user.click(submitButton);

    expect(mockLogEvent).toHaveBeenCalledWith('Feedback: Submitted', {
      data: testFeedback,
      type: 'reportBug',
      page: mockLocation.pathname,
    });
    expect(mockOnClose).toHaveBeenCalled();
    expect(mockToast.success).toHaveBeenCalledWith(
      'Bug report submitted successfully',
      {
        position: 'top-right',
      },
    );
  });

  it('should submit feature request with correct type', async () => {
    const user = userEvent.setup();
    render(<FeedbackModal onClose={mockOnClose} />);

    // Switch to feature request tab
    const featureTab = screen.getByText('Feature request');
    await user.click(featureTab);

    // Verify feature request radio is now active
    const featureRadio = screen.getByRole('radio', { name: 'Feature request' });
    expect(featureRadio).toBeChecked();

    const textarea = screen.getByPlaceholderText('Write your feedback here...');
    const submitButton = screen.getByRole('button', { name: /submit/i });
    const testFeedback = 'This is a feature request';

    await user.type(textarea, testFeedback);
    await user.click(submitButton);

    expect(mockLogEvent).toHaveBeenCalledWith('Feedback: Submitted', {
      data: testFeedback,
      type: 'featureRequest',
      page: mockLocation.pathname,
    });
    expect(mockOnClose).toHaveBeenCalled();
    expect(mockToast.success).toHaveBeenCalledWith(
      'Feature request submitted successfully',
      {
        position: 'top-right',
      },
    );
  });

  it('should call handleContactSupport when contact support link is clicked', async () => {
    const user = userEvent.setup();
    const isCloudUser = true;
    mockUseGetTenantLicense.mockReturnValue({
      isCloudUser,
    });

    render(<FeedbackModal onClose={mockOnClose} />);

    const contactSupportLink = screen.getByText('Contact Support');
    await user.click(contactSupportLink);

    expect(mockHandleContactSupport).toHaveBeenCalledWith(isCloudUser);
  });

  it('should handle non-cloud user for contact support', async () => {
    const user = userEvent.setup();
    const isCloudUser = false;
    mockUseGetTenantLicense.mockReturnValue({
      isCloudUser,
    });

    render(<FeedbackModal onClose={mockOnClose} />);

    const contactSupportLink = screen.getByText('Contact Support');
    await user.click(contactSupportLink);

    expect(mockHandleContactSupport).toHaveBeenCalledWith(isCloudUser);
  });

  it('should render docs link with correct attributes', () => {
    render(<FeedbackModal onClose={mockOnClose} />);

    const docsLink = screen.getByText('Read our docs');
    expect(docsLink).toHaveAttribute(
      'href',
      'https://signoz.io/docs/introduction/',
    );
    expect(docsLink).toHaveAttribute('target', '_blank');
    expect(docsLink).toHaveAttribute('rel', 'noreferrer');
  });

  it('should reset form state when component unmounts', async () => {
    const user = userEvent.setup();

    // Render component
    const { unmount } = render(<FeedbackModal onClose={mockOnClose} />);

    // Change the form state first
    const textArea = screen.getByPlaceholderText('Write your feedback here...');
    await user.type(textArea, 'Some feedback text');

    // Change the active tab
    const bugTab = screen.getByText('Report a bug');
    await user.click(bugTab);

    // Verify state has changed
    expect(textArea).toHaveValue('Some feedback text');

    // Unmount the component - this should trigger cleanup
    unmount();

    // Re-render the component to verify state was reset
    render(<FeedbackModal onClose={mockOnClose} />);

    // Verify form state is reset
    const newTextArea = screen.getByPlaceholderText(
      'Write your feedback here...',
    );
    expect(newTextArea).toHaveValue(''); // Should be empty

    // Verify active radio is reset to default (Feedback radio)
    const feedbackRadio = screen.getByRole('radio', { name: 'Feedback' });
    expect(feedbackRadio).toBeChecked();
  });
});
Some files were not shown because too many files have changed in this diff.