Compare commits


6 Commits

Author SHA1 Message Date
SagarRajput-7
1e02bb77a5 feat: changed query param name 2025-08-20 11:02:47 +05:30
SagarRajput-7
5f4c1d151a feat: added user-friendly format to dashboard variable url 2025-08-20 11:02:47 +05:30
SagarRajput-7
4fe1d1b3f7 feat: resolved conflicts 2025-08-20 11:02:16 +05:30
SagarRajput-7
11e4971db4 feat: added variable in url and made dashboard sync around that and sharable (#7944)
* feat: added dynamic variable to the dashboard details

* feat: added new component to existing variables

* feat: added enhancement to multiselect and select for dyn-variables

* feat: added refetch method between all dynamic-variables

* feat: correct error handling

* feat: correct error handling

* feat: enforced non-empty selectedvalues and default value

* feat: added client and server side searches

* feat: retry on error

* feat: correct error handling

* feat: handle default value in existing variables

* feat: lowercase the source for payload

* feat: fixed the incorrect assignment of active indices

* feat: improved handling of all option

* feat: improved the ALL option visuals

* feat: handled default value enforcement in existing variables

* feat: added unix time to values call

* feat: added incomplete data message and info to search

* feat: changed dashboard panel call handling with existing variables

* feat: adjusted the response type and data with the new API schema for values

* feat: code refactor

* feat: made dyn-variable option as the default

* feat: added test cases for dyn variable creation and completion

* feat: updated test cases

* feat: added variable in url and made dashboard sync around that and sharable

* feat: added test cases

* feat: added safety check

* feat: enabled url setting on first load itself

* feat: code refactor

* feat: cleared options query param when on dashboard list page
2025-08-20 11:02:16 +05:30
SagarRajput-7
a0ab69ac4c feat: added dynamic variable to the dashboard details (#7755)
* feat: added dynamic variable to the dashboard details

* feat: added new component to existing variables

* feat: added enhancement to multiselect and select for dyn-variables

* feat: added refetch method between all dynamic-variables

* feat: correct error handling

* feat: correct error handling

* feat: enforced non-empty selectedvalues and default value

* feat: added client and server side searches

* feat: retry on error

* feat: correct error handling

* feat: handle default value in existing variables

* feat: lowercase the source for payload

* feat: fixed the incorrect assignment of active indices

* feat: improved handling of all option

* feat: improved the ALL option visuals

* feat: handled default value enforcement in existing variables

* feat: added unix time to values call

* feat: added incomplete data message and info to search

* feat: changed dashboard panel call handling with existing variables

* feat: adjusted the response type and data with the new API schema for values

* feat: code refactor

* feat: made dyn-variable option as the default

* feat: added test cases for dyn variable creation and completion

* feat: updated test cases
2025-08-20 11:02:16 +05:30
SagarRajput-7
972fadf79a feat: added dynamic variables creation flow (#7541)
* feat: added dynamic variables creation flow

* feat: added keys and value apis and hooks

* feat: added api and select component changes

* feat: added keys fetching and preview values

* feat: added dynamic variable to variable items

* feat: handled value persistence and tab switches

* feat: added default value and formed a schema for dyn-variables

* feat: added client and server side searches

* feat: corrected the initial load getfieldKey api

* feat: removed fetch on mount restriction
2025-08-20 11:02:16 +05:30
1276 changed files with 22383 additions and 93876 deletions


@@ -1,6 +1,6 @@
services:
clickhouse:
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: clickhouse
volumes:
- ${PWD}/fs/etc/clickhouse-server/config.d/config.xml:/etc/clickhouse-server/config.d/config.xml
@@ -23,8 +23,6 @@ services:
retries: 3
depends_on:
- zookeeper
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
zookeeper:
image: signoz/zookeeper:3.7.1
container_name: zookeeper
@@ -42,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.0
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.0
container_name: schema-migrator-async
command:
- async


@@ -1,6 +1,6 @@
services:
signoz-otel-collector:
image: signoz/signoz-otel-collector:v0.129.6
image: signoz/signoz-otel-collector:v0.128.2
container_name: signoz-otel-collector-dev
command:
- --config=/etc/otel-collector-config.yaml

.github/CODEOWNERS

@@ -5,45 +5,6 @@
/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
# Dashboard, Alert, Metrics, Service Map, Services
/frontend/src/container/ListOfDashboard/ @srikanthccv
/frontend/src/container/NewDashboard/ @srikanthccv
/frontend/src/pages/DashboardsListPage/ @srikanthccv
/frontend/src/pages/DashboardWidget/ @srikanthccv
/frontend/src/pages/NewDashboard/ @srikanthccv
/frontend/src/providers/Dashboard/ @srikanthccv
# Alerts
/frontend/src/container/AlertHistory/ @srikanthccv
/frontend/src/container/AllAlertChannels/ @srikanthccv
/frontend/src/container/AnomalyAlertEvaluationView/ @srikanthccv
/frontend/src/container/CreateAlertChannels/ @srikanthccv
/frontend/src/container/CreateAlertRule/ @srikanthccv
/frontend/src/container/EditAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertRules/ @srikanthccv
/frontend/src/container/ListAlertRules/ @srikanthccv
/frontend/src/container/TriggeredAlerts/ @srikanthccv
/frontend/src/pages/AlertChannelCreate/ @srikanthccv
/frontend/src/pages/AlertDetails/ @srikanthccv
/frontend/src/pages/AlertHistory/ @srikanthccv
/frontend/src/pages/AlertList/ @srikanthccv
/frontend/src/pages/CreateAlert/ @srikanthccv
/frontend/src/providers/Alert.tsx @srikanthccv
# Metrics
/frontend/src/container/MetricsExplorer/ @srikanthccv
/frontend/src/pages/MetricsApplication/ @srikanthccv
/frontend/src/pages/MetricsExplorer/ @srikanthccv
# Services and Service Map
/frontend/src/container/ServiceApplication/ @srikanthccv
/frontend/src/container/ServiceTable/ @srikanthccv
/frontend/src/pages/Services/ @srikanthccv
/frontend/src/pages/ServiceTopLevelOperations/ @srikanthccv
/frontend/src/container/Home/Services/ @srikanthccv
/deploy/ @SigNoz/devops
.github @SigNoz/devops
@@ -81,7 +42,3 @@
/pkg/telemetrymetadata/ @srikanthccv
/pkg/telemetrymetrics/ @srikanthccv
/pkg/telemetrytraces/ @srikanthccv
# AuthN / AuthZ Owners
/pkg/authz/ @vikrantgupta25 @grandwizard28


@@ -62,7 +62,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
GO_NAME: signoz-community
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build


@@ -93,7 +93,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./cmd/enterprise


@@ -92,7 +92,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./cmd/enterprise


@@ -18,7 +18,7 @@ jobs:
with:
PRIMUS_REF: main
GO_TEST_CONTEXT: ./...
GO_VERSION: 1.24
GO_VERSION: 1.23
fmt:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -27,7 +27,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
lint:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -36,7 +36,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
deps:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -45,7 +45,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
build:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -57,7 +57,7 @@ jobs:
- name: go-install
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
- name: qemu-install
uses: docker/setup-qemu-action@v3
- name: aarch64-install


@@ -58,7 +58,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -122,7 +122,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
# copy the caches from build
- name: get-sha


@@ -72,7 +72,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |
@@ -135,7 +135,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
# copy the caches from build
- name: get-sha


@@ -15,17 +15,16 @@ jobs:
matrix:
src:
- bootstrap
- passwordauthn
- callbackauthn
- auth
- querier
- ttl
sqlstore-provider:
- postgres
- sqlite
clickhouse-version:
- 24.1.2-alpine
- 25.5.6
schema-migrator-version:
- v0.129.6
- v0.128.1
postgres-version:
- 15
if: |
@@ -44,20 +43,6 @@ jobs:
python -m pip install poetry==2.1.2
python -m poetry config virtualenvs.in-project true
cd tests/integration && poetry install --no-root
- name: webdriver
run: |
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
echo "deb http://dl.google.com/linux/chrome/deb/ stable main" | sudo tee -a /etc/apt/sources.list.d/google-chrome.list
sudo apt-get update -qqy
sudo apt-get -qqy install google-chrome-stable
CHROME_VERSION=$(google-chrome-stable --version)
CHROME_FULL_VERSION=${CHROME_VERSION%%.*}
CHROME_MAJOR_VERSION=${CHROME_FULL_VERSION//[!0-9]}
sudo rm /etc/apt/sources.list.d/google-chrome.list
export CHROMEDRIVER_VERSION=`curl -s https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_${CHROME_MAJOR_VERSION%%.*}`
curl -L -O "https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip"
unzip chromedriver-linux64.zip && chmod +x chromedriver && sudo mv chromedriver /usr/local/bin
chromedriver -version
- name: run
run: |
cd tests/integration && \

.gitignore

@@ -86,8 +86,6 @@ queries.active
.devenv/**/tmp/**
.qodo
.dev
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
@@ -230,6 +228,4 @@ poetry.toml
# LSP config files
pyrightconfig.json
# cursor files
frontend/.cursor/
# End of https://www.toptal.com/developers/gitignore/api/python


@@ -8,7 +8,6 @@ linters:
- depguard
- iface
- unparam
- forbidigo
linters-settings:
sloglint:
@@ -25,10 +24,6 @@ linters-settings:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
noerrors:
deny:
- pkg: "errors"
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
iface:
enable:
- identical


@@ -78,5 +78,4 @@ Need assistance? Join our Slack community:
- Set up your [development environment](docs/contributing/development.md)
- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)
- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future.
- Write [integration tests](docs/contributing/go/integration.md)
- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future.


@@ -3,11 +3,11 @@ package main
import (
"context"
"log/slog"
"time"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -32,7 +32,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
Short: "Run the SigNoz server",
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
RunE: func(currCmd *cobra.Command, args []string) error {
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
if err != nil {
return err
}
@@ -56,9 +56,12 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return err
}
jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)
signoz, err := signoz.New(
ctx,
config,
jwt,
zeus.Config{},
noopzeus.NewProviderFactory(),
licensing.Config{},
@@ -73,16 +76,13 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
},
signoz.NewSQLStoreProviderFactories(),
signoz.NewTelemetryStoreProviderFactories(),
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
return err
}
server, err := app.NewServer(config, signoz)
server, err := app.NewServer(config, signoz, jwt)
if err != nil {
logger.ErrorContext(ctx, "failed to create server", "error", err)
return err


@@ -2,7 +2,9 @@ package cmd
import (
"context"
"fmt"
"log/slog"
"os"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
@@ -10,10 +12,9 @@ import (
"github.com/SigNoz/signoz/pkg/signoz"
)
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.DeprecatedFlags) (signoz.Config, error) {
func NewSigNozConfig(ctx context.Context, flags signoz.DeprecatedFlags) (signoz.Config, error) {
config, err := signoz.NewConfig(
ctx,
logger,
config.ResolverConfig{
Uris: []string{"env:"},
ProviderFactories: []config.ProviderFactory{
@@ -29,3 +30,16 @@ func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.Depr
return config, nil
}
func NewJWTSecret(_ context.Context, _ *slog.Logger) string {
jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
if len(jwtSecret) == 0 {
fmt.Println("🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!")
fmt.Println("SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application.")
fmt.Println("Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access.")
fmt.Println("Please set the SIGNOZ_JWT_SECRET environment variable immediately.")
fmt.Println("For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
}
return jwtSecret
}


@@ -2,11 +2,10 @@ FROM node:18-bullseye AS build
WORKDIR /opt/
COPY ./frontend/ ./
ENV NODE_OPTIONS=--max-old-space-size=8192
RUN CI=1 yarn install
RUN CI=1 yarn build
FROM golang:1.24-bullseye
FROM golang:1.23-bullseye
ARG OS="linux"
ARG TARGETARCH


@@ -6,8 +6,6 @@ import (
"time"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -16,7 +14,6 @@ import (
enterprisezeus "github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -38,7 +35,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
Short: "Run the SigNoz server",
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
RunE: func(currCmd *cobra.Command, args []string) error {
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
if err != nil {
return err
}
@@ -57,14 +54,17 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
// add enterprise sqlstore factories to the community sqlstore factories
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil {
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
return err
}
jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)
signoz, err := signoz.New(
ctx,
config,
jwt,
enterprisezeus.Config(),
httpzeus.NewProviderFactory(),
enterpriselicensing.Config(24*time.Hour, 3),
@@ -84,34 +84,13 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
},
sqlstoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
samlCallbackAuthN, err := samlcallbackauthn.New(ctx, store, licensing)
if err != nil {
return nil, err
}
oidcCallbackAuthN, err := oidccallbackauthn.New(store, licensing, providerSettings)
if err != nil {
return nil, err
}
authNs, err := signoz.NewAuthNs(ctx, providerSettings, store, licensing)
if err != nil {
return nil, err
}
authNs[authtypes.AuthNProviderSAML] = samlCallbackAuthN
authNs[authtypes.AuthNProviderOIDC] = oidcCallbackAuthN
return authNs, nil
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
return err
}
server, err := enterpriseapp.NewServer(config, signoz)
server, err := enterpriseapp.NewServer(config, signoz, jwt)
if err != nil {
logger.ErrorContext(ctx, "failed to create server", "error", err)
return err


@@ -137,7 +137,10 @@ prometheus:
##################### Alertmanager #####################
alertmanager:
# Specifies the alertmanager provider to use.
provider: signoz
provider: legacy
legacy:
# The API URL (with prefix) of the legacy Alertmanager instance.
api_url: http://localhost:9093/api
signoz:
# The poll interval for periodically syncing the alertmanager with the config in the store.
poll_interval: 1m
@@ -243,28 +246,3 @@ statsreporter:
gateway:
# The URL of the gateway's api.
url: http://localhost:8080
##################### Tokenizer #####################
tokenizer:
# Specifies the tokenizer provider to use.
provider: jwt
lifetime:
# The duration for which a user can be idle before being required to authenticate.
idle: 168h
# The duration for which a user can remain logged in before being asked to login.
max: 720h
rotation:
# The interval to rotate tokens in.
interval: 30m
# The duration for which the previous token pair remains valid after a token pair is rotated.
duration: 60s
jwt:
# The secret to sign the JWT tokens.
secret: secret
opaque:
gc:
# The interval to perform garbage collection.
interval: 1h
token:
# The maximum number of tokens a user can have. This limits the number of concurrent sessions a user can have.
max_per_user: 5


@@ -11,7 +11,7 @@ x-common: &common
max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
deploy:
labels:
@@ -37,8 +37,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -65,7 +63,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
command:
- bash
- -c
@@ -176,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.92.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +207,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +231,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.0
deploy:
restart_policy:
condition: on-failure


@@ -11,7 +11,7 @@ x-common: &common
max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
deploy:
labels:
@@ -36,8 +36,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -62,7 +60,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
command:
- bash
- -c
@@ -117,7 +115,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.92.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +148,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +174,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.0
deploy:
restart_policy:
condition: on-failure


@@ -10,7 +10,7 @@ x-common: &common
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
# adding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
labels:
signoz.io/scrape: "true"
@@ -40,8 +40,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -67,7 +65,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: signoz-init-clickhouse
command:
- bash
@@ -179,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.97.0}
image: signoz/signoz:${VERSION:-v0.92.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +211,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +237,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +248,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-async
command:
- async


@@ -9,7 +9,8 @@ x-common: &common
max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
# adding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
labels:
signoz.io/scrape: "true"
@@ -35,8 +36,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -62,7 +61,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: signoz-init-clickhouse
command:
- bash
@@ -111,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.97.0}
image: signoz/signoz:${VERSION:-v0.92.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +165,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +177,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
container_name: schema-migrator-async
command:
- async


@@ -1,213 +0,0 @@
# Integration Tests
SigNoz uses integration tests to verify that different components work together correctly in a real environment. These tests run against actual services (ClickHouse, PostgreSQL, etc.) to ensure end-to-end functionality.
## How to set up the integration test environment?
### Prerequisites
Before running integration tests, ensure you have the following installed:
- Python 3.13+
- Poetry (for dependency management)
- Docker (for containerized services)
### Initial Setup
1. Navigate to the integration tests directory:
```bash
cd tests/integration
```
2. Install dependencies using Poetry:
```bash
poetry install --no-root
```
### Starting the Test Environment
To spin up all the containers necessary for writing integration tests and keep them running:
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup
```
This command will:
- Start all required services (ClickHouse, PostgreSQL, Zookeeper, etc.)
- Keep containers running due to the `--reuse` flag
- Verify that the setup is working correctly
### Stopping the Test Environment
When you're done writing integration tests, clean up the environment:
```bash
poetry run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown
```
This will destroy the running integration test setup and clean up resources.
## Understanding the Integration Test Framework
Python and pytest form the foundation of the integration testing framework. Testcontainers are used to spin up disposable integration environments. Wiremock is used to spin up **test doubles** of other services.
- **Why Python/pytest?** It's expressive, low-boilerplate, and has powerful fixture capabilities that make integration testing straightforward. Extensive libraries for HTTP requests, JSON handling, and data analysis (numpy) make it easier to test APIs and verify data.
- **Why testcontainers?** They let us spin up isolated dependencies that match our production environment without complex setup.
- **Why wiremock?** Well maintained, documented, and extensible (see the sketch below).
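For instance, a test double is just an HTTP stub registered against Wiremock's admin API before the code under test is exercised. Here is a minimal sketch, assuming a Wiremock container already listening on port 8080; the stubbed endpoint and payload are illustrative, not part of the SigNoz fixtures:

```python
import requests

# A minimal Wiremock stub: any GET to the (hypothetical) /api/v1/license
# endpoint on the test double returns a canned JSON payload.
stub = {
    "request": {"method": "GET", "url": "/api/v1/license"},
    "response": {
        "status": 200,
        "jsonBody": {"status": "VALID"},
        "headers": {"Content-Type": "application/json"},
    },
}

# Wiremock exposes an admin API for registering stubs at runtime.
response = requests.post("http://localhost:8080/__admin/mappings", json=stub, timeout=2)
response.raise_for_status()
```

The integration test tree is organized as follows: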
```
.
├── conftest.py
├── fixtures
│ ├── __init__.py
│ ├── auth.py
│ ├── clickhouse.py
│ ├── fs.py
│ ├── http.py
│ ├── migrator.py
│ ├── network.py
│ ├── postgres.py
│ ├── signoz.py
│ ├── sql.py
│ ├── sqlite.py
│ ├── types.py
│ └── zookeeper.py
├── poetry.lock
├── pyproject.toml
└── src
└── bootstrap
├── __init__.py
├── a_database.py
├── b_register.py
└── c_license.py
```
Each test suite follows some important principles:
1. **Organization**: Test suites live under `src/` in self-contained packages. Fixtures (a pytest concept) live inside `fixtures/`.
2. **Execution Order**: Files are prefixed with `a_`, `b_`, `c_` to ensure sequential execution.
3. **Time Constraints**: Each suite should complete in under 10 minutes (setup takes ~4 mins).
### Test Suite Design
Test suites should target functional domains or subsystems within SigNoz. When designing a test suite, consider these principles:
- **Functional Cohesion**: Group tests around a specific capability or service boundary
- **Data Flow**: Follow the path of data through related components
- **Change Patterns**: Components frequently modified together should be tested together
The exact boundaries for modules are intentionally flexible, allowing teams to define logical groupings based on their specific context and knowledge of the system.
E.g., the **bootstrap** integration test suite validates core system functionality:
- Database initialization
- Version check
Other test suites include **pipelines**, **auth**, and **querier**.
## How to write an integration test?
Now, let's write an integration test. Create a new file `src/bootstrap/e_version.py` and paste the following:
```python
import requests
from fixtures import types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def test_version(signoz: types.SigNoz) -> None:
response = requests.get(signoz.self.host_config.get("/api/v1/version"), timeout=2)
logger.info(response)
```
We have written a simple test that calls the `version` endpoint of the container started in step 1. To run just this function, run the following command:
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/e_version.py::test_version
```
> Note: The `--reuse` flag is used to reuse the environment if it is already running. Always use this flag when writing and running integration tests. If you don't use this flag, the environment will be destroyed and recreated every time you run the test.
Here's another example of how to write a more comprehensive integration test:
```python
from http import HTTPStatus
import requests
from fixtures import types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def test_user_registration(signoz: types.SigNoz) -> None:
"""Test user registration functionality."""
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/register"),
json={
"name": "testuser",
"orgId": "",
"orgName": "test.org",
"email": "test@example.com",
"password": "password123Z$",
},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
assert response.json()["setupCompleted"] is True
```
## How to run integration tests?
### Running All Tests
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/
```
### Running Specific Test Categories
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>
# Run querier tests
poetry run pytest --basetemp=./tmp/ -vv --reuse src/querier/
# Run auth tests
poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/
```
### Running Individual Tests
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>/<file>.py::test_name
# Run test_register in file a_register.py in auth suite
poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/a_register.py::test_register
```
## How to configure different options for integration tests?
Tests can be configured using pytest options:
- `--sqlstore-provider` - Choose database provider (default: postgres)
- `--postgres-version` - PostgreSQL version (default: 15)
- `--clickhouse-version` - ClickHouse version (default: 25.5.6)
- `--zookeeper-version` - Zookeeper version (default: 3.7.1)
Example:
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
```
## What should I remember?
- **Always use the `--reuse` flag** when setting up the environment to keep containers running
- **Use the `--teardown` flag** when cleaning up to avoid resource leaks
- **Follow the naming convention** with alphabetical prefixes for test execution order
- **Use proper timeouts** in HTTP requests to avoid hanging tests
- **Clean up test data** between tests to avoid interference
- **Use descriptive test names** that clearly indicate what is being tested
- **Leverage fixtures** for common setup and authentication (see the sketch after this list)
- **Test both success and failure scenarios** to ensure robust functionality
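As an example of the fixture-driven setup, a shared authentication fixture might look like the following. This is a minimal sketch: the fixture name, login endpoint, request payload, and response field are assumptions for illustration, not the actual contents of `fixtures/auth.py`:

```python
import pytest
import requests

from fixtures import types


@pytest.fixture(scope="session")
def auth_token(signoz: types.SigNoz) -> str:
    """Log in once per session and share the token across tests."""
    # Hypothetical endpoint, credentials, and response field, for illustration only.
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/login"),
        json={"email": "test@example.com", "password": "password123Z$"},
        timeout=2,
    )
    response.raise_for_status()
    return response.json()["accessJwt"]


def test_authenticated_endpoint(signoz: types.SigNoz, auth_token: str) -> None:
    # Any test that declares `auth_token` as a parameter reuses the cached
    # token instead of repeating the login flow.
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/version"),
        headers={"Authorization": f"Bearer {auth_token}"},
        timeout=2,
    )
    assert response.status_code == 200
```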


@@ -232,7 +232,7 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
series, predictedSeries, weekSeries *qbtypes.TimeSeries,
series, predictedSeries *qbtypes.TimeSeries,
zScoreThreshold float64,
) (*qbtypes.TimeSeries, *qbtypes.TimeSeries) {
upperBoundSeries := &qbtypes.TimeSeries{
@@ -246,8 +246,8 @@ func (p *BaseSeasonalProvider) getBounds(
}
for idx, curr := range series.Values {
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(weekSeries)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(weekSeries)
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
upperBoundSeries.Values = append(upperBoundSeries.Values, &qbtypes.TimeSeriesValue{
Timestamp: curr.Timestamp,
Value: upperBound,
@@ -398,6 +398,8 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
aggOfInterest := result.Aggregations[0]
for _, series := range aggOfInterest.Series {
stdDev := p.getStdDev(series)
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
pastPeriodSeries := p.getMatchingSeries(ctx, pastPeriodResult, series)
currentSeasonSeries := p.getMatchingSeries(ctx, currentSeasonResult, series)
@@ -405,9 +407,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
past2SeasonSeries := p.getMatchingSeries(ctx, past2SeasonResult, series)
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
stdDev := p.getStdDev(currentSeasonSeries)
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
prevSeriesAvg := p.getAvg(pastPeriodSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
@@ -436,7 +435,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
upperBoundSeries, lowerBoundSeries := p.getBounds(
series,
predictedSeries,
currentSeasonSeries,
zScoreThreshold,
)
aggOfInterest.UpperBoundSeries = append(aggOfInterest.UpperBoundSeries, upperBoundSeries)


@@ -1,191 +0,0 @@
package oidccallbackauthn
import (
"context"
"net/url"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/coreos/go-oidc/v3/oidc"
"golang.org/x/oauth2"
)
const (
redirectPath string = "/api/v1/complete/oidc"
)
var (
scopes []string = []string{"email", oidc.ScopeOpenID}
)
var _ authn.CallbackAuthN = (*AuthN)(nil)
type AuthN struct {
store authtypes.AuthNStore
licensing licensing.Licensing
httpClient *client.Client
}
func New(store authtypes.AuthNStore, licensing licensing.Licensing, providerSettings factory.ProviderSettings) (*AuthN, error) {
httpClient, err := client.New(providerSettings.Logger, providerSettings.TracerProvider, providerSettings.MeterProvider)
if err != nil {
return nil, err
}
return &AuthN{
store: store,
licensing: licensing,
httpClient: httpClient,
}, nil
}
func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) {
if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderOIDC {
return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "domain type is not oidc")
}
_, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, siteURL, authDomain)
if err != nil {
return "", err
}
return oauth2Config.AuthCodeURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String()), nil
}
func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtypes.CallbackIdentity, error) {
if err := query.Get("error"); err != "" {
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: error while authenticating").WithAdditional(query.Get("error_description"))
}
state, err := authtypes.NewStateFromString(query.Get("state"))
if err != nil {
return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "oidc: invalid state").WithAdditional(err.Error())
}
authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID)
if err != nil {
return nil, err
}
_, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
oidcProvider, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, state.URL, authDomain)
if err != nil {
return nil, err
}
ctx = context.WithValue(ctx, oauth2.HTTPClient, a.httpClient.Client())
token, err := oauth2Config.Exchange(ctx, query.Get("code"))
if err != nil {
var retrieveError *oauth2.RetrieveError
if errors.As(err, &retrieveError) {
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body))
}
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get token").WithAdditional(err.Error())
}
claims, err := a.claimsFromIDToken(ctx, authDomain, oidcProvider, token)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
if claims == nil && authDomain.AuthDomainConfig().OIDC.GetUserInfo {
claims, err = a.claimsFromUserInfo(ctx, oidcProvider, token)
if err != nil {
return nil, err
}
}
emailClaim, ok := claims[authDomain.AuthDomainConfig().OIDC.ClaimMapping.Email].(string)
if !ok {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email in claims")
}
email, err := valuer.NewEmail(emailClaim)
if err != nil {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to parse email").WithAdditional(err.Error())
}
if !authDomain.AuthDomainConfig().OIDC.InsecureSkipEmailVerified {
emailVerifiedClaim, ok := claims["email_verified"].(bool)
if !ok {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email_verified in claims")
}
if !emailVerifiedClaim {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "oidc: email is not verified")
}
}
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}
func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)
}
oidcProvider, err := oidc.NewProvider(ctx, authDomain.AuthDomainConfig().OIDC.Issuer)
if err != nil {
return nil, nil, err
}
return oidcProvider, &oauth2.Config{
ClientID: authDomain.AuthDomainConfig().OIDC.ClientID,
ClientSecret: authDomain.AuthDomainConfig().OIDC.ClientSecret,
Endpoint: oidcProvider.Endpoint(),
Scopes: scopes,
RedirectURL: (&url.URL{
Scheme: siteURL.Scheme,
Host: siteURL.Host,
Path: redirectPath,
}).String(),
}, nil
}
func (a *AuthN) claimsFromIDToken(ctx context.Context, authDomain *authtypes.AuthDomain, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) {
rawIDToken, ok := token.Extra("id_token").(string)
if !ok {
return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, "oidc: no id_token in token response")
}
verifier := provider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().OIDC.ClientID})
idToken, err := verifier.Verify(ctx, rawIDToken)
if err != nil {
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to verify token").WithAdditional(err.Error())
}
var claims map[string]any
if err := idToken.Claims(&claims); err != nil {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error())
}
return claims, nil
}
func (a *AuthN) claimsFromUserInfo(ctx context.Context, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) {
var claims map[string]any
userInfo, err := provider.UserInfo(ctx, oauth2.StaticTokenSource(&oauth2.Token{
AccessToken: token.AccessToken,
TokenType: "Bearer", // The UserInfo endpoint requires a bearer token as per RFC6750
}))
if err != nil {
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get user info").WithAdditional(err.Error())
}
if err := userInfo.Claims(&claims); err != nil {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error())
}
return claims, nil
}


@@ -1,155 +0,0 @@
package samlcallbackauthn
import (
"context"
"crypto/x509"
"encoding/base64"
"encoding/pem"
"net/url"
"strings"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
saml2 "github.com/russellhaering/gosaml2"
dsig "github.com/russellhaering/goxmldsig"
)
const (
redirectPath string = "/api/v1/complete/saml"
)
var _ authn.CallbackAuthN = (*AuthN)(nil)
type AuthN struct {
store authtypes.AuthNStore
licensing licensing.Licensing
}
func New(ctx context.Context, store authtypes.AuthNStore, licensing licensing.Licensing) (*AuthN, error) {
return &AuthN{
store: store,
licensing: licensing,
}, nil
}
func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) {
if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderSAML {
return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "saml: domain type is not saml")
}
sp, err := a.serviceProvider(siteURL, authDomain)
if err != nil {
return "", err
}
url, err := sp.BuildAuthURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String())
if err != nil {
return "", err
}
return url, nil
}
func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*authtypes.CallbackIdentity, error) {
state, err := authtypes.NewStateFromString(formValues.Get("RelayState"))
if err != nil {
return nil, errors.New(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "saml: invalid state").WithAdditional(err.Error())
}
authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID)
if err != nil {
return nil, err
}
_, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
sp, err := a.serviceProvider(state.URL, authDomain)
if err != nil {
return nil, err
}
assertionInfo, err := sp.RetrieveAssertionInfo(formValues.Get("SAMLResponse"))
if err != nil {
if errors.As(err, &saml2.ErrVerification{}) {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, err.Error())
}
if errors.As(err, &saml2.ErrMissingElement{}) {
return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, err.Error())
}
return nil, err
}
if assertionInfo.WarningInfo.InvalidTime {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "saml: expired saml response")
}
email, err := valuer.NewEmail(assertionInfo.NameID)
if err != nil {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "saml: invalid email").WithAdditional("The nameID assertion is used to retrieve the email address, please check your IDP configuration and try again.")
}
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}
func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
certStore, err := a.getCertificateStore(authDomain)
if err != nil {
return nil, err
}
acsURL := &url.URL{Scheme: siteURL.Scheme, Host: siteURL.Host, Path: redirectPath}
// Note:
// The ServiceProviderIssuer is the client id in case of keycloak. Since we set it to the host here, we need to set the client id == host in keycloak.
// For AWSSSO, this is the value of Application SAML audience.
return &saml2.SAMLServiceProvider{
IdentityProviderSSOURL: authDomain.AuthDomainConfig().SAML.SamlIdp,
IdentityProviderIssuer: authDomain.AuthDomainConfig().SAML.SamlEntity,
ServiceProviderIssuer: siteURL.Host,
AssertionConsumerServiceURL: acsURL.String(),
SignAuthnRequests: !authDomain.AuthDomainConfig().SAML.InsecureSkipAuthNRequestsSigned,
AllowMissingAttributes: true,
IDPCertificateStore: certStore,
SPKeyStore: dsig.RandomKeyStoreForTest(),
}, nil
}
func (a *AuthN) getCertificateStore(authDomain *authtypes.AuthDomain) (dsig.X509CertificateStore, error) {
certStore := &dsig.MemoryX509CertificateStore{
Roots: []*x509.Certificate{},
}
var certBytes []byte
if strings.Contains(authDomain.AuthDomainConfig().SAML.SamlCert, "-----BEGIN CERTIFICATE-----") {
block, _ := pem.Decode([]byte(authDomain.AuthDomainConfig().SAML.SamlCert))
if block == nil {
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no valid pem cert found")
}
certBytes = block.Bytes
} else {
certData, err := base64.StdEncoding.DecodeString(authDomain.AuthDomainConfig().SAML.SamlCert)
if err != nil {
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to read certificate: %s", err.Error())
}
certBytes = certData
}
idpCert, err := x509.ParseCertificate(certBytes)
if err != nil {
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to prepare saml request, invalid cert: %s", err.Error())
}
certStore.Roots = append(certStore.Roots, idpCert)
return certStore, nil
}


@@ -1,79 +0,0 @@
package openfgaauthz
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)
type provider struct {
pkgAuthzService authz.AuthZ
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.pkgAuthzService.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.pkgAuthzService.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.pkgAuthzService.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
}


@@ -1,40 +0,0 @@
module base
type organisation
relations
define read: [user, role#assignee]
define update: [user, role#assignee]
type user
relations
define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
type anonymous
type role
relations
define assignee: [user]
define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
type resources
relations
define create: [user, role#assignee]
define list: [user, role#assignee]
type resource
relations
define read: [user, anonymous, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
define block: [user, role#assignee]
type telemetry
relations
define read: [user, anonymous, role#assignee]


@@ -1,29 +0,0 @@
package openfgaschema
import (
"context"
_ "embed"
"github.com/SigNoz/signoz/pkg/authz"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)
var (
//go:embed base.fga
baseDSL string
)
type schema struct{}
func NewSchema() authz.Schema {
return &schema{}
}
func (schema *schema) Get(ctx context.Context) []openfgapkgtransformer.ModuleFile {
return []openfgapkgtransformer.ModuleFile{
{
Name: "base.fga",
Contents: baseDSL,
},
}
}


@@ -20,6 +20,7 @@ import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -34,7 +35,10 @@ type APIHandlerOptions struct {
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
FluxInterval time.Duration
UseLogsNewSchema bool
UseTraceNewSchema bool
JWT *authtypes.JWT
}
type APIHandler struct {
@@ -89,8 +93,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
// paid plans specific routes
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.Signoz.Handlers.Session.CreateSessionBySAMLCallback)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/complete/oidc", am.OpenAccess(ah.Signoz.Handlers.Session.CreateSessionByOIDCCallback)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)
// base overrides
router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)


@@ -0,0 +1,107 @@
package api
import (
"context"
"encoding/base64"
"fmt"
"net/http"
"net/url"
"go.uber.org/zap"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/valuer"
)
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
ssoError := []byte("Login failed. Please contact your system administrator")
dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
base64.StdEncoding.Encode(dst, ssoError)
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
}
// receiveSAML completes a SAML request and gets user logged in
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
// this is the source url that initiated the login request
redirectUri := constants.GetDefaultSiteURL()
ctx := context.Background()
err := r.ParseForm()
if err != nil {
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
// the relay state is sent when a login request is submitted to
// Idp.
relayState := r.FormValue("RelayState")
zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState))
parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
// upgrade redirect url from the relay state for better accuracy
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
// fetch domain by parsing relay state.
domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}
orgID, err := valuer.NewUUID(domain.OrgID)
if err != nil {
handleSsoError(w, r, redirectUri)
return
}
_, err = ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
sp, err := domain.PrepareSamlRequest(parsedState)
if err != nil {
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
if err != nil {
zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
if assertionInfo.WarningInfo.InvalidTime {
zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
email := assertionInfo.NameID
if email == "" {
zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
handleSsoError(w, r, redirectUri)
return
}
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email)
if err != nil {
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
http.Redirect(w, r, nextPage, http.StatusSeeOther)
}

View File

@@ -13,11 +13,11 @@ import (
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
@@ -168,22 +168,38 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
}
if integrationUserResult != nil {
return &integrationUserResult.User, nil
}
zap.L().Info(
"cloud integration user not found. Attempting to create the user",
zap.String("cloudProvider", cloudProvider),
)
newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration user: %w", err,
))
}
password, err := types.NewFactorPassword(uuid.NewString())
integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
}
return cloudIntegrationUser, nil
return integrationUser, nil
}
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (

View File

@@ -7,11 +7,6 @@ import (
"net"
"net/http"
_ "net/http/pprof" // http profiler
"slices"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
"github.com/gorilla/handlers"
@@ -28,6 +23,7 @@ import (
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/web"
"github.com/rs/cors"
"github.com/soheilhy/cmux"
@@ -48,10 +44,24 @@ import (
"go.uber.org/zap"
)
type ServerOptions struct {
Config signoz.Config
SigNoz *signoz.SigNoz
HTTPHostPort string
PrivateHostPort string
PreferSpanMetrics bool
FluxInterval string
FluxIntervalForTraceDetail string
Cluster string
GatewayUrl string
Jwt *authtypes.JWT
}
// Server runs HTTP, Mux and a grpc server
type Server struct {
config signoz.Config
signoz *signoz.SigNoz
jwt *authtypes.JWT
ruleManager *baserules.Manager
// public http router
@@ -59,6 +69,11 @@ type Server struct {
httpServer *http.Server
httpHostPort string
// private http
privateConn net.Listener
privateHTTP *http.Server
privateHostPort string
opampServer *opamp.Server
// Usage manager
@@ -68,7 +83,7 @@ type Server struct {
}
// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
if err != nil {
return nil, err
@@ -154,6 +169,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
JWT: jwt,
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
@@ -164,8 +180,10 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
s := &Server{
config: config,
signoz: signoz,
jwt: jwt,
ruleManager: rm,
httpHostPort: baseconst.HTTPHostPort,
privateHostPort: baseconst.PrivateHostPort,
unavailableChannel: make(chan healthcheck.Status),
usageManager: usageManager,
}
@@ -178,6 +196,13 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
s.httpServer = httpServer
privateServer, err := s.createPrivateServer(apiHandler)
if err != nil {
return nil, err
}
s.privateHTTP = privateServer
s.opampServer = opamp.InitializeServer(
&opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation,
)
@@ -190,21 +215,41 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
return s.unavailableChannel
}
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
r.Use(otelmux.Middleware(
"apiserver",
otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()),
otelmux.WithTracerProvider(s.signoz.Instrumentation.TracerProvider()),
otelmux.WithPropagators(propagation.NewCompositeTextMapPropagator(propagation.Baggage{}, propagation.TraceContext{})),
otelmux.WithFilter(func(r *http.Request) bool {
return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path)
}),
otelmux.WithPublicEndpoint(),
))
r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterPrivateRoutes(r)
c := cors.New(cors.Options{
//todo(amol): find out a way to add exact domain or
// ip here for alert manager
AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"},
})
handler := c.Handler(r)
handler = handlers.CompressHandler(handler)
return &http.Server{
Handler: handler,
}, nil
}
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
@@ -212,7 +257,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
s.config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewComment().Wrap)
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
@@ -265,6 +309,19 @@ func (s *Server) initListeners() error {
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
// listen on private port to support internal services
privateHostPort := s.privateHostPort
if privateHostPort == "" {
return fmt.Errorf("baseconst.PrivateHostPort is required")
}
s.privateConn, err = net.Listen("tcp", privateHostPort)
if err != nil {
return err
}
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort))
return nil
}
@@ -303,6 +360,26 @@ func (s *Server) Start(ctx context.Context) error {
}
}()
var privatePort int
if port, err := utils.GetPort(s.privateConn.Addr()); err == nil {
privatePort = port
}
go func() {
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort))
switch err := s.privateHTTP.Serve(s.privateConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
zap.L().Info("private http server closed")
default:
zap.L().Error("Could not start private HTTP server", zap.Error(err))
}
s.unavailableChannel <- healthcheck.Unavailable
}()
go func() {
zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
@@ -322,6 +399,12 @@ func (s *Server) Stop(ctx context.Context) error {
}
}
if s.privateHTTP != nil {
if err := s.privateHTTP.Shutdown(ctx); err != nil {
return err
}
}
s.opampServer.Stop()
if s.ruleManager != nil {
@@ -334,9 +417,17 @@ func (s *Server) Stop(ctx context.Context) error {
return nil
}
func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, logger *slog.Logger) (*baserules.Manager, error) {
ruleStore := sqlrulestore.NewRuleStore(sqlstore)
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
func makeRulesManager(
ch baseint.Reader,
cache cache.Cache,
alertmanager alertmanager.Alertmanager,
sqlstore sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
orgGetter organization.Getter,
querier querier.Querier,
logger *slog.Logger,
) (*baserules.Manager, error) {
// create manager opts
managerOpts := &baserules.ManagerOptions{
TelemetryStore: telemetryStore,
@@ -351,10 +442,8 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
PrepareTaskFunc: rules.PrepareTaskFunc,
PrepareTestRuleFunc: rules.TestNotification,
Alertmanager: alertmanager,
SQLStore: sqlstore,
OrgGetter: orgGetter,
RuleStore: ruleStore,
MaintenanceStore: maintenanceStore,
SqlStore: sqlstore,
}
// create Manager

View File

@@ -4,6 +4,10 @@ import (
"os"
)
const (
DefaultSiteURL = "https://localhost:8080"
)
var LicenseSignozIo = "https://license.signoz.io/api/v1"
var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
@@ -23,13 +27,20 @@ func GetOrDefaultEnv(key string, fallback string) string {
// constant functions that override env vars
// GetDefaultSiteURL returns the default site URL, primarily
// used to send the SAML request and allow the backend to
// handle the HTTP redirect
func GetDefaultSiteURL() string {
return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
}
const DotMetricsEnabled = "DOT_METRICS_ENABLED"
var IsDotMetricsEnabled = false
var IsPreferSpanMetrics = false
func init() {
if GetOrDefaultEnv(DotMetricsEnabled, "true") == "true" {
if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
IsDotMetricsEnabled = true
}

View File

@@ -1,7 +1,7 @@
package model
import (
"errors"
"fmt"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
@@ -57,7 +57,7 @@ func Unauthorized(err error) *ApiError {
func BadRequestStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorBadData,
Err: errors.New(s),
Err: fmt.Errorf(s),
}
}
@@ -73,7 +73,7 @@ func InternalError(err error) *ApiError {
func InternalErrorStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorInternal,
Err: errors.New(s),
Err: fmt.Errorf(s),
}
}

View File

@@ -35,6 +35,7 @@ import (
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
yaml "gopkg.in/yaml.v2"
)
const (
@@ -78,6 +79,11 @@ func NewAnomalyRule(
opts = append(opts, baserules.WithLogger(logger))
if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
target := -1 * *p.RuleCondition.Target
p.RuleCondition.Target = &target
}
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
if err != nil {
return nil, err
@@ -161,9 +167,16 @@ func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
)
st, en := r.Timestamps(ts)
start := st.UnixMilli()
end := en.UnixMilli()
start := ts.Add(-time.Duration(r.EvalWindow())).UnixMilli()
end := ts.UnixMilli()
if r.EvalDelay() > 0 {
start = start - int64(r.EvalDelay().Milliseconds())
end = end - int64(r.EvalDelay().Milliseconds())
}
// round to minute otherwise we could potentially miss data
start = start - (start % (60 * 1000))
end = end - (end % (60 * 1000))
compositeQuery := r.Condition().CompositeQuery
@@ -240,17 +253,10 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
for _, series := range queryResult.AnomalyScores {
if r.Condition() != nil && r.Condition().RequireMinPoints {
if len(series.Points) < r.Condition().RequiredNumPoints {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
smpl, shouldAlert := r.ShouldAlert(*series)
if shouldAlert {
resultVector = append(resultVector, smpl)
}
results, err := r.Threshold.ShouldAlert(*series, r.Unit())
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}
@@ -290,17 +296,10 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
for _, series := range queryResult.AnomalyScores {
if r.Condition().RequireMinPoints {
if len(series.Points) < r.Condition().RequiredNumPoints {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
smpl, shouldAlert := r.ShouldAlert(*series)
if shouldAlert {
resultVector = append(resultVector, smpl)
}
results, err := r.Threshold.ShouldAlert(*series, r.Unit())
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}
@@ -331,19 +330,14 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
resultFPs := map[uint64]struct{}{}
var alerts = make(map[uint64]*ruletypes.Alert, len(res))
ruleReceivers := r.Threshold.GetRuleReceivers()
ruleReceiverMap := make(map[string][]string)
for _, value := range ruleReceivers {
ruleReceiverMap[value.Name] = value.Channels
}
for _, smpl := range res {
l := make(map[string]string, len(smpl.Metric))
for _, lbl := range smpl.Metric {
l[lbl.Name] = lbl.Value
}
value := valueFormatter.Format(smpl.V, r.Unit())
threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold)
tmplData := ruletypes.AlertTemplateData(l, value, threshold)
@@ -387,7 +381,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
}
if smpl.IsMissing {
lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
lb.Set(labels.NoDataLabel, "true")
}
lbs := lb.Labels()
@@ -408,12 +401,13 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
State: model.StatePending,
Value: smpl.V,
GeneratorURL: r.GeneratorURL(),
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
Receivers: r.PreferredChannels(),
Missing: smpl.IsMissing,
}
}
r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
@@ -422,9 +416,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
alert.Value = a.Value
alert.Annotations = a.Annotations
if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
alert.Receivers = ruleReceiverMap[v]
}
alert.Receivers = r.PreferredChannels()
continue
}
@@ -507,7 +499,7 @@ func (r *AnomalyRule) String() string {
PreferredChannels: r.PreferredChannels(),
}
byt, err := json.Marshal(ar)
byt, err := yaml.Marshal(ar)
if err != nil {
return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
}

View File

@@ -3,10 +3,8 @@ package rules
import (
"context"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
@@ -22,10 +20,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
var task baserules.Task
ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
evaluation, err := opts.Rule.Evaluation.GetEvaluation()
if err != nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err)
}
if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
// create a threshold rule
tr, err := baserules.NewThresholdRule(
@@ -46,7 +40,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -68,7 +62,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -90,7 +84,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)
// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -126,6 +120,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
if parsedRule.RuleType == ruletypes.RuleTypeThreshold {
// add special labels for test alerts
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
parsedRule.Labels[labels.RuleSourceLabel] = ""
parsedRule.Labels[labels.AlertRuleIdLabel] = ""

View File

@@ -1,484 +0,0 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.
# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.
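A minimal detection sketch (checking for a root-level `tsconfig.json` is the usual heuristic; a `typescript` entry in `package.json` devDependencies is another signal):
```ts
import { existsSync } from 'fs';
import { join } from 'path';

// Heuristic: treat the repo as TypeScript if a tsconfig.json exists at the root.
const isTypeScript = existsSync(join(process.cwd(), 'tsconfig.json'));
```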
# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.
**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid the `any` type; use proper typing instead (a short sketch follows)
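A compact sketch of these requirements together (the `Theme` interface and names are illustrative):
```ts
interface Theme {
  mode: 'light' | 'dark';
}

// `as const` pins the literal type 'dark' instead of widening it to string
const DEFAULT_MODE = 'dark' as const;

// Fully typed mock function — no `any`
const mockGetTheme = jest.fn() as jest.MockedFunction<() => Theme>;
mockGetTheme.mockReturnValue({ mode: DEFAULT_MODE });
```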
# Unit Testing Focus
- Focus on critical functionality (business logic, utility functions, component behavior)
- Mock dependencies (API calls, external modules) before imports (see the sketch below)
- Test multiple data scenarios (valid inputs, invalid inputs, edge cases)
- Write maintainable tests with descriptive names grouped in describe blocks
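Jest hoists `jest.mock` calls above the import statements at transform time, so the module under test always receives the mocked dependency; a sketch (`usersService` and `loadUserList` are hypothetical modules):
```ts
// Hoisted above the imports below, so loadUserList sees the mock.
jest.mock('../api/usersService', () => ({
  fetchUsers: jest.fn().mockResolvedValue([]),
}));

import { fetchUsers } from '../api/usersService';
import { loadUserList } from '../loadUserList';

it('returns an empty list when the API has no users', async () => {
  await expect(loadUserList()).resolves.toEqual([]);
  expect(fetchUsers).toHaveBeenCalledTimes(1);
});
```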
# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)
**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases
**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts`
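For the browser APIs above, the global mock is typically registered once in the setup file (`jest.setup.ts` in this repo, per `setupFilesAfterEnv`). A sketch of the usual `matchMedia` stub, since jsdom does not implement it:
```ts
// jest.setup.ts — stub window.matchMedia for every test file
Object.defineProperty(window, 'matchMedia', {
  writable: true,
  value: jest.fn().mockImplementation((query: string) => ({
    matches: false,
    media: query,
    onchange: null,
    addListener: jest.fn(), // deprecated, but some libraries still call it
    removeListener: jest.fn(),
    addEventListener: jest.fn(),
    removeEventListener: jest.fn(),
    dispatchEvent: jest.fn(),
  })),
});
```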
# Repo-specific Testing Conventions
## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.
## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.
## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue({ /* minimal shape */ } as ReturnType<typeof useFoo>);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
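The helper shapes below are assumptions (check `tests/test-utils` for the real signatures); they typically wrap a value in a React Query result:
```ts
// Hypothetical helper shapes — verify against the actual exports.
const rqSuccess = <T>(data: T) => ({
  data,
  isLoading: false,
  isError: false,
  error: null,
});
const rqLoading = () => ({
  data: undefined,
  isLoading: true,
  isError: false,
  error: null,
});

mockUseFoo.mockReturnValue(rqSuccess({ items: [] }) as ReturnType<typeof useFoo>);
```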
## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata_`.
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```
## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`.
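Applied in order (`SavedViews` and the element names are illustrative):
```ts
import { render, screen } from 'tests/test-utils';
import SavedViews from '../SavedViews'; // hypothetical component under test

it('locates elements by query priority', async () => {
  render(<SavedViews />);

  // 1. Accessible query — resilient to markup changes
  await screen.findByRole('button', { name: /save view/i });
  // 2. Visible text — reasonable fallback
  screen.getByText(/no data available/i);
  // 3. Test id — last resort when role and text are unstable
  screen.getByTestId('traces-table');
});
```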
# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';
describe('MyComponent', () => {
it('renders and interacts', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
);
render(<MyComponent />, undefined, { initialRoute: '/foo' });
expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
await user.click(screen.getByRole('button', { name: /refresh/i }));
await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
});
});
```
# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates
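For the last point, pin the clock per test rather than globally (relies on Jest's modern fake timers, the default since Jest 27):
```ts
it('formats relative dates deterministically', () => {
  jest.useFakeTimers();
  jest.setSystemTime(new Date('2024-01-01T00:00:00Z'));

  // ...assert output such as "2 days ago" against the pinned clock...

  jest.useRealTimers();
});
```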
# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3-5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc.
- **No Any**: Enforce type safety
# TypeScript Type Safety Examples
## Proper Mock Typing
```ts
// ✅ GOOD - Properly typed mocks
interface User {
id: number;
name: string;
email: string;
}
interface ApiResponse<T> {
data: T;
status: number;
message: string;
}
// Type the mock functions
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;
// Mock implementation with proper typing
mockFetchUser.mockResolvedValue({
data: { id: 1, name: 'John Doe', email: 'john@example.com' },
status: 200,
message: 'Success'
});
// ❌ BAD - Using any type
const mockFetchUser = jest.fn() as any; // Don't do this
```
## React Component Testing with Types
```ts
// ✅ GOOD - Properly typed component testing
interface ComponentProps {
title: string;
data: User[];
onUserSelect: (user: User) => void;
isLoading?: boolean;
}
const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
// Component implementation
};
describe('TestComponent', () => {
it('should render with proper props', () => {
// Arrange - Type the props properly
const mockProps: ComponentProps = {
title: 'Test Title',
data: [{ id: 1, name: 'John', email: 'john@example.com' }],
onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
isLoading: false
};
// Act
render(<TestComponent {...mockProps} />);
// Assert
expect(screen.getByText('Test Title')).toBeInTheDocument();
});
});
```
## Hook Testing with Types
```ts
// ✅ GOOD - Properly typed hook testing
interface UseUserDataReturn {
user: User | null;
loading: boolean;
error: string | null;
refetch: () => void;
}
const useUserData = (id: number): UseUserDataReturn => {
// Hook implementation
};
describe('useUserData', () => {
it('should return user data with proper typing', () => {
// Arrange
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
mockFetchUser.mockResolvedValue({
data: mockUser,
status: 200,
message: 'Success'
});
// Act
const { result } = renderHook(() => useUserData(1));
// Assert
expect(result.current.user).toEqual(mockUser);
expect(result.current.loading).toBe(false);
expect(result.current.error).toBeNull();
});
});
```
## Global Mock Type Safety
```ts
// ✅ GOOD - Type-safe global mocks
// In __mocks__/routerMock.ts
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
pathname: '/traces',
search: '',
hash: '',
state: null,
key: 'test-key',
...overrides,
});
// In test files
const location = useLocation(); // Properly typed from global mock
expect(location.pathname).toBe('/traces');
```
# TypeScript Configuration for Jest
## Required Jest Configuration
```json
// jest.config.ts
{
"preset": "ts-jest/presets/js-with-ts-esm",
"globals": {
"ts-jest": {
"useESM": true,
"isolatedModules": true,
"tsconfig": "<rootDir>/tsconfig.jest.json"
}
},
"extensionsToTreatAsEsm": [".ts", ".tsx"],
"moduleFileExtensions": ["ts", "tsx", "js", "json"]
}
```
## TypeScript Jest Configuration
```json
// tsconfig.jest.json
{
"extends": "./tsconfig.json",
"compilerOptions": {
"types": ["jest", "@testing-library/jest-dom"],
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "node"
},
"include": [
"src/**/*",
"**/*.test.ts",
"**/*.test.tsx",
"__mocks__/**/*"
]
}
```
## Common Type Safety Patterns
### Mock Function Typing
```ts
// ✅ GOOD - Proper mock function typing
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;
// ❌ BAD - Using any
const mockApiCall = jest.fn() as any;
```
### Generic Mock Typing
```ts
// ✅ GOOD - Generic mock typing
interface MockApiResponse<T> {
data: T;
status: number;
}
const mockFetchData = jest.fn() as jest.MockedFunction<
<T>(endpoint: string) => Promise<MockApiResponse<T>>
>;
// Usage — configure the mock itself; calling it first and chaining
// mockResolvedValue on the returned promise does not compile
mockFetchData.mockResolvedValue({
data: { id: 1, name: 'John' },
status: 200
});
```
### React Testing Library with Types
```ts
// ✅ GOOD - Typed testing utilities
import { render, screen, RenderResult } from '@testing-library/react';
import { ComponentProps } from 'react';
type TestComponentProps = ComponentProps<typeof TestComponent>;
const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
const defaultProps: TestComponentProps = {
title: 'Test',
data: [],
onSelect: jest.fn(),
...props
};
return render(<TestComponent {...defaultProps} />);
};
```
### Error Handling with Types
```ts
// ✅ GOOD - Typed error handling
interface ApiError {
message: string;
code: number;
details?: Record<string, unknown>;
}
const mockApiError: ApiError = {
message: 'API Error',
code: 500,
details: { endpoint: '/users' }
};
mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
```
## Type Safety Checklist
- [ ] All mock functions use `jest.MockedFunction<T>`
- [ ] All mock data has proper interfaces
- [ ] No `any` types in test files
- [ ] Generic types are used where appropriate
- [ ] Error types are properly defined
- [ ] Component props are typed
- [ ] Hook return types are defined
- [ ] API response types are defined
- [ ] Global mocks are type-safe
- [ ] Test utilities are properly typed
# Mock Decision Tree
```
Is it used in 20+ test files?
├─ YES → Use Global Mock
│ ├─ react-router-dom
│ ├─ react-query
│ ├─ antd components
│ └─ browser APIs
└─ NO → Is it business logic?
├─ YES → Use Local Mock
│ ├─ API endpoints
│ ├─ Custom hooks
│ └─ Domain components
└─ NO → Is it test-specific?
├─ YES → Use Local Mock
│ ├─ Error scenarios
│ ├─ Loading states
│ └─ Specific data
└─ NO → Consider Global Mock
└─ If it becomes frequently used
```
# Common Anti-Patterns to Avoid
❌ **Don't mock global dependencies locally:**
```js
// BAD - This is already globally mocked
jest.mock('react-router-dom', () => ({ ... }));
```
❌ **Don't create global mocks for test-specific data:**
```js
// BAD - This should be local
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => specificTestData)
}));
```
✅ **Do use global mocks for infrastructure:**
```js
// GOOD - Use global mock
import { useLocation } from 'react-router-dom';
```
✅ **Do create local mocks for business logic:**
```js
// GOOD - Local mock for specific test needs
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData)
}));
```

View File

@@ -1,5 +1,4 @@
node_modules
build
*.typegen.ts
i18-generate-hash.js
src/parser/TraceOperatorParser/**
i18-generate-hash.js

View File

@@ -10,6 +10,4 @@ public/
**/*.json
# Ignore all files in parser folder:
src/parser/**
src/TraceOperator/parser/**
src/parser/**

View File

@@ -1,51 +0,0 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
// Mock for uplot library used in tests
export interface MockUPlotInstance {
setData: jest.Mock;
setSize: jest.Mock;
destroy: jest.Mock;
redraw: jest.Mock;
setSeries: jest.Mock;
}
export interface MockUPlotPaths {
spline: jest.Mock;
bars: jest.Mock;
}
// Create mock instance methods
const createMockUPlotInstance = (): MockUPlotInstance => ({
setData: jest.fn(),
setSize: jest.fn(),
destroy: jest.fn(),
redraw: jest.fn(),
setSeries: jest.fn(),
});
// Create mock paths
const mockPaths: MockUPlotPaths = {
spline: jest.fn(),
bars: jest.fn(),
};
// Mock static methods
const mockTzDate = jest.fn(
(date: Date, _timezone: string) => new Date(date.getTime()),
);
// Mock uPlot constructor - this needs to be a proper constructor function
function MockUPlot(
_options: unknown,
_data: unknown,
_target: HTMLElement,
): MockUPlotInstance {
return createMockUPlotInstance();
}
// Add static methods to the constructor
MockUPlot.tzDate = mockTzDate;
MockUPlot.paths = mockPaths;
// Export the constructor as default
export default MockUPlot;

View File

@@ -1,29 +0,0 @@
// Mock for useSafeNavigate hook to avoid React Router version conflicts in tests
interface SafeNavigateOptions {
replace?: boolean;
state?: unknown;
}
interface SafeNavigateTo {
pathname?: string;
search?: string;
hash?: string;
}
type SafeNavigateToType = string | SafeNavigateTo;
interface UseSafeNavigateReturn {
safeNavigate: jest.MockedFunction<
(to: SafeNavigateToType, options?: SafeNavigateOptions) => void
>;
}
export const useSafeNavigate = (): UseSafeNavigateReturn => ({
safeNavigate: jest.fn(
(to: SafeNavigateToType, options?: SafeNavigateOptions) => {
console.log(`Mock safeNavigate called with:`, to, options);
},
) as jest.MockedFunction<
(to: SafeNavigateToType, options?: SafeNavigateOptions) => void
>,
});

View File

@@ -1,9 +1,6 @@
import type { Config } from '@jest/types';
const USE_SAFE_NAVIGATE_MOCK_PATH = '<rootDir>/__mocks__/useSafeNavigate.ts';
const config: Config.InitialOptions = {
silent: true,
clearMocks: true,
coverageDirectory: 'coverage',
coverageReporters: ['text', 'cobertura', 'html', 'json-summary'],
@@ -13,17 +10,12 @@ const config: Config.InitialOptions = {
moduleNameMapper: {
'\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
'\\.md$': '<rootDir>/__mocks__/cssMock.ts',
'^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
'^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
'^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
'^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
},
globals: {
extensionsToTreatAsEsm: ['.ts'],
'ts-jest': {
useESM: true,
isolatedModules: true,
tsconfig: '<rootDir>/tsconfig.jest.json',
},
},
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
@@ -33,7 +25,7 @@ const config: Config.InitialOptions = {
'^.+\\.(js|jsx)$': 'babel-jest',
},
transformIgnorePatterns: [
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|@signozhq/table|@signozhq/calendar|@signozhq/input|@signozhq/popover|@signozhq/button|@signozhq/sonner|@signozhq/*|date-fns|d3-interpolate|d3-color|api|@codemirror|@lezer|@marijn)/)',
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color|api|@codemirror|@lezer|@marijn)/)',
],
setupFilesAfterEnv: ['<rootDir>jest.setup.ts'],
testPathIgnorePatterns: ['/node_modules/', '/public/'],

View File

@@ -43,21 +43,11 @@
"@radix-ui/react-tooltip": "1.0.7",
"@sentry/react": "8.41.0",
"@sentry/webpack-plugin": "2.22.6",
"@signozhq/badge": "0.0.2",
"@signozhq/button": "0.0.2",
"@signozhq/calendar": "0.0.0",
"@signozhq/callout": "0.0.2",
"@signozhq/design-tokens": "1.1.4",
"@signozhq/input": "0.0.2",
"@signozhq/popover": "0.0.0",
"@signozhq/resizable": "0.0.0",
"@signozhq/sonner": "0.1.0",
"@signozhq/table": "0.3.7",
"@signozhq/tooltip": "0.0.2",
"@tanstack/react-table": "8.20.6",
"@tanstack/react-virtual": "3.11.2",
"@uiw/codemirror-theme-copilot": "4.23.11",
"@uiw/codemirror-theme-github": "4.24.1",
"@uiw/codemirror-theme-copilot": "4.23.11",
"@uiw/react-codemirror": "4.23.10",
"@uiw/react-md-editor": "3.23.5",
"@visx/group": "3.3.0",
@@ -102,7 +92,6 @@
"i18next-http-backend": "^1.3.2",
"jest": "^27.5.1",
"js-base64": "^3.7.2",
"kbar": "0.1.0-beta.48",
"less": "^4.1.2",
"less-loader": "^10.2.0",
"lodash-es": "^4.17.21",
@@ -139,7 +128,6 @@
"redux": "^4.0.5",
"redux-thunk": "^2.3.0",
"rehype-raw": "7.0.0",
"rrule": "2.8.1",
"stream": "^0.0.2",
"style-loader": "1.3.0",
"styled-components": "^5.3.11",
@@ -278,7 +266,6 @@
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0",
"got": "11.8.5",
"form-data": "4.0.4",
"brace-expansion": "^2.0.2"
"form-data": "4.0.4"
}
}

View File

@@ -1,81 +0,0 @@
<svg
width="32"
height="32"
viewBox="0 0 32 32"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M19.11 16.8483L14.0369 17.7304C14.0369 17.7304 12.3481 22.0324 12.2437 22.3235C12.1392 22.6146 12.1437 23.0746 12.6037 23.0746C13.1881 23.0746 16.1546 23.0591 16.1546 23.0591C16.1546 23.0591 15.4346 26.5322 15.3924 26.8433C15.3502 27.1544 15.6835 27.4277 15.9768 27.1144C16.2701 26.8011 20.3121 21.6058 20.4877 21.3525C20.801 20.8992 20.4988 20.4814 20.1433 20.4614C19.7877 20.4414 17.4056 20.4925 17.4056 20.4925L19.11 16.8483Z"
fill="#FECA18"
/>
<path
d="M17.7589 17.4527C17.7589 17.4527 16.6279 19.9548 16.5856 20.097C16.4612 20.5192 17.0078 20.6903 17.1634 20.3481C17.3189 20.0037 18.6655 17.2194 18.6655 17.2194L17.7589 17.4527Z"
fill="#FDB900"
/>
<path
d="M12.8859 22.2592C13.1836 22.2503 15.7968 22.2436 16.0146 22.2281C16.4213 22.1969 16.4835 22.7591 16.0146 22.7591C15.528 22.7591 12.9637 22.768 12.8081 22.7747C12.4481 22.7925 12.3992 22.2747 12.8859 22.2592Z"
fill="#FDB900"
/>
<path
d="M14.9813 17.1127C14.9813 17.1127 13.6481 20.4592 13.5725 20.7103C13.2592 21.7591 14.3858 21.728 14.6836 21.1325C14.8302 20.837 16.2635 17.8016 16.3257 17.2527C16.3879 16.7061 14.9813 17.1127 14.9813 17.1127Z"
fill="#FFE36A"
/>
<path
d="M15.3347 21.0148C15.1436 20.8837 14.9125 20.9992 14.7725 21.2192C14.6325 21.4392 14.428 21.797 14.7414 21.9858C15.0236 22.1547 15.2724 21.8147 15.3835 21.657C15.4924 21.4992 15.6324 21.217 15.3347 21.0148Z"
fill="#FFE36A"
/>
<path
d="M17.6301 21.7326C17.1212 21.6237 16.9568 22.0459 16.8479 22.5459C16.739 23.0459 16.3302 24.6636 16.2546 25.0635C16.1457 25.6257 16.6612 25.7057 16.8812 25.188C17.0457 24.8013 17.759 23.057 17.8501 22.7792C17.9901 22.3592 18.1456 21.8437 17.6301 21.7326Z"
fill="#FFE36A"
/>
<path
d="M25.7585 12.0573C25.7585 12.0573 26.3363 4.28441 19.69 3.2978C13.7147 2.41118 12.5415 8.08421 12.5415 8.08421C12.5415 8.08421 10.2838 7.55757 8.66166 9.00639C7.05064 10.4463 7.17508 12.1507 7.17508 12.1507C7.17508 12.1507 3.20195 11.524 2.79531 14.935C2.41533 18.1215 7.11286 17.3282 7.11286 17.3282L29.4183 15.686C29.4183 15.686 29.9472 14.0106 28.2606 12.7462C27.2585 11.9929 25.7585 12.0573 25.7585 12.0573Z"
fill="#E4EAEE"
/>
<path
d="M13.7347 13.8196C13.9213 13.7574 14.9857 14.6662 18.0522 14.6951C22.2653 14.7373 25.4563 12.2552 25.4563 12.2552C25.4563 12.2552 25.5629 13.0108 25.2274 13.5485C24.8096 14.2151 24.163 14.3418 24.163 14.3418C24.163 14.3418 25.3318 15.1551 26.9362 15.0307C28.3828 14.9173 29.4294 14.4262 29.4294 14.4262C29.4294 14.4262 29.5694 15.1395 29.4916 15.8351C29.3361 17.2217 28.5228 17.7861 27.6251 17.8883C26.9651 17.9638 21.3276 17.9905 19.0122 18.0127C16.9256 18.0327 6.29285 18.2994 5.20624 18.1794C4.07963 18.0549 3.22412 17.3772 2.91303 16.4061C2.67526 15.6706 2.78859 15.1973 2.78859 15.1973C2.78859 15.1973 4.85959 15.7462 6.02175 15.6151C7.48167 15.4484 8.46162 14.5307 8.46162 14.5307C8.46162 14.5307 9.33713 15.0307 11.277 14.864C12.9458 14.7173 13.7347 13.8196 13.7347 13.8196Z"
fill="url(#paint0_radial_811_5475)"
/>
<path
d="M24.8653 18.2661C24.6386 18.1394 23.832 18.8927 23.3787 19.346C23.1009 19.6238 22.2165 20.3504 22.0965 21.0504C21.7988 22.7703 23.7698 23.1614 24.552 22.3637C25.143 21.7615 25.0364 20.586 25.0364 20.1904C25.0386 19.6416 25.1475 18.4216 24.8653 18.2661Z"
fill="#52C0EE"
/>
<path
d="M8.67058 19.1904C8.45504 19.0659 7.68174 19.7948 7.24621 20.237C6.97956 20.5058 6.13294 21.2103 6.01294 21.8947C5.71962 23.5723 7.5973 23.9657 8.34837 23.1924C8.91501 22.608 8.82168 21.4591 8.82391 21.0725C8.82613 20.537 8.93723 19.3459 8.67058 19.1904Z"
fill="#52C0EE"
/>
<path
d="M12.2548 24.1634C12.0126 24.0723 11.1971 24.8145 10.7438 25.2678C10.466 25.5456 9.64386 26.2566 9.52386 26.9566C9.2261 28.6765 11.1349 29.0832 11.9171 28.2854C12.5081 27.6832 12.4104 26.5077 12.4015 26.1122C12.3793 25.0812 12.4215 24.2256 12.2548 24.1634Z"
fill="#52C0EE"
/>
<path
d="M23.5054 20.4926C23.1943 20.3304 22.8165 20.3993 22.5765 20.8992C22.3365 21.3992 22.5343 21.797 22.7854 21.9103C23.0743 22.0436 23.432 21.9214 23.672 21.5147C23.912 21.1081 23.7654 20.6281 23.5054 20.4926Z"
fill="#B2E6FE"
/>
<path
d="M10.6682 26.4279C10.3482 26.3168 9.99715 26.4345 9.83716 26.9478C9.67717 27.4611 9.92382 27.8122 10.1794 27.8878C10.4749 27.9744 10.7993 27.8078 10.9727 27.3834C11.1438 26.9589 10.9349 26.5212 10.6682 26.4279Z"
fill="#B2E6FE"
/>
<path
d="M7.12613 21.3258C6.78837 21.2325 6.43283 21.3769 6.30395 21.9169C6.17507 22.4569 6.45061 22.8035 6.71948 22.8613C7.03058 22.9302 7.35278 22.7369 7.50389 22.288C7.65277 21.8436 7.41056 21.4036 7.12613 21.3258Z"
fill="#B2E6FE"
/>
<defs>
<radialGradient
id="paint0_radial_811_5475"
cx="0"
cy="0"
r="1"
gradientTransform="matrix(0.18837 -6.53799 9.79456 0.282554 16.401 18.5051)"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.1934" stopColor="#FFE366" />
<stop offset="0.3305" stopColor="#EDDD82" />
<stop offset="0.5709" stopColor="#D0D4AD" />
<stop offset="0.7589" stopColor="#BFCFC7" />
<stop offset="0.8699" stopColor="#B8CDD1" />
</radialGradient>
</defs>
</svg>


View File

@@ -1,16 +0,0 @@
<svg version="1.1" id="Layer_1" xmlns:x="ns_extend;" xmlns:i="ns_ai;" xmlns:graph="ns_graphs;" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 92.2 65" style="enable-background:new 0 0 92.2 65;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<metadata>
<sfw xmlns="ns_sfw;">
<slices>
</slices>
<sliceSourceBounds bottomLeftOrigin="true" height="65" width="92.2" x="-43.7" y="-98">
</sliceSourceBounds>
</sfw>
</metadata>
<path class="st0" d="M66.5,0H52.4l25.7,65h14.1L66.5,0z M25.7,0L0,65h14.4l5.3-13.6h26.9L51.8,65h14.4L40.5,0C40.5,0,25.7,0,25.7,0z
M24.3,39.3l8.8-22.8l8.8,22.8H24.3z">
</path>
</svg>


View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Claude</title><path d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z" fill="#D97757" fill-rule="nonzero"></path></svg>


View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>DeepSeek</title><path d="M23.748 4.482c-.254-.124-.364.113-.512.234-.051.039-.094.09-.137.136-.372.397-.806.657-1.373.626-.829-.046-1.537.214-2.163.848-.133-.782-.575-1.248-1.247-1.548-.352-.156-.708-.311-.955-.65-.172-.241-.219-.51-.305-.774-.055-.16-.11-.323-.293-.35-.2-.031-.278.136-.356.276-.313.572-.434 1.202-.422 1.84.027 1.436.633 2.58 1.838 3.393.137.093.172.187.129.323-.082.28-.18.552-.266.833-.055.179-.137.217-.329.14a5.526 5.526 0 01-1.736-1.18c-.857-.828-1.631-1.742-2.597-2.458a11.365 11.365 0 00-.689-.471c-.985-.957.13-1.743.388-1.836.27-.098.093-.432-.779-.428-.872.004-1.67.295-2.687.684a3.055 3.055 0 01-.465.137 9.597 9.597 0 00-2.883-.102c-1.885.21-3.39 1.102-4.497 2.623C.082 8.606-.231 10.684.152 12.85c.403 2.284 1.569 4.175 3.36 5.653 1.858 1.533 3.997 2.284 6.438 2.14 1.482-.085 3.133-.284 4.994-1.86.47.234.962.327 1.78.397.63.059 1.236-.03 1.705-.128.735-.156.684-.837.419-.961-2.155-1.004-1.682-.595-2.113-.926 1.096-1.296 2.746-2.642 3.392-7.003.05-.347.007-.565 0-.845-.004-.17.035-.237.23-.256a4.173 4.173 0 001.545-.475c1.396-.763 1.96-2.015 2.093-3.517.02-.23-.004-.467-.247-.588zM11.581 18c-2.089-1.642-3.102-2.183-3.52-2.16-.392.024-.321.471-.235.763.09.288.207.486.371.739.114.167.192.416-.113.603-.673.416-1.842-.14-1.897-.167-1.361-.802-2.5-1.86-3.301-3.307-.774-1.393-1.224-2.887-1.298-4.482-.02-.386.093-.522.477-.592a4.696 4.696 0 011.529-.039c2.132.312 3.946 1.265 5.468 2.774.868.86 1.525 1.887 2.202 2.891.72 1.066 1.494 2.082 2.48 2.914.348.292.625.514.891.677-.802.09-2.14.11-3.054-.614zm1-6.44a.306.306 0 01.415-.287.302.302 0 01.2.288.306.306 0 01-.31.307.303.303 0 01-.304-.308zm3.11 1.596c-.2.081-.399.151-.59.16a1.245 1.245 0 01-.798-.254c-.274-.23-.47-.358-.552-.758a1.73 1.73 0 01.016-.588c.07-.327-.008-.537-.239-.727-.187-.156-.426-.199-.688-.199a.559.559 0 01-.254-.078c-.11-.054-.2-.19-.114-.358.028-.054.16-.186.192-.21.356-.202.767-.136 1.146.016.352.144.618.408 1.001.782.391.451.462.576.685.914.176.265.336.537.445.848.067.195-.019.354-.25.452z" fill="#4D6BFE"></path></svg>


View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Gemini</title><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="#3186FF"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-0)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-1)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-2)"></path><defs><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-0" x1="7" x2="11" y1="15.5" y2="12"><stop stop-color="#08B962"></stop><stop offset="1" stop-color="#08B962" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-1" x1="8" x2="11.5" y1="5.5" y2="11"><stop stop-color="#F94543"></stop><stop offset="1" stop-color="#F94543" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-2" x1="3.5" x2="17.5" y1="13.5" y2="12"><stop stop-color="#FABC12"></stop><stop offset=".46" stop-color="#FABC12" stop-opacity="0"></stop></linearGradient></defs></svg>


View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>LangChain</title><path d="M8.373 14.502c.013-.06.024-.118.038-.17l.061.145c.115.28.229.557.506.714-.012.254-.334.357-.552.326-.048-.114-.115-.228-.255-.164-.143.056-.3-.01-.266-.185.333-.012.407-.371.468-.666zM18.385 9.245c-.318 0-.616.122-.839.342l-.902.887c-.243.24-.368.572-.343.913l.006.056c.032.262.149.498.337.682.13.128.273.21.447.266a.866.866 0 01-.247.777l-.056.055a2.022 2.022 0 01-1.355-1.555l-.01-.057-.046.037c-.03.024-.06.05-.088.078l-.902.887a1.156 1.156 0 000 1.65c.231.228.535.342.84.342.304 0 .607-.114.838-.341l.902-.888a1.156 1.156 0 00-.436-1.921.953.953 0 01.276-.842 2.062 2.062 0 011.371 1.57l.01.057.047-.037c.03-.024.06-.05.088-.078l.902-.888a1.155 1.155 0 000-1.65 1.188 1.188 0 00-.84-.342z" fill="#1C3C3C"></path><path clip-rule="evenodd" d="M17.901 6H6.1C2.736 6 0 8.692 0 12s2.736 6 6.099 6H17.9C21.264 18 24 15.308 24 12s-2.736-6-6.099-6zm-5.821 9.407c-.195.04-.414.047-.562-.106-.045.1-.136.077-.221.056a.797.797 0 00-.061-.014c-.01.025-.017.048-.026.073-.329.021-.575-.309-.732-.558a4.991 4.991 0 00-.473-.21c-.172-.07-.345-.14-.509-.23a2.218 2.218 0 00-.004.173c-.002.244-.004.503-.227.651-.007.295.236.292.476.29.207-.003.41-.005.447.184a.485.485 0 01-.05.003c-.046 0-.092 0-.127.034-.117.111-.242.063-.372.013-.12-.046-.243-.094-.367-.02a2.318 2.318 0 00-.262.154.97.97 0 01-.548.194c-.024-.036-.014-.059.006-.08a.562.562 0 00.043-.056c.019-.028.035-.057.051-.084.054-.095.103-.18.242-.22-.185-.029-.344.055-.5.137l-.004.002a4.21 4.21 0 01-.065.034c-.097.04-.154.009-.212-.023-.082-.045-.168-.092-.376.04-.04-.032-.02-.061.002-.086.091-.109.21-.125.345-.119-.351-.193-.604-.056-.81.055-.182.098-.327.176-.471-.012-.065.017-.102.063-.138.108-.015.02-.03.038-.047.055-.035-.039-.027-.083-.018-.128l.005-.026a.242.242 0 00.003-.03l-.027-.01c-.053-.022-.105-.044-.09-.124-.117-.04-.2.03-.286.094-.054-.041-.01-.095.032-.145a.279.279 0 00.045-.065c.038-.065.103-.067.166-.069.054-.001.108-.003.145-.042.133-.075.297-.036.462.003.121.028.242.057.354.042.203.025.454-.18.352-.385-.186-.233-.184-.528-.183-.813v-.143c-.016-.108-.172-.233-.328-.358-.12-.095-.24-.191-.298-.28-.16-.177-.285-.382-.409-.585l-.015-.024c-.212-.404-.297-.86-.382-1.315-.103-.546-.205-1.09-.526-1.54-.266.144-.612.075-.841-.118-.12.107-.13.247-.138.396l-.001.014c-.297-.292-.26-.844-.023-1.17.097-.128.213-.233.342-.326.03-.021.04-.042.039-.074.235-1.04 1.836-.839 2.342-.103.167.206.281.442.395.678.137.283.273.566.5.795.22.237.452.463.684.689.359.35.718.699 1.032 1.089.49.587.839 1.276 1.144 1.97.05.092.08.193.11.293.044.15.089.299.2.417.026.035.084.088.149.148.156.143.357.328.289.409.009.019.027.04.05.06.032.028.074.058.116.088.122.087.25.178.16.25zm7.778-3.545l-.902.887c-.24.237-.537.413-.859.51l-.017.005-.006.015A2.021 2.021 0 0117.6 14l-.902.888c-.393.387-.916.6-1.474.6-.557 0-1.08-.213-1.474-.6a2.03 2.03 0 010-2.9l.902-.888c.242-.238.531-.409.859-.508l.016-.004.006-.016c.105-.272.265-.516.475-.724l.902-.887c.393-.387.917-.6 1.474-.6.558 0 1.08.213 1.474.6.394.387.61.902.61 1.45 0 .549-.216 1.064-.61 1.45v.001z" fill="#1C3C3C" fill-rule="evenodd"></path></svg>


View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>LlamaIndex</title><path d="M15.855 17.122c-2.092.924-4.358.545-5.23.24 0 .21-.01.857-.048 1.78-.038.924-.332 1.507-.475 1.684.016.577.029 1.837-.047 2.26a1.93 1.93 0 01-.476.914H8.295c.114-.577.555-.946.761-1.058.114-1.193-.11-2.229-.238-2.597-.126.449-.437 1.49-.665 2.068a6.418 6.418 0 01-.713 1.299h-.951c-.048-.578.27-.77.475-.77.095-.177.323-.731.476-1.54.152-.807-.064-2.324-.19-2.981v-2.068c-1.522-.818-2.092-1.636-2.473-2.55-.304-.73-.222-1.843-.142-2.308-.096-.176-.373-.625-.476-1.25-.142-.866-.063-1.491 0-1.828-.095-.096-.285-.587-.285-1.78 0-1.192.349-1.811.523-1.972v-.529c-.666-.048-1.331-.336-1.712-.721-.38-.385-.095-.962.143-1.154.238-.193.475-.049.808-.145.333-.096.618-.192.76-.48C4.512 1.403 4.287.448 4.16 0c.57.077.935.577 1.046.818V0c.713.337 1.997 1.154 2.425 2.934.342 1.424.586 4.409.665 5.723 1.823.016 4.137-.26 6.229.193 1.901.412 2.757 1.25 3.755 1.25.999 0 1.57-.577 2.282-.096.714.481 1.094 1.828.999 2.838-.076.808-.697 1.074-.998 1.106-.38 1.27 0 2.485.237 2.934v1.827c.111.16.333.655.333 1.347 0 .693-.222 1.154-.333 1.299.19 1.077-.08 2.18-.238 2.597h-1.283c.152-.385.412-.481.523-.481.228-1.193.063-2.293-.048-2.693-.722-.424-1.188-1.17-1.331-1.491.016.272-.029 1.029-.333 1.875-.304.847-.76 1.347-.95 1.491v1.01h-1.284c0-.615.348-.737.523-.721.222-.4.76-1.01.76-2.212 0-1.015-.713-1.492-1.236-2.405-.248-.434-.127-.978-.047-1.203z" fill="url(#lobe-icons-llama-index-fill)"></path><defs><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-llama-index-fill" x1="4.021" x2="24.613" y1="2.02" y2="19.277"><stop offset=".062" stop-color="#F6DCD9"></stop><stop offset=".326" stop-color="#FFA5EA"></stop><stop offset=".589" stop-color="#45DFF8"></stop><stop offset="1" stop-color="#BC8DEB"></stop></linearGradient></defs></svg>

Before: 1.8 KiB

File diff suppressed because one or more lines are too long

Before: 5.5 KiB

View File

@@ -1,2 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="none"><path fill="#06D092" d="M8 0L1 4v8l7 4 7-4V4L8 0zm3.119 8.797L9.254 9.863 7.001 8.65v2.549l-2.118 1.33v-5.33l1.68-1.018 2.332 1.216V4.794l2.23-1.322-.006 5.325z"/></svg>

Before: 389 B

View File

@@ -1,4 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128">
<path fill="#f5a800" d="M67.648 69.797c-5.246 5.25-5.246 13.758 0 19.008 5.25 5.246 13.758 5.246 19.004 0 5.25-5.25 5.25-13.758 0-19.008-5.246-5.246-13.754-5.246-19.004 0Zm14.207 14.219a6.649 6.649 0 0 1-9.41 0 6.65 6.65 0 0 1 0-9.407 6.649 6.649 0 0 1 9.41 0c2.598 2.586 2.598 6.809 0 9.407ZM86.43 3.672l-8.235 8.234a4.17 4.17 0 0 0 0 5.875l32.149 32.149a4.17 4.17 0 0 0 5.875 0l8.234-8.235c1.61-1.61 1.61-4.261 0-5.87L92.29 3.671a4.159 4.159 0 0 0-5.86 0ZM28.738 108.895a3.763 3.763 0 0 0 0-5.31l-4.183-4.187a3.768 3.768 0 0 0-5.313 0l-8.644 8.649-.016.012-2.371-2.375c-1.313-1.313-3.45-1.313-4.75 0-1.313 1.312-1.313 3.449 0 4.75l14.246 14.242a3.353 3.353 0 0 0 4.746 0c1.3-1.313 1.313-3.45 0-4.746l-2.375-2.375.016-.012Zm0 0"/>
<path fill="#425cc7" d="M72.297 27.313 54.004 45.605c-1.625 1.625-1.625 4.301 0 5.926L65.3 62.824c7.984-5.746 19.18-5.035 26.363 2.153l9.148-9.149c1.622-1.625 1.622-4.297 0-5.922L78.22 27.313a4.185 4.185 0 0 0-5.922 0ZM60.55 67.585l-6.672-6.672c-1.563-1.562-4.125-1.562-5.684 0l-23.53 23.54a4.036 4.036 0 0 0 0 5.687l13.331 13.332a4.036 4.036 0 0 0 5.688 0l15.132-15.157c-3.199-6.609-2.625-14.593 1.735-20.73Zm0 0"/>
</svg>

Before: 1.2 KiB

View File

@@ -1,99 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="64"
height="64"
viewBox="0 0 64 64"
version="1.1"
id="svg20"
sodipodi:docname="supabase-icon.svg"
style="fill:none"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)">
<metadata
id="metadata24">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1687"
inkscape:window-height="849"
id="namedview22"
showgrid="false"
inkscape:zoom="2.0884956"
inkscape:cx="54.5"
inkscape:cy="56.5"
inkscape:window-x="70"
inkscape:window-y="0"
inkscape:window-maximized="0"
inkscape:current-layer="svg20" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path2"
style="fill:url(#paint0_linear);stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path4"
style="fill:url(#paint1_linear);fill-opacity:0.2;stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 26.89694,1.0634102 c 1.634986,-2.05918508 4.950125,-0.93090008 4.989521,1.698149 L 32.138899,41.214003 H 6.607076 c -4.6832501,0 -7.29518376,-5.409032 -4.3830007,-9.07673 z"
id="path6"
inkscape:connector-curvature="0"
style="fill:#3ecf8e;stroke-width:0.57177335" />
<defs
id="defs18">
<linearGradient
id="paint0_linear"
x1="53.973801"
y1="54.973999"
x2="94.163498"
y2="71.829498"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
stop-color="#249361"
id="stop8" />
<stop
offset="1"
stop-color="#3ECF8E"
id="stop10" />
</linearGradient>
<linearGradient
id="paint1_linear"
x1="36.1558"
y1="30.577999"
x2="54.484402"
y2="65.080597"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
id="stop13" />
<stop
offset="1"
stop-opacity="0"
id="stop15" />
</linearGradient>
</defs>
</svg>

Before: 3.2 KiB

View File

@@ -4,7 +4,6 @@ import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import AppLoading from 'components/AppLoading/AppLoading';
import KBarCommandPalette from 'components/KBarCommandPalette/KBarCommandPalette';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
@@ -26,8 +25,6 @@ import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { KBarCommandPaletteProvider } from 'providers/KBarCommandPaletteProvider';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
@@ -371,44 +368,39 @@ function App(): JSX.Element {
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<KBarCommandPaletteProvider>
<UserpilotRouteTracker />
<KBarCommandPalette />
<NotificationProvider>
<ErrorModalProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</ErrorModalProvider>
</NotificationProvider>
</KBarCommandPaletteProvider>
<UserpilotRouteTracker />
<NotificationProvider>
<ErrorModalProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</ErrorModalProvider>
</NotificationProvider>
</CompatRouter>
</Router>
</ConfigProvider>

View File

@@ -2,12 +2,14 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import { LOCALSTORAGE } from 'constants/localStorage';
const afterLogin = (
userId: string,
authToken: string,
refreshToken: string,
interceptorRejected?: boolean,
): void => {
setLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN, authToken);
setLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN, refreshToken);
setLocalStorageApi(LOCALSTORAGE.USER_ID, userId);
setLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN, 'true');
if (!interceptorRejected) {
@@ -16,6 +18,7 @@ const afterLogin = (
detail: {
accessJWT: authToken,
refreshJWT: refreshToken,
id: userId,
},
}),
);
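
The reworked afterLogin takes the user id as its new first argument and persists it alongside both tokens. A minimal sketch of a call site, assuming loginApi resolves with the userId/accessJwt/refreshJwt fields that the interceptor changes later in this diff rely on:

```ts
// Hypothetical call site for the new afterLogin signature; loginApi and the
// response field names are assumptions drawn from the interceptor diff below.
const response = await loginApi({ email, password });

afterLogin(
  response.data.userId, // new first argument, persisted as USER_ID
  response.data.accessJwt, // stored as AUTH_TOKEN
  response.data.refreshJwt, // stored as REFRESH_AUTH_TOKEN
);
```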

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
AlertRuleV2,
PostableAlertRuleV2,
} from 'types/api/alerts/alertTypesV2';
export interface CreateAlertRuleResponse {
data: AlertRuleV2;
status: string;
}
const createAlertRule = async (
props: PostableAlertRuleV2,
): Promise<SuccessResponse<CreateAlertRuleResponse> | ErrorResponse> => {
const response = await axios.post(`/rules`, {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default createAlertRule;

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PostableAlertRuleV2 } from 'types/api/alerts/alertTypesV2';
export interface TestAlertRuleResponse {
data: {
alertCount: number;
message: string;
};
status: string;
}
const testAlertRule = async (
props: PostableAlertRuleV2,
): Promise<SuccessResponse<TestAlertRuleResponse> | ErrorResponse> => {
const response = await axios.post(`/testRule`, {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default testAlertRule;

View File

@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PostableAlertRuleV2 } from 'types/api/alerts/alertTypesV2';
export interface UpdateAlertRuleResponse {
data: string;
status: string;
}
const updateAlertRule = async (
id: string,
postableAlertRule: PostableAlertRuleV2,
): Promise<SuccessResponse<UpdateAlertRuleResponse> | ErrorResponse> => {
const response = await axios.put(`/rules/${id}`, {
...postableAlertRule,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default updateAlertRule;

View File

@@ -0,0 +1,62 @@
import getLocalStorageApi from 'api/browser/localstorage/get';
import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { isEmpty } from 'lodash-es';
export interface WsDataEvent {
read_rows: number;
read_bytes: number;
elapsed_ms: number;
}
interface GetQueryStatsProps {
queryId: string;
setData: React.Dispatch<React.SetStateAction<WsDataEvent | undefined>>;
}
function getURL(baseURL: string, queryId: string): URL | string {
if (baseURL && !isEmpty(baseURL)) {
return `${baseURL}/ws/query_progress?q=${queryId}`;
}
const url = new URL(`/ws/query_progress?q=${queryId}`, window.location.href);
if (window.location.protocol === 'http:') {
url.protocol = 'ws';
} else {
url.protocol = 'wss';
}
return url;
}
export function getQueryStats(props: GetQueryStatsProps): void {
const { queryId, setData } = props;
const token = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) || '';
// https://github.com/whatwg/websockets/issues/20 — why relative URLs are not used here
const url = getURL(ENVIRONMENT.wsURL, queryId);
const socket = new WebSocket(url, token);
socket.addEventListener('message', (event) => {
try {
const parsedData = JSON.parse(event?.data);
setData(parsedData);
} catch {
setData(event?.data);
}
});
socket.addEventListener('error', (event) => {
console.error(event);
});
socket.addEventListener('close', (event) => {
// 1000 is a normal closure status code
if (event.code !== 1000) {
console.error('WebSocket closed with error:', event);
} else {
console.error('WebSocket closed normally.');
}
});
}
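
This new helper opens a WebSocket to /ws/query_progress and pushes every parsed message into the caller's state setter. A minimal consumer sketch, assuming getQueryStats and WsDataEvent are imported from wherever this module lands in the tree:

```ts
// Hypothetical React hook over getQueryStats; import paths are assumed.
import { useEffect, useState } from 'react';

export function useQueryProgress(queryId: string): WsDataEvent | undefined {
  const [stats, setStats] = useState<WsDataEvent | undefined>(undefined);

  useEffect(() => {
    // Streams { read_rows, read_bytes, elapsed_ms } events into state.
    getQueryStats({ queryId, setData: setStats });
  }, [queryId]);

  return stats;
}
```

Note that getQueryStats returns void rather than the socket, so a consumer like this cannot close the connection on unmount.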

View File

@@ -1,115 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { ApiBaseInstance } from 'api';
import { getFieldKeys } from '../getFieldKeys';
// Mock the API instance
jest.mock('api', () => ({
ApiBaseInstance: {
get: jest.fn(),
},
}));
describe('getFieldKeys API', () => {
beforeEach(() => {
jest.clearAllMocks();
});
const mockSuccessResponse = {
status: 200,
data: {
status: 'success',
data: {
keys: {
'service.name': [],
'http.status_code': [],
},
complete: true,
},
},
};
it('should call API with correct parameters when no args provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call function with no parameters
await getFieldKeys();
// Verify API was called correctly with empty params object
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: {},
});
});
it('should call API with signal parameter when provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call function with signal parameter
await getFieldKeys('traces');
// Verify API was called with signal parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'traces' },
});
});
it('should call API with name parameter when provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
data: {
keys: { service: [] },
complete: false,
},
},
});
// Call function with name parameter
await getFieldKeys(undefined, 'service');
// Verify API was called with name parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { name: 'service' },
});
});
it('should call API with both signal and name when provided', async () => {
// Mock successful API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
data: {
keys: { service: [] },
complete: false,
},
},
});
// Call function with both parameters
await getFieldKeys('logs', 'service');
// Verify API was called with both parameters
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'logs', name: 'service' },
});
});
it('should return properly formatted response', async () => {
// Mock API to return our response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call the function
const result = await getFieldKeys('traces');
// Verify the returned structure matches SuccessResponseV2 format
expect(result).toEqual({
httpStatusCode: 200,
data: mockSuccessResponse.data.data,
});
});
});

View File

@@ -1,214 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { ApiBaseInstance } from 'api';
import { getFieldValues } from '../getFieldValues';
// Mock the API instance
jest.mock('api', () => ({
ApiBaseInstance: {
get: jest.fn(),
},
}));
describe('getFieldValues API', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should call the API with correct parameters (no options)', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
data: {
values: {
stringValues: ['frontend', 'backend'],
},
complete: true,
},
},
});
// Call function without parameters
await getFieldValues();
// Verify API was called correctly with empty params
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: {},
});
});
it('should call the API with signal parameter', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
data: {
values: {
stringValues: ['frontend', 'backend'],
},
complete: true,
},
},
});
// Call function with signal parameter
await getFieldValues('traces');
// Verify API was called with signal parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { signal: 'traces' },
});
});
it('should call the API with name parameter', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
data: {
values: {
stringValues: ['frontend', 'backend'],
},
complete: true,
},
},
});
// Call function with name parameter
await getFieldValues(undefined, 'service.name');
// Verify API was called with name parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name' },
});
});
it('should call the API with value parameter', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
data: {
values: {
stringValues: ['frontend'],
},
complete: false,
},
},
});
// Call function with value parameter
await getFieldValues(undefined, 'service.name', 'front');
// Verify API was called with value parameter
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name', searchText: 'front' },
});
});
it('should call the API with time range parameters', async () => {
// Mock API response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
data: {
values: {
stringValues: ['frontend', 'backend'],
},
complete: true,
},
},
});
// Call function with time range parameters
const startUnixMilli = 1625097600000000; // Note: microseconds, not milliseconds
const endUnixMilli = 1625184000000000;
await getFieldValues(
'logs',
'service.name',
undefined,
startUnixMilli,
endUnixMilli,
);
// Verify API was called with time range parameters (converted to seconds)
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: {
signal: 'logs',
name: 'service.name',
startUnixMilli: '1625097600', // Should be converted to seconds (divided by 1000000)
endUnixMilli: '1625184000', // Should be converted to seconds (divided by 1000000)
},
});
});
it('should normalize the response values', async () => {
// Mock API response with multiple value types
const mockResponse = {
status: 200,
data: {
status: 'success',
data: {
values: {
stringValues: ['frontend', 'backend'],
numberValues: [200, 404],
boolValues: [true, false],
},
complete: true,
},
},
};
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);
// Call the function
const result = await getFieldValues('traces', 'mixed.values');
// Verify the response has normalized values array
expect(result.data?.normalizedValues).toContain('frontend');
expect(result.data?.normalizedValues).toContain('backend');
expect(result.data?.normalizedValues).toContain('200');
expect(result.data?.normalizedValues).toContain('404');
expect(result.data?.normalizedValues).toContain('true');
expect(result.data?.normalizedValues).toContain('false');
expect(result.data?.normalizedValues?.length).toBe(6);
});
it('should return a properly formatted success response', async () => {
// Create mock response
const mockApiResponse = {
status: 200,
data: {
status: 'success',
data: {
values: {
stringValues: ['frontend', 'backend'],
},
complete: true,
},
},
};
// Mock API to return our response
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
// Call the function
const result = await getFieldValues('traces', 'service.name');
// Verify the returned structure matches SuccessResponseV2 format
expect(result).toEqual({
httpStatusCode: 200,
data: expect.objectContaining({
values: expect.any(Object),
normalizedValues: expect.any(Array),
complete: true,
}),
});
});
});

View File

@@ -1,7 +1,5 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { FieldKeyResponse } from 'types/api/dynamicVariables/getFieldKeys';
/**
@@ -12,27 +10,25 @@ import { FieldKeyResponse } from 'types/api/dynamicVariables/getFieldKeys';
export const getFieldKeys = async (
signal?: 'traces' | 'logs' | 'metrics',
name?: string,
): Promise<SuccessResponseV2<FieldKeyResponse>> => {
): Promise<SuccessResponse<FieldKeyResponse> | ErrorResponse> => {
const params: Record<string, string> = {};
if (signal) {
params.signal = encodeURIComponent(signal);
params.signal = signal;
}
if (name) {
params.name = encodeURIComponent(name);
params.name = name;
}
try {
const response = await ApiBaseInstance.get('/fields/keys', { params });
const response = await ApiBaseInstance.get('/fields/keys', { params });
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default getFieldKeys;
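
With the V2 wrapper reverted, callers consume the legacy SuccessResponse/ErrorResponse union again. A hedged sketch of the new calling convention; the keys map shape follows the mocked responses in the tests above:

```ts
// Hypothetical caller of the reverted getFieldKeys.
const res = await getFieldKeys('traces', 'service');

if (res.statusCode === 200 && res.payload) {
  // payload.keys maps attribute names to metadata, per the test fixtures.
  console.log(Object.keys(res.payload.keys ?? {}));
} else {
  console.error(res.error);
}
```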

View File

@@ -1,8 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
/**
@@ -10,28 +7,26 @@ import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
* @param signal Type of signal (traces, logs, metrics)
* @param name Name of the attribute for which values are being fetched
* @param value Optional search text
* @param existingQuery Optional existing query - across all present dynamic variables
*/
export const getFieldValues = async (
signal?: 'traces' | 'logs' | 'metrics',
name?: string,
searchText?: string,
value?: string,
startUnixMilli?: number,
endUnixMilli?: number,
existingQuery?: string,
): Promise<SuccessResponseV2<FieldValueResponse>> => {
): Promise<SuccessResponse<FieldValueResponse> | ErrorResponse> => {
const params: Record<string, string> = {};
if (signal) {
params.signal = encodeURIComponent(signal);
params.signal = signal;
}
if (name) {
params.name = encodeURIComponent(name);
params.name = name;
}
if (searchText) {
params.searchText = encodeURIComponent(searchText);
if (value) {
params.value = value;
}
if (startUnixMilli) {
@@ -42,46 +37,27 @@ export const getFieldValues = async (
params.endUnixMilli = Math.floor(endUnixMilli / 1000000).toString();
}
if (existingQuery) {
params.existingQuery = existingQuery;
}
const response = await ApiBaseInstance.get('/fields/values', { params });
try {
const response = await ApiBaseInstance.get('/fields/values', { params });
// Normalize values from different types (stringValues, boolValues, etc.)
if (response.data?.data?.values) {
const allValues: string[] = [];
Object.entries(response.data?.data?.values).forEach(
([key, valueArray]: [string, any]) => {
// Skip RelatedValues as they should be kept separate
if (key === 'relatedValues') {
return;
}
if (Array.isArray(valueArray)) {
allValues.push(...valueArray.map(String));
}
},
);
// Add a normalized values array to the response
response.data.data.normalizedValues = allValues;
// Add relatedValues to the response as per FieldValueResponse
if (response.data?.data?.values?.relatedValues) {
response.data.data.relatedValues =
response.data?.data?.values?.relatedValues;
// Normalize values from different types (stringValues, boolValues, etc.)
if (response.data?.data?.values) {
const allValues: string[] = [];
Object.values(response.data.data.values).forEach((valueArray: any) => {
if (Array.isArray(valueArray)) {
allValues.push(...valueArray.map(String));
}
}
});
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
// Add a normalized values array to the response
response.data.data.normalizedValues = allValues;
}
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default getFieldValues;
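
The reverted getFieldValues keeps the value-normalization step but returns the legacy envelope, and the time range arguments are still divided by 1,000,000 before being sent. A minimal sketch mirroring the test fixtures above:

```ts
// Hypothetical caller; the argument values mirror the test fixtures above.
const res = await getFieldValues(
  'logs',
  'service.name',
  'front', // value: optional search text
  1625097600000000, // start; sent as '1625097600' after division
  1625184000000000, // end; sent as '1625184000' after division
);

if (res.statusCode === 200 && res.payload) {
  // normalizedValues flattens stringValues/numberValues/boolValues.
  console.log(res.payload.normalizedValues);
}
```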

View File

@@ -2,7 +2,7 @@
/* eslint-disable no-param-reassign */
/* eslint-disable @typescript-eslint/no-explicit-any */
import getLocalStorageApi from 'api/browser/localstorage/get';
import post from 'api/v2/sessions/rotate/post';
import loginApi from 'api/v1/login/login';
import afterLogin from 'AppRoutes/utils';
import axios, {
AxiosError,
@@ -12,7 +12,6 @@ import axios, {
import { ENVIRONMENT } from 'constants/env';
import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryClient } from 'react-query';
import { eventEmitter } from 'utils/getEventEmitter';
import apiV1, {
@@ -27,14 +26,6 @@ import apiV1, {
import { Logout } from './utils';
const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
const queryClient = new QueryClient({
defaultOptions: {
queries: {
refetchOnWindowFocus: false,
retry: false,
},
},
});
const interceptorsResponse = (
value: AxiosResponse<any>,
@@ -83,24 +74,19 @@ const interceptorRejected = async (
try {
if (axios.isAxiosError(value) && value.response) {
const { response } = value;
if (
response.status === 401 &&
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
response.config.url !== '/sessions/rotate' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
)
) {
// reject the refresh token error
if (response.status === 401 && response.config.url !== '/login') {
try {
const accessToken = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN);
const refreshToken = getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN);
const response = await queryClient.fetchQuery({
queryFn: () => post({ refreshToken: refreshToken || '' }),
queryKey: ['/api/v2/sessions/rotate', accessToken, refreshToken],
const response = await loginApi({
refreshToken: getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '',
});
afterLogin(response.data.accessToken, response.data.refreshToken, true);
afterLogin(
response.data.userId,
response.data.accessJwt,
response.data.refreshJwt,
true,
);
try {
const reResponse = await axios(
@@ -109,7 +95,7 @@ const interceptorRejected = async (
method: value.config.method,
headers: {
...value.config.headers,
Authorization: `Bearer ${response.data.accessToken}`,
Authorization: `Bearer ${response.data.accessJwt}`,
},
data: {
...JSON.parse(value.config.data || '{}'),
@@ -127,8 +113,8 @@ const interceptorRejected = async (
Logout();
}
}
if (response.status === 401 && response.config.url === '/sessions/rotate') {
// when refresh token is expired
if (response.status === 401 && response.config.url === '/login') {
Logout();
}
}
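
In short, the refresh flow now hinges on /login instead of /sessions/rotate: a 401 from any other endpoint refreshes the tokens and replays the original request once, while a 401 from /login itself logs the user out. A condensed, illustrative outline of that branch (not the literal interceptor):

```ts
// Illustrative outline of the 401 handling above; error shapes simplified.
async function handle401(failed: AxiosError): Promise<AxiosResponse | void> {
  const { response } = failed;
  if (!response) return undefined;

  if (response.status === 401 && response.config.url !== '/login') {
    // Refresh via the legacy login endpoint using the stored refresh token.
    const login = await loginApi({
      refreshToken: getLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN) || '',
    });
    afterLogin(
      login.data.userId,
      login.data.accessJwt,
      login.data.refreshJwt,
      true,
    );

    // Replay the original request once with the fresh access token.
    return axios({
      ...failed.config,
      headers: {
        ...failed.config?.headers,
        Authorization: `Bearer ${login.data.accessJwt}`,
      },
    });
  }

  if (response.status === 401 && response.config.url === '/login') {
    Logout(); // the refresh token itself has expired
  }
  return undefined;
}
```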

View File

@@ -1,34 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
export interface CreateRoutingPolicyBody {
name: string;
expression: string;
channels: string[];
description?: string;
}
export interface CreateRoutingPolicyResponse {
success: boolean;
message: string;
}
const createRoutingPolicy = async (
props: CreateRoutingPolicyBody,
): Promise<
SuccessResponseV2<CreateRoutingPolicyResponse> | ErrorResponseV2
> => {
try {
const response = await axios.post(`/route_policies`, props);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default createRoutingPolicy;

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
export interface DeleteRoutingPolicyResponse {
success: boolean;
message: string;
}
const deleteRoutingPolicy = async (
routingPolicyId: string,
): Promise<
SuccessResponseV2<DeleteRoutingPolicyResponse> | ErrorResponseV2
> => {
try {
const response = await axios.delete(`/route_policies/${routingPolicyId}`);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default deleteRoutingPolicy;

View File

@@ -1,40 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
export interface ApiRoutingPolicy {
id: string;
name: string;
expression: string;
description: string;
channels: string[];
createdAt: string;
updatedAt: string;
createdBy: string;
updatedBy: string;
}
export interface GetRoutingPoliciesResponse {
status: string;
data?: ApiRoutingPolicy[];
}
export const getRoutingPolicies = async (
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetRoutingPoliciesResponse> | ErrorResponseV2> => {
try {
const response = await axios.get('/route_policies', {
signal,
headers,
});
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

View File

@@ -1,38 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponseV2, ErrorV2Resp, SuccessResponseV2 } from 'types/api';
export interface UpdateRoutingPolicyBody {
name: string;
expression: string;
channels: string[];
description: string;
}
export interface UpdateRoutingPolicyResponse {
success: boolean;
message: string;
}
const updateRoutingPolicy = async (
id: string,
props: UpdateRoutingPolicyBody,
): Promise<
SuccessResponseV2<UpdateRoutingPolicyResponse> | ErrorResponseV2
> => {
try {
const response = await axios.put(`/route_policies/${id}`, {
...props,
});
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default updateRoutingPolicy;

View File

@@ -1,25 +0,0 @@
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps } from 'types/api/settings/getRetention';
// Only works for logs
const getRetentionV2 = async (): Promise<
SuccessResponseV2<PayloadProps<'logs'>>
> => {
try {
const response = await ApiV2Instance.get<PayloadProps<'logs'>>(
`/settings/ttl`,
);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getRetentionV2;

View File

@@ -1,14 +1,14 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadPropsV2, Props } from 'types/api/settings/setRetention';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/settings/setRetention';
const setRetention = async (
props: Props,
): Promise<SuccessResponseV2<PayloadPropsV2>> => {
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post<PayloadPropsV2>(
const response = await axios.post<PayloadProps>(
`/settings/ttl?duration=${props.totalDuration}&type=${props.type}${
props.coldStorage
? `&coldStorage=${props.coldStorage}&toColdDuration=${props.toColdDuration}`
@@ -17,11 +17,13 @@ const setRetention = async (
);
return {
httpStatusCode: response.status,
data: response.data,
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
return ErrorResponseHandler(error as AxiosError);
}
};
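
setRetention now serializes its options onto the query string and returns the legacy envelope. A hedged sketch of a caller; the field names come from the URL construction above, while the concrete values are illustrative:

```ts
// Hypothetical caller; Props lives in types/api/settings/setRetention.
const res = await setRetention(({
  type: 'traces',
  totalDuration: '720h',
  coldStorage: 's3',
  toColdDuration: '168h',
} as unknown) as Props);

if (res.statusCode === 200) {
  console.log(res.message); // 'Success'
} else {
  console.error(res.error);
}
```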

View File

@@ -1,32 +0,0 @@
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadPropsV2, PropsV2 } from 'types/api/settings/setRetention';
const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDuration,
ttlConditions,
}: PropsV2): Promise<SuccessResponseV2<PayloadPropsV2>> => {
try {
const response = await ApiV2Instance.post<PayloadPropsV2>(`/settings/ttl`, {
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDuration,
ttlConditions,
});
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default setRetentionV2;

View File

@@ -1,31 +0,0 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/thirdPartyApis/listOverview';
const listOverview = async (
props: Props,
): Promise<SuccessResponseV2<PayloadProps>> => {
const { start, end, show_ip: showIp, filter } = props;
try {
const response = await ApiBaseInstance.post(
`/third-party-apis/overview/list`,
{
start,
end,
show_ip: showIp,
filter,
},
);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default listOverview;

View File

@@ -3,15 +3,7 @@ import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import history from 'lib/history';
import deleteSession from './v2/sessions/delete';
export const Logout = async (): Promise<void> => {
try {
await deleteSession();
} catch (error) {
console.error(error);
}
export const Logout = (): void => {
deleteLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN);
deleteLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN);
deleteLocalStorageKey(LOCALSTORAGE.IS_IDENTIFIED_USER);
@@ -22,6 +14,7 @@ export const Logout = async (): Promise<void> => {
deleteLocalStorageKey(LOCALSTORAGE.USER_ID);
deleteLocalStorageKey(LOCALSTORAGE.QUICK_FILTERS_SETTINGS_ANNOUNCEMENT);
window.dispatchEvent(new CustomEvent('LOGOUT'));
history.push(ROUTES.LOGIN);
};
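
Logout is synchronous again: it no longer awaits a server-side session delete, it just clears local storage, emits a LOGOUT CustomEvent, and redirects. Anything that needs to react to logout can listen for that event; the cache handle below is an assumption, purely for illustration:

```ts
// Hypothetical listener for the LOGOUT CustomEvent dispatched above.
window.addEventListener('LOGOUT', () => {
  queryClient.clear(); // assumed in-memory cache; illustrative only
});
```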

View File

@@ -2,19 +2,20 @@ import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Info } from 'types/api/v1/version/get';
import { AuthDomain } from 'types/api/SAML/listDomain';
import { PayloadProps, Props } from 'types/api/SAML/postDomain';
const get = async (): Promise<SuccessResponseV2<Info>> => {
const create = async (props: Props): Promise<SuccessResponseV2<AuthDomain>> => {
try {
const response = await axios.get<Info>(`/version`);
const response = await axios.post<PayloadProps>(`/domains`, props);
return {
httpStatusCode: response.status,
data: response.data,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default get;
export default create;
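
The version-info handler has been repurposed into a SAML domain create call that posts to /domains and unwraps data.data. A hedged usage sketch; the request body fields are assumptions, with the real shape defined in types/api/SAML/postDomain:

```ts
// Hypothetical caller of create; the Props fields shown are assumed.
const res = await create(({ name: 'example.com' } as unknown) as Props);
if (res.httpStatusCode === 200) {
  console.log(res.data); // the created AuthDomain
}
```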

View File

@@ -2,10 +2,11 @@ import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/SAML/deleteDomain';
const deleteDomain = async (id: string): Promise<SuccessResponseV2<null>> => {
const deleteDomain = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete<null>(`/domains/${id}`);
const response = await axios.delete<PayloadProps>(`/domains/${props.id}`);
return {
httpStatusCode: response.status,

View File

@@ -1,25 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { UpdatableAuthDomain } from 'types/api/v1/domains/put';
const put = async (
props: UpdatableAuthDomain,
): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.put<RawSuccessResponse<null>>(
`/domains/${props.id}`,
{ config: props.config },
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default put;

View File

@@ -1,16 +1,12 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { AuthDomain, PayloadProps } from 'types/api/SAML/listDomain';
const listAllDomain = async (): Promise<
SuccessResponseV2<GettableAuthDomain[]>
> => {
const listAllDomain = async (): Promise<SuccessResponseV2<AuthDomain[]>> => {
try {
const response = await axios.get<RawSuccessResponse<GettableAuthDomain[]>>(
`/domains`,
);
const response = await axios.get<PayloadProps>(`/domains`);
return {
httpStatusCode: response.status,

View File

@@ -1,26 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { PostableAuthDomain } from 'types/api/v1/domains/post';
const post = async (
props: PostableAuthDomain,
): Promise<SuccessResponseV2<GettableAuthDomain>> => {
try {
const response = await axios.post<RawSuccessResponse<GettableAuthDomain>>(
`/domains`,
props,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default post;

View File

@@ -0,0 +1,23 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { AuthDomain } from 'types/api/SAML/listDomain';
import { PayloadProps, Props } from 'types/api/SAML/updateDomain';
const updateDomain = async (
props: Props,
): Promise<SuccessResponseV2<AuthDomain>> => {
try {
const response = await axios.put<PayloadProps>(`/domains/${props.id}`, props);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default updateDomain;
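
updateDomain reads only props.id explicitly (for the URL) and sends the whole props object as the PUT body. A hedged sketch; existingDomain and the config fields are placeholders:

```ts
// Hypothetical caller; the real Props shape is in types/api/SAML/updateDomain.
const res = await updateDomain(({
  id: existingDomain.id,
  // ...updated SAML config fields
} as unknown) as Props);
console.log(res.httpStatusCode);
```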

View File

@@ -1,64 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import { ExportRawDataProps } from 'types/api/exportRawData/getExportRawData';
export const downloadExportData = async (
props: ExportRawDataProps,
): Promise<void> => {
try {
const queryParams = new URLSearchParams();
queryParams.append('start', String(props.start));
queryParams.append('end', String(props.end));
queryParams.append('filter', props.filter);
props.columns.forEach((col) => {
queryParams.append('columns', col);
});
queryParams.append('order_by', props.orderBy);
queryParams.append('limit', String(props.limit));
queryParams.append('format', props.format);
const response = await axios.get<Blob>(`export_raw_data?${queryParams}`, {
responseType: 'blob', // Important: tell axios to handle response as blob
decompress: true, // Enable automatic decompression
headers: {
Accept: 'application/octet-stream', // Tell server we expect binary data
},
timeout: 0,
});
// Only proceed if the response status is 200
if (response.status !== 200) {
throw new Error(
`Failed to download data: server returned status ${response.status}`,
);
}
// Create blob URL from response data
const blob = new Blob([response.data], { type: 'application/octet-stream' });
const url = window.URL.createObjectURL(blob);
// Create and configure download link
const link = document.createElement('a');
link.href = url;
// Get filename from Content-Disposition header or generate timestamped default
const filename =
response.headers['content-disposition']
?.split('filename=')[1]
?.replace(/["']/g, '') || `exported_data.${props.format || 'txt'}`;
link.setAttribute('download', filename);
// Trigger download
document.body.appendChild(link);
link.click();
link.remove();
URL.revokeObjectURL(url);
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default downloadExportData;

View File

@@ -2,12 +2,15 @@ import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/accept';
import { UserResponse } from 'types/api/user/getUser';
import {
LoginPrecheckResponse,
PayloadProps,
Props,
} from 'types/api/user/accept';
const accept = async (
props: Props,
): Promise<SuccessResponseV2<UserResponse>> => {
): Promise<SuccessResponseV2<LoginPrecheckResponse>> => {
try {
const response = await axios.post<PayloadProps>(`/invite/accept`, props);
return {

View File

@@ -2,11 +2,15 @@ import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, UserResponse } from 'types/api/user/getUser';
import { PayloadProps, Props, UserLoginResponse } from 'types/api/user/login';
const get = async (): Promise<SuccessResponseV2<UserResponse>> => {
const login = async (
props: Props,
): Promise<SuccessResponseV2<UserLoginResponse>> => {
try {
const response = await axios.get<PayloadProps>(`/user/me`);
const response = await axios.post<PayloadProps>(`/login`, {
...props,
});
return {
httpStatusCode: response.status,
@@ -17,4 +21,4 @@ const get = async (): Promise<SuccessResponseV2<UserResponse>> => {
}
};
export default get;
export default login;

View File

@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/loginPrecheck';
const loginPrecheck = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/loginPrecheck?email=${encodeURIComponent(
props.email,
)}&ref=${encodeURIComponent(window.location.href)}`,
);
return {
statusCode: 200,
error: null,
message: response.statusText,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default loginPrecheck;
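
loginPrecheck URL-encodes the email plus the current page URL (as ref) and returns the legacy envelope. A minimal sketch; the payload fields (SSO flags and so on) live in types/api/user/loginPrecheck and are not spelled out here:

```ts
// Hypothetical caller of loginPrecheck.
const res = await loginPrecheck({ email: 'user@example.com' });
if (res.statusCode === 200) {
  console.log(res.payload); // precheck result for this email
}
```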

View File

@@ -1,27 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { Props } from 'types/api/user/signup';
import { SignupResponse } from 'types/api/v1/register/post';
const post = async (
props: Props,
): Promise<SuccessResponseV2<SignupResponse>> => {
try {
const response = await axios.post<RawSuccessResponse<SignupResponse>>(
`/register`,
{
...props,
},
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default post;

View File

@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/user/loginPrecheck';
import { Props } from 'types/api/user/signup';
const signup = async (
props: Props,
): Promise<SuccessResponse<null | PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post(`/register`, {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data?.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default signup;

View File

@@ -0,0 +1,25 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { getVersion } from 'constants/api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps } from 'types/api/user/getVersion';
const getVersionApi = async (): Promise<
SuccessResponse<PayloadProps> | ErrorResponse
> => {
try {
const response = await axios.get(`/${getVersion}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getVersionApi;

View File

@@ -1,27 +0,0 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { Props, SessionsContext } from 'types/api/v2/sessions/context/get';
const get = async (
props: Props,
): Promise<SuccessResponseV2<SessionsContext>> => {
try {
const response = await axios.get<RawSuccessResponse<SessionsContext>>(
'/sessions/context',
{
params: props,
},
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default get;

View File

@@ -1,19 +0,0 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
const deleteSession = async (): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete<RawSuccessResponse<null>>('/sessions');
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default deleteSession;

View File

@@ -1,23 +0,0 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { Props, Token } from 'types/api/v2/sessions/email_password/post';
const post = async (props: Props): Promise<SuccessResponseV2<Token>> => {
try {
const response = await axios.post<RawSuccessResponse<Token>>(
'/sessions/email_password',
props,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default post;

View File

@@ -1,23 +0,0 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { Props, Token } from 'types/api/v2/sessions/rotate/post';
const post = async (props: Props): Promise<SuccessResponseV2<Token>> => {
try {
const response = await axios.post<RawSuccessResponse<Token>>(
'/sessions/rotate',
props,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default post;

View File

@@ -124,7 +124,7 @@ export const FUNCTION_NAMES: Record<string, FunctionName> = {
RUNNING_DIFF: 'runningDiff',
LOG2: 'log2',
LOG10: 'log10',
CUM_SUM: 'cumulativeSum',
CUM_SUM: 'cumSum',
EWMA3: 'ewma3',
EWMA5: 'ewma5',
EWMA7: 'ewma7',
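
The practical effect of this one-line change is that the cumulative-sum function now serializes under the name 'cumSum'. A tiny illustrative snippet:

```ts
// After the rename; the surrounding function shape is illustrative.
const fn = { name: FUNCTION_NAMES.CUM_SUM, args: [] }; // fn.name === 'cumSum'
```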

View File

@@ -1,284 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { SuccessResponse } from 'types/api';
import {
MetricRangePayloadV5,
QueryBuilderFormula,
QueryRangeRequestV5,
QueryRangeResponseV5,
RequestType,
ScalarData,
TelemetryFieldKey,
TimeSeries,
TimeSeriesData,
TimeSeriesValue,
} from 'types/api/v5/queryRange';
import { convertV5ResponseToLegacy } from './convertV5Response';
describe('convertV5ResponseToLegacy', () => {
function makeBaseSuccess<T>(
payload: T,
params: QueryRangeRequestV5,
): SuccessResponse<T, QueryRangeRequestV5> {
return {
statusCode: 200,
message: 'success',
payload,
error: null,
params,
};
}
function makeBaseParams(
requestType: RequestType,
queries: QueryRangeRequestV5['compositeQuery']['queries'],
): QueryRangeRequestV5 {
return {
schemaVersion: 'v1',
start: 1,
end: 2,
requestType,
compositeQuery: { queries },
variables: {},
formatOptions: { formatTableResultForUI: false, fillGaps: false },
};
}
it('converts time_series response into legacy series structure', () => {
const timeSeries: TimeSeriesData = {
queryName: 'A',
aggregations: [
{
index: 0,
alias: '__result_0',
meta: {},
series: [
({
labels: [
{
key: ({ name: 'service.name' } as unknown) as TelemetryFieldKey,
value: 'adservice',
},
],
values: [
({ timestamp: 1000, value: 10 } as unknown) as TimeSeriesValue,
({ timestamp: 2000, value: 12 } as unknown) as TimeSeriesValue,
],
} as unknown) as TimeSeries,
],
},
],
};
const v5Data: QueryRangeResponseV5 = {
type: 'time_series',
data: { results: [timeSeries] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('time_series', [
{
type: 'builder_query',
spec: {
name: 'A',
signal: 'traces',
stepInterval: 60,
disabled: false,
aggregations: [{ expression: 'count()' }],
},
},
]);
const input: SuccessResponse<
MetricRangePayloadV5,
QueryRangeRequestV5
> = makeBaseSuccess({ data: v5Data }, params);
const legendMap = { A: '{{service.name}}' };
const result = convertV5ResponseToLegacy(input, legendMap, false);
expect(result.payload.data.resultType).toBe('time_series');
expect(result.payload.data.result).toHaveLength(1);
const q = result.payload.data.result[0];
expect(q.queryName).toBe('A');
expect(q.legend).toBe('{{service.name}}');
expect(q.series?.[0]).toEqual(
expect.objectContaining({
labels: { 'service.name': 'adservice' },
values: [
{ timestamp: 1000, value: '10' },
{ timestamp: 2000, value: '12' },
],
metaData: expect.objectContaining({
alias: '__result_0',
index: 0,
queryName: 'A',
}),
}),
);
});
it('converts scalar to legacy table (formatForWeb=false) with names/ids resolved from aggregations', () => {
const scalar: ScalarData = {
columns: [
// group column
({
name: 'service.name',
queryName: 'A',
aggregationIndex: 0,
columnType: 'group',
} as unknown) as ScalarData['columns'][number],
// aggregation 0
({
name: '__result_0',
queryName: 'A',
aggregationIndex: 0,
columnType: 'aggregation',
} as unknown) as ScalarData['columns'][number],
// aggregation 1
({
name: '__result_1',
queryName: 'A',
aggregationIndex: 1,
columnType: 'aggregation',
} as unknown) as ScalarData['columns'][number],
// formula F1
({
name: '__result',
queryName: 'F1',
aggregationIndex: 0,
columnType: 'aggregation',
} as unknown) as ScalarData['columns'][number],
],
data: [['adservice', 606, 1.452, 151.5]],
};
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('scalar', [
{
type: 'builder_query',
spec: {
name: 'A',
signal: 'traces',
stepInterval: 60,
disabled: false,
aggregations: [
{ expression: 'count()' },
{ expression: 'avg(app.ads.count)', alias: 'avg' },
],
},
},
{
type: 'builder_formula',
spec: ({
name: 'F1',
expression: 'A * 0.25',
} as unknown) as QueryBuilderFormula,
},
]);
const input: SuccessResponse<
MetricRangePayloadV5,
QueryRangeRequestV5
> = makeBaseSuccess({ data: v5Data }, params);
const legendMap = { A: '{{service.name}}', F1: '' };
const result = convertV5ResponseToLegacy(input, legendMap, false);
expect(result.payload.data.resultType).toBe('scalar');
const [tableEntry] = result.payload.data.result;
expect(tableEntry.table?.columns).toEqual([
{
name: 'service.name',
queryName: 'A',
isValueColumn: false,
id: 'service.name',
},
{ name: 'count()', queryName: 'A', isValueColumn: true, id: 'A.count()' },
{
name: 'avg',
queryName: 'A',
isValueColumn: true,
id: 'A.avg(app.ads.count)',
},
{ name: 'F1', queryName: 'F1', isValueColumn: true, id: 'F1' },
]);
expect(tableEntry.table?.rows?.[0]).toEqual({
data: {
'service.name': 'adservice',
'A.count()': 606,
'A.avg(app.ads.count)': 1.452,
F1: 151.5,
},
});
});
it('converts scalar with formatForWeb=true to UI-friendly table', () => {
const scalar: ScalarData = {
columns: [
{
name: 'service.name',
queryName: 'A',
aggregationIndex: 0,
columnType: 'group',
} as any,
{
name: '__result_0',
queryName: 'A',
aggregationIndex: 0,
columnType: 'aggregation',
} as any,
],
data: [['adservice', 580]],
};
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('scalar', [
{
type: 'builder_query',
spec: {
name: 'A',
signal: 'traces',
stepInterval: 60,
disabled: false,
aggregations: [{ expression: 'count()' }],
},
},
]);
const input: SuccessResponse<
MetricRangePayloadV5,
QueryRangeRequestV5
> = makeBaseSuccess({ data: v5Data }, params);
const legendMap = { A: '{{service.name}}' };
const result = convertV5ResponseToLegacy(input, legendMap, true);
expect(result.payload.data.resultType).toBe('scalar');
const [tableEntry] = result.payload.data.result;
expect(tableEntry.table?.columns).toEqual([
{
name: 'service.name',
queryName: 'A',
isValueColumn: false,
id: 'service.name',
},
// Single aggregation: name resolves to legend, id resolves to queryName
{ name: '{{service.name}}', queryName: 'A', isValueColumn: true, id: 'A' },
]);
expect(tableEntry.table?.rows?.[0]).toEqual({
data: {
'service.name': 'adservice',
A: 580,
},
});
});
});

View File

@@ -1,893 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string, simple-import-sort/imports, @typescript-eslint/indent, no-mixed-spaces-and-tabs */
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
IBuilderFormula,
IBuilderQuery,
} from 'types/api/queryBuilder/queryBuilderData';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import {
ClickHouseQuery,
LogAggregation,
LogBuilderQuery,
MetricBuilderQuery,
PromQuery,
QueryBuilderFormula as V5QueryBuilderFormula,
QueryEnvelope,
QueryRangePayloadV5,
} from 'types/api/v5/queryRange';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { prepareQueryRangePayloadV5 } from './prepareQueryRangePayloadV5';
jest.mock('lib/getStartEndRangeTime', () => ({
__esModule: true,
default: jest.fn(() => ({ start: '100', end: '200' })),
}));
describe('prepareQueryRangePayloadV5', () => {
const start = 1_710_000_000; // seconds
const end = 1_710_000_600; // seconds
const baseBuilderQuery = (
overrides?: Partial<IBuilderQuery>,
): IBuilderQuery => ({
queryName: 'A',
dataSource: DataSource.METRICS,
aggregations: [
{
metricName: 'cpu_usage',
temporality: '',
timeAggregation: 'sum',
spaceAggregation: 'avg',
reduceTo: 'avg',
},
],
timeAggregation: 'sum',
spaceAggregation: 'avg',
temporality: '',
functions: [
{
name: 'timeShift',
args: [{ value: '5m' }],
},
],
filter: { expression: '' },
filters: { items: [], op: 'AND' },
groupBy: [],
expression: 'A',
disabled: false,
having: [],
limit: null,
stepInterval: 600,
orderBy: [],
reduceTo: 'avg',
legend: 'Legend A',
...overrides,
});
const baseFormula = (
overrides?: Partial<IBuilderFormula>,
): IBuilderFormula => ({
expression: 'A + 1',
disabled: false,
queryName: 'F1',
legend: 'Formula Legend',
limit: undefined,
having: [],
stepInterval: undefined,
orderBy: [],
...overrides,
});
it('builds payload for builder queries with formulas and variables', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q1',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [baseBuilderQuery()],
queryFormulas: [baseFormula()],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.TIME_SERIES,
selectedTime: 'GLOBAL_TIME',
start,
end,
variables: { svc: 'api', count: 5, flag: true },
fillGaps: true,
};
const result = prepareQueryRangePayloadV5(props);
expect(result).toEqual(
expect.objectContaining({
legendMap: { A: 'Legend A', F1: 'Formula Legend' },
queryPayload: expect.objectContaining({
compositeQuery: expect.objectContaining({
queries: expect.arrayContaining([
expect.objectContaining({
type: 'builder_query',
spec: expect.objectContaining({
name: 'A',
signal: 'metrics',
stepInterval: 600,
functions: [{ name: 'timeShift', args: [{ value: '5m' }] }],
aggregations: [
expect.objectContaining({
metricName: 'cpu_usage',
timeAggregation: 'sum',
spaceAggregation: 'avg',
reduceTo: undefined,
}),
],
}),
}),
expect.objectContaining({
type: 'builder_formula',
spec: expect.objectContaining({
name: 'F1',
expression: 'A + 1',
legend: 'Formula Legend',
}),
}),
]),
}),
requestType: 'time_series',
formatOptions: expect.objectContaining({
formatTableResultForUI: false,
fillGaps: true,
}),
start: start * 1000,
end: end * 1000,
variables: expect.objectContaining({
svc: { value: 'api' },
count: { value: 5 },
flag: { value: true },
}),
}),
}),
);
// Legend map combines builder and formulas
expect(result.legendMap).toEqual({ A: 'Legend A', F1: 'Formula Legend' });
const payload: QueryRangePayloadV5 = result.queryPayload;
expect(payload.schemaVersion).toBe('v1');
expect(payload.start).toBe(start * 1000);
expect(payload.end).toBe(end * 1000);
expect(payload.requestType).toBe('time_series');
expect(payload.formatOptions?.formatTableResultForUI).toBe(false);
expect(payload.formatOptions?.fillGaps).toBe(true);
// Variables mapped as { key: { value } }
expect(payload.variables).toEqual({
svc: { value: 'api' },
count: { value: 5 },
flag: { value: true },
});
// Queries include one builder_query and one builder_formula
expect(payload.compositeQuery.queries).toHaveLength(2);
const builderQuery = payload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const builderSpec = builderQuery.spec as MetricBuilderQuery;
expect(builderSpec.name).toBe('A');
expect(builderSpec.signal).toBe('metrics');
expect(builderSpec.aggregations?.[0]).toMatchObject({
metricName: 'cpu_usage',
timeAggregation: 'sum',
spaceAggregation: 'avg',
});
// reduceTo should not be present for non-scalar panels
expect(builderSpec.aggregations?.[0].reduceTo).toBeUndefined();
// functions should be preserved/normalized
expect(builderSpec.functions?.[0]?.name).toBe('timeShift');
const formulaQuery = payload.compositeQuery.queries.find(
(q) => q.type === 'builder_formula',
) as QueryEnvelope;
const formulaSpec = formulaQuery.spec as V5QueryBuilderFormula;
expect(formulaSpec.name).toBe('F1');
expect(formulaSpec.expression).toBe('A + 1');
expect(formulaSpec.legend).toBe('Formula Legend');
});
it('builds payload for PromQL queries and respects originalGraphType for formatting', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.PROM,
id: 'q2',
unit: undefined,
promql: [
{
name: 'A',
query: 'up',
disabled: false,
legend: 'LP',
},
],
clickhouse_sql: [],
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
},
graphType: PANEL_TYPES.TIME_SERIES,
originalGraphType: PANEL_TYPES.TABLE,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
expect(result).toEqual(
expect.objectContaining({
legendMap: { A: 'LP' },
queryPayload: expect.objectContaining({
compositeQuery: expect.objectContaining({
queries: [
{
type: 'promql',
spec: expect.objectContaining({
name: 'A',
query: 'up',
legend: 'LP',
stats: false,
}),
},
],
}),
requestType: 'time_series',
formatOptions: expect.objectContaining({
formatTableResultForUI: true,
fillGaps: false,
}),
start: start * 1000,
end: end * 1000,
variables: {},
}),
}),
);
expect(result.legendMap).toEqual({ A: 'LP' });
const payload: QueryRangePayloadV5 = result.queryPayload;
expect(payload.requestType).toBe('time_series');
expect(payload.formatOptions?.formatTableResultForUI).toBe(true);
expect(payload.compositeQuery.queries).toHaveLength(1);
const prom = payload.compositeQuery.queries[0];
expect(prom.type).toBe('promql');
const promSpec = prom.spec as PromQuery;
expect(promSpec.name).toBe('A');
expect(promSpec.query).toBe('up');
expect(promSpec.legend).toBe('LP');
expect(promSpec.stats).toBe(false);
});
it('builds payload for ClickHouse queries and maps requestType from panel', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.CLICKHOUSE,
id: 'q3',
unit: undefined,
promql: [],
clickhouse_sql: [
{
name: 'Q',
query: 'SELECT 1',
disabled: false,
legend: 'LC',
},
],
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
},
graphType: PANEL_TYPES.TABLE,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
expect(result).toEqual(
expect.objectContaining({
legendMap: { Q: 'LC' },
queryPayload: expect.objectContaining({
compositeQuery: expect.objectContaining({
queries: [
{
type: 'clickhouse_sql',
spec: expect.objectContaining({
name: 'Q',
query: 'SELECT 1',
legend: 'LC',
}),
},
],
}),
requestType: 'scalar',
formatOptions: expect.objectContaining({
formatTableResultForUI: true,
fillGaps: false,
}),
start: start * 1000,
end: end * 1000,
variables: {},
}),
}),
);
expect(result.legendMap).toEqual({ Q: 'LC' });
const payload: QueryRangePayloadV5 = result.queryPayload;
expect(payload.requestType).toBe('scalar');
expect(payload.compositeQuery.queries).toHaveLength(1);
const ch = payload.compositeQuery.queries[0];
expect(ch.type).toBe('clickhouse_sql');
const chSpec = ch.spec as ClickHouseQuery;
expect(chSpec.name).toBe('Q');
expect(chSpec.query).toBe('SELECT 1');
expect(chSpec.legend).toBe('LC');
});
it('uses getStartEndRangeTime when start/end are not provided', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q4',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
},
graphType: PANEL_TYPES.TIME_SERIES,
selectedTime: 'GLOBAL_TIME',
};
const result = prepareQueryRangePayloadV5(props);
expect(result).toEqual(
expect.objectContaining({
legendMap: {},
queryPayload: expect.objectContaining({
compositeQuery: { queries: [] },
requestType: 'time_series',
formatOptions: expect.objectContaining({
formatTableResultForUI: false,
fillGaps: false,
}),
start: 100 * 1000,
end: 200 * 1000,
variables: {},
}),
}),
);
const payload: QueryRangePayloadV5 = result.queryPayload;
expect(payload.start).toBe(100 * 1000);
expect(payload.end).toBe(200 * 1000);
});
it('includes reduceTo for metrics in scalar panels (TABLE)', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q5',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [baseBuilderQuery()],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.TABLE,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
expect(result).toEqual(
expect.objectContaining({
legendMap: { A: 'Legend A' },
queryPayload: expect.objectContaining({
compositeQuery: expect.objectContaining({
queries: [
{
type: 'builder_query',
spec: expect.objectContaining({
name: 'A',
signal: 'metrics',
stepInterval: 600,
functions: [{ name: 'timeShift', args: [{ value: '5m' }] }],
aggregations: [
expect.objectContaining({
metricName: 'cpu_usage',
timeAggregation: 'sum',
spaceAggregation: 'avg',
reduceTo: 'avg',
temporality: undefined,
}),
],
}),
},
],
}),
requestType: 'scalar',
formatOptions: expect.objectContaining({
formatTableResultForUI: true,
fillGaps: false,
}),
start: start * 1000,
end: end * 1000,
variables: {},
}),
}),
);
const payload: QueryRangePayloadV5 = result.queryPayload;
const builderQuery = payload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const builderSpec = builderQuery.spec as MetricBuilderQuery;
expect(builderSpec.aggregations?.[0].reduceTo).toBe('avg');
});
it('omits aggregations for raw request type (LIST panel)', () => {
const logAgg: LogAggregation[] = [{ expression: 'count()' }];
const logsQuery = baseBuilderQuery({
dataSource: DataSource.LOGS,
aggregations: logAgg,
} as Partial<IBuilderQuery>);
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q6',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [logsQuery],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
expect(result).toEqual(
expect.objectContaining({
legendMap: { A: 'Legend A' },
queryPayload: expect.objectContaining({
compositeQuery: expect.objectContaining({
queries: [
{
type: 'builder_query',
spec: expect.objectContaining({
name: 'A',
signal: 'logs',
stepInterval: 600,
functions: [{ name: 'timeShift', args: [{ value: '5m' }] }],
aggregations: undefined,
}),
},
],
}),
requestType: 'raw',
formatOptions: expect.objectContaining({
formatTableResultForUI: false,
fillGaps: false,
}),
start: start * 1000,
end: end * 1000,
variables: {},
}),
}),
);
const payload: QueryRangePayloadV5 = result.queryPayload;
expect(payload.requestType).toBe('raw');
const builderQuery = payload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
// For RAW request type, aggregations should be omitted
const logSpec = builderQuery.spec as LogBuilderQuery;
expect(logSpec.aggregations).toBeUndefined();
});
it('maps groupBy, order, having, aggregations and filter for logs builder query', () => {
const getStartEndRangeTime = jest.requireMock('lib/getStartEndRangeTime')
.default as jest.Mock;
getStartEndRangeTime.mockReturnValueOnce({
start: '1754623641',
end: '1754645241',
});
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'e643e387-1996-4449-97b6-9ef4498a0573',
unit: undefined,
promql: [{ name: 'A', query: '', legend: '', disabled: false }],
clickhouse_sql: [{ name: 'A', legend: '', disabled: false, query: '' }],
builder: {
queryData: [
{
dataSource: DataSource.LOGS,
queryName: 'A',
aggregateOperator: 'count',
aggregateAttribute: {
key: '',
dataType: DataTypes.EMPTY,
type: '',
},
timeAggregation: 'rate',
spaceAggregation: 'sum',
filter: { expression: "service.name = 'adservice'" },
aggregations: [
{ expression: 'count() as cnt avg(code.lineno) ' } as LogAggregation,
],
functions: [],
filters: {
items: [
{
id: '14c790ec-54d1-42f0-a889-3b4f0fb79852',
op: '=',
key: { id: 'service.name', key: 'service.name', type: '' },
value: 'adservice',
},
],
op: 'AND',
},
expression: 'A',
disabled: false,
stepInterval: 80,
having: { expression: 'count() > 0' },
limit: 600,
orderBy: [{ columnName: 'service.name', order: 'desc' }],
groupBy: [
{
key: 'service.name',
type: '',
},
],
legend: '{{service.name}}',
reduceTo: 'avg',
offset: 0,
pageSize: 100,
},
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.TIME_SERIES,
selectedTime: 'GLOBAL_TIME',
globalSelectedInterval: 'custom' as never,
variables: {},
};
const result = prepareQueryRangePayloadV5(props);
expect(result).toEqual(
expect.objectContaining({
legendMap: { A: '{{service.name}}' },
queryPayload: expect.objectContaining({
schemaVersion: 'v1',
start: 1754623641000,
end: 1754645241000,
requestType: 'time_series',
compositeQuery: expect.objectContaining({
queries: [
{
type: 'builder_query',
spec: expect.objectContaining({
name: 'A',
signal: 'logs',
stepInterval: 80,
disabled: false,
filter: { expression: "service.name = 'adservice'" },
groupBy: [
{
name: 'service.name',
fieldDataType: '',
fieldContext: '',
},
],
limit: 600,
order: [
{
key: { name: 'service.name' },
direction: 'desc',
},
],
legend: '{{service.name}}',
having: { expression: 'count() > 0' },
aggregations: [
{ expression: 'count()', alias: 'cnt' },
{ expression: 'avg(code.lineno)' },
],
}),
},
],
}),
formatOptions: { formatTableResultForUI: false, fillGaps: false },
variables: {},
}),
}),
);
});
it('builds payload for builder queries with filters array but no filter expression', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q8',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [
baseBuilderQuery({
dataSource: DataSource.LOGS,
filter: { expression: '' },
filters: {
items: [
{
id: '1',
key: { key: 'service.name', type: 'string' },
op: '=',
value: 'payment-service',
},
{
id: '2',
key: { key: 'http.status_code', type: 'number' },
op: '>=',
value: 400,
},
{
id: '3',
key: { key: 'message', type: 'string' },
op: 'contains',
value: 'error',
},
],
op: 'AND',
},
}),
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
expect(result.legendMap).toEqual({ A: 'Legend A' });
expect(result.queryPayload.compositeQuery.queries).toHaveLength(1);
const builderQuery = result.queryPayload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const logSpec = builderQuery.spec as LogBuilderQuery;
expect(logSpec.name).toBe('A');
expect(logSpec.signal).toBe('logs');
expect(logSpec.filter).toEqual({
expression:
"service.name = 'payment-service' AND http.status_code >= 400 AND message contains 'error'",
});
});
it('uses filter.expression when only expression is provided', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q9',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [
baseBuilderQuery({
dataSource: DataSource.LOGS,
filter: { expression: 'http.status_code >= 500' },
filters: (undefined as unknown) as IBuilderQuery['filters'],
}),
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
const builderQuery = result.queryPayload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const logSpec = builderQuery.spec as LogBuilderQuery;
expect(logSpec.filter).toEqual({ expression: 'http.status_code >= 500' });
});
it('derives expression from filters when filter is undefined', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q10',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [
baseBuilderQuery({
dataSource: DataSource.LOGS,
filter: (undefined as unknown) as IBuilderQuery['filter'],
filters: {
items: [
{
id: '1',
key: { key: 'service.name', type: 'string' },
op: '=',
value: 'checkout',
},
],
op: 'AND',
},
}),
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
const builderQuery = result.queryPayload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const logSpec = builderQuery.spec as LogBuilderQuery;
expect(logSpec.filter).toEqual({ expression: "service.name = 'checkout'" });
});
it('prefers filter.expression over filters when both are present', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q11',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [
baseBuilderQuery({
dataSource: DataSource.LOGS,
filter: { expression: "service.name = 'frontend'" },
filters: {
items: [
{
id: '1',
key: { key: 'service.name', type: 'string' },
op: '=',
value: 'backend',
},
],
op: 'AND',
},
}),
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
const builderQuery = result.queryPayload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const logSpec = builderQuery.spec as LogBuilderQuery;
expect(logSpec.filter).toEqual({ expression: "service.name = 'frontend'" });
});
it('returns empty expression when neither filter nor filters provided', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q12',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [
baseBuilderQuery({
dataSource: DataSource.LOGS,
filter: (undefined as unknown) as IBuilderQuery['filter'],
filters: (undefined as unknown) as IBuilderQuery['filters'],
}),
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
const builderQuery = result.queryPayload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const logSpec = builderQuery.spec as LogBuilderQuery;
expect(logSpec.filter).toEqual({ expression: '' });
});
it('returns empty expression when filters provided with empty items', () => {
const props: GetQueryResultsProps = {
query: {
queryType: EQueryType.QUERY_BUILDER,
id: 'q13',
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [
baseBuilderQuery({
dataSource: DataSource.LOGS,
filter: { expression: '' },
filters: { items: [], op: 'AND' },
}),
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
selectedTime: 'GLOBAL_TIME',
start,
end,
};
const result = prepareQueryRangePayloadV5(props);
const builderQuery = result.queryPayload.compositeQuery.queries.find(
(q) => q.type === 'builder_query',
) as QueryEnvelope;
const logSpec = builderQuery.spec as LogBuilderQuery;
expect(logSpec.filter).toEqual({ expression: '' });
});
});
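Taken together, the removed filter tests above pin down a single precedence rule, the same one implemented by the getFilter helper deleted further down in this diff: a non-empty filter.expression wins, otherwise filters.items are converted to an expression, otherwise the expression is empty. A minimal TypeScript sketch of that rule (resolveFilter is a hypothetical name; convertFiltersToExpression, IBuilderQuery and Filter are the real imports used by the deleted code):
import { convertFiltersToExpression } from 'components/QueryBuilderV2/utils';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { Filter } from 'types/api/v5/queryRange';
// Sketch of the precedence the removed tests encode.
function resolveFilter(queryData: IBuilderQuery): Filter {
  // 1. A non-empty filter.expression always wins.
  if (queryData.filter?.expression) {
    return { expression: queryData.filter.expression };
  }
  // 2. Otherwise derive an expression from the legacy filters array.
  if (queryData.filters?.items?.length) {
    return convertFiltersToExpression(queryData.filters);
  }
  // 3. Neither present: empty expression.
  return { expression: '' };
}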


@@ -1,21 +1,15 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable sonarjs/no-identical-functions */
import { convertFiltersToExpression } from 'components/QueryBuilderV2/utils';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi';
import { isEmpty } from 'lodash-es';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
IBuilderQuery,
IBuilderTraceOperator,
} from 'types/api/queryBuilder/queryBuilderData';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import {
BaseBuilderQuery,
FieldContext,
FieldDataType,
Filter,
FunctionName,
GroupByKey,
Having,
@@ -30,7 +24,6 @@ import {
TelemetryFieldKey,
TraceAggregation,
VariableItem,
VariableType,
} from 'types/api/v5/queryRange';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
@@ -73,63 +66,9 @@ function getSignalType(dataSource: string): 'traces' | 'logs' | 'metrics' {
return 'metrics';
}
function isDeprecatedField(fieldName: string): boolean {
const deprecatedIntrinsicFields = [
'traceID',
'spanID',
'parentSpanID',
'spanKind',
'durationNano',
'statusCode',
'statusMessage',
'statusCodeString',
];
const deprecatedCalculatedFields = [
'responseStatusCode',
'externalHttpUrl',
'httpUrl',
'externalHttpMethod',
'httpMethod',
'httpHost',
'dbName',
'dbOperation',
'hasError',
'isRemote',
'serviceName',
'httpRoute',
'msgSystem',
'msgOperation',
'dbSystem',
'rpcSystem',
'rpcService',
'rpcMethod',
'peerService',
];
return (
deprecatedIntrinsicFields.includes(fieldName) ||
deprecatedCalculatedFields.includes(fieldName)
);
}
function getFilter(queryData: IBuilderQuery): Filter {
const { filter } = queryData;
if (filter?.expression) {
return {
expression: filter.expression,
};
}
if (queryData.filters && queryData.filters?.items?.length > 0) {
return convertFiltersToExpression(queryData.filters);
}
return {
expression: '',
};
}
/**
* Creates base spec for builder queries
*/
function createBaseSpec(
queryData: IBuilderQuery,
requestType: RequestType,
@@ -141,16 +80,16 @@ function createBaseSpec(
)[])?.filter((c) => ('key' in c ? c?.key : c?.name));
return {
stepInterval: queryData?.stepInterval || null,
stepInterval: queryData?.stepInterval || undefined,
disabled: queryData.disabled,
filter: getFilter(queryData),
filter: queryData?.filter?.expression ? queryData.filter : undefined,
groupBy:
queryData.groupBy?.length > 0
? queryData.groupBy.map(
(item: any): GroupByKey => ({
name: item.key,
fieldDataType: item?.dataType || '',
fieldContext: item?.type || '',
fieldDataType: item?.dataType,
fieldContext: item?.type,
description: item?.description,
unit: item?.unit,
signal: item?.signal,
@@ -201,33 +140,19 @@ function createBaseSpec(
selectFields: isEmpty(nonEmptySelectColumns)
? undefined
: nonEmptySelectColumns?.map(
(column: any): TelemetryFieldKey => {
const fieldName = column.name ?? column.key;
const isDeprecated = isDeprecatedField(fieldName);
const fieldObj: TelemetryFieldKey = {
name: fieldName,
fieldDataType:
column?.fieldDataType ?? (column?.dataType as FieldDataType),
signal: column?.signal ?? undefined,
};
// Only add fieldContext if the field is NOT deprecated
if (!isDeprecated && fieldName !== 'name') {
fieldObj.fieldContext =
column?.fieldContext ?? (column?.type as FieldContext);
}
return fieldObj;
},
(column: any): TelemetryFieldKey => ({
name: column.name ?? column.key,
fieldDataType:
column?.fieldDataType ?? (column?.dataType as FieldDataType),
fieldContext: column?.fieldContext ?? (column?.type as FieldContext),
signal: column?.signal ?? undefined,
}),
),
};
}
// Utility to parse aggregation expressions with optional alias
export function parseAggregations(
expression: string,
availableAlias?: string,
): { expression: string; alias?: string }[] {
const result: { expression: string; alias?: string }[] = [];
// Matches function calls like "count()" or "sum(field)" with optional alias like "as 'alias'"
@@ -236,7 +161,7 @@ export function parseAggregations(
let match = regex.exec(expression);
while (match !== null) {
const expr = match[1];
let alias = match[2] || availableAlias; // Use provided alias or availableAlias if not matched
let alias = match[2];
if (alias) {
// Remove quotes if present
alias = alias.replace(/^['"]|['"]$/g, '');
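For reference, the expected shape of the parsed result, taken from the removed logs test earlier in this diff (an illustration of behavior, not the implementation):
import { parseAggregations } from './prepareQueryRangePayloadV5';
// Chained calls are split and quoted aliases are stripped:
parseAggregations("count() as cnt avg(code.lineno) ");
// => [{ expression: 'count()', alias: 'cnt' }, { expression: 'avg(code.lineno)' }]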
@@ -287,14 +212,9 @@ export function createAggregation(
}
if (queryData.aggregations?.length > 0) {
return queryData.aggregations.flatMap(
(agg: { expression: string; alias?: string }) => {
const parsedAggregations = parseAggregations(agg.expression, agg?.alias);
return isEmpty(parsedAggregations)
? [{ expression: 'count()' }]
: parsedAggregations;
},
);
return isEmpty(parseAggregations(queryData.aggregations?.[0].expression))
? [{ expression: 'count()' }]
: parseAggregations(queryData.aggregations?.[0].expression);
}
return [{ expression: 'count()' }];
@@ -356,109 +276,6 @@ export function convertBuilderQueriesToV5(
);
}
function createTraceOperatorBaseSpec(
queryData: IBuilderTraceOperator,
requestType: RequestType,
panelType?: PANEL_TYPES,
): BaseBuilderQuery {
const nonEmptySelectColumns = (queryData.selectColumns as (
| BaseAutocompleteData
| TelemetryFieldKey
)[])?.filter((c) => ('key' in c ? c?.key : c?.name));
const {
stepInterval,
groupBy,
limit,
offset,
legend,
having,
orderBy,
pageSize,
} = queryData;
return {
stepInterval: stepInterval || undefined,
groupBy:
groupBy?.length > 0
? groupBy.map(
(item: any): GroupByKey => ({
name: item.key,
fieldDataType: item?.dataType,
fieldContext: item?.type,
description: item?.description,
unit: item?.unit,
signal: item?.signal,
materialized: item?.materialized,
}),
)
: undefined,
limit:
panelType === PANEL_TYPES.TABLE || panelType === PANEL_TYPES.LIST
? limit || pageSize || undefined
: limit || undefined,
offset: requestType === 'raw' || requestType === 'trace' ? offset : undefined,
order:
orderBy?.length > 0
? orderBy.map(
(order: any): OrderBy => ({
key: {
name: order.columnName,
},
direction: order.order,
}),
)
: undefined,
legend: isEmpty(legend) ? undefined : legend,
having: isEmpty(having) ? undefined : (having as Having),
selectFields: isEmpty(nonEmptySelectColumns)
? undefined
: nonEmptySelectColumns?.map(
(column: any): TelemetryFieldKey => ({
name: column.name ?? column.key,
fieldDataType:
column?.fieldDataType ?? (column?.dataType as FieldDataType),
fieldContext: column?.fieldContext ?? (column?.type as FieldContext),
signal: column?.signal ?? undefined,
}),
),
};
}
export function convertTraceOperatorToV5(
traceOperator: Record<string, IBuilderTraceOperator>,
requestType: RequestType,
panelType?: PANEL_TYPES,
): QueryEnvelope[] {
return Object.entries(traceOperator).map(
([queryName, traceOperatorData]): QueryEnvelope => {
const baseSpec = createTraceOperatorBaseSpec(
traceOperatorData,
requestType,
panelType,
);
// Skip aggregation for raw request type
const aggregations =
requestType === 'raw'
? undefined
: createAggregation(traceOperatorData, panelType);
const spec: QueryEnvelope['spec'] = {
name: queryName,
...baseSpec,
expression: traceOperatorData.expression || '',
aggregations: aggregations as TraceAggregation[],
};
return {
type: 'builder_trace_operator' as QueryType,
spec,
};
},
);
}
/**
* Converts PromQL queries to V5 format
*/
@@ -533,7 +350,6 @@ export const prepareQueryRangePayloadV5 = ({
formatForWeb,
originalGraphType,
fillGaps,
dynamicVariables,
}: GetQueryResultsProps): PrepareQueryRangePayloadV5Result => {
let legendMap: Record<string, string> = {};
const requestType = mapPanelTypeToRequestType(graphType);
@@ -541,28 +357,14 @@ export const prepareQueryRangePayloadV5 = ({
switch (query.queryType) {
case EQueryType.QUERY_BUILDER: {
const { queryData: data, queryFormulas, queryTraceOperator } = query.builder;
const { queryData: data, queryFormulas } = query.builder;
const currentQueryData = mapQueryDataToApi(data, 'queryName', tableParams);
const currentFormulas = mapQueryDataToApi(queryFormulas, 'queryName');
const filteredTraceOperator =
queryTraceOperator && queryTraceOperator.length > 0
? queryTraceOperator.filter((traceOperator) =>
Boolean(traceOperator.expression.trim()),
)
: [];
const currentTraceOperator = mapQueryDataToApi(
filteredTraceOperator,
'queryName',
tableParams,
);
// Combine legend maps
legendMap = {
...currentQueryData.newLegendMap,
...currentFormulas.newLegendMap,
...currentTraceOperator.newLegendMap,
};
// Convert builder queries
@@ -595,14 +397,8 @@ export const prepareQueryRangePayloadV5 = ({
}),
);
const traceOperatorQueries = convertTraceOperatorToV5(
currentTraceOperator.data,
requestType,
graphType,
);
// Combine all query types
queries = [...builderQueries, ...formulaQueries, ...traceOperatorQueries];
// Combine both types
queries = [...builderQueries, ...formulaQueries];
break;
}
case EQueryType.PROM: {
@@ -645,12 +441,7 @@ export const prepareQueryRangePayloadV5 = ({
fillGaps: fillGaps || false,
},
variables: Object.entries(variables).reduce((acc, [key, value]) => {
acc[key] = {
value,
type: dynamicVariables
?.find((v) => v.name === key)
?.type?.toLowerCase() as VariableType,
};
acc[key] = { value };
return acc;
}, {} as Record<string, VariableItem>),
};
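The removed branch attached a type, looked up in dynamicVariables, to each variable; after the change every variable maps to a bare { value } entry, which is also what the removed payload tests assert. A minimal sketch of the resulting shape (VariableItem narrowed to { value: unknown } for brevity):
// Input variables as passed on GetQueryResultsProps:
const variables = { svc: 'api', count: 5, flag: true };
// Same reduce as above, minus the type lookup:
const mapped = Object.entries(variables).reduce((acc, [key, value]) => {
  acc[key] = { value };
  return acc;
}, {} as Record<string, { value: unknown }>);
// mapped => { svc: { value: 'api' }, count: { value: 5 }, flag: { value: true } }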


@@ -1,16 +1,14 @@
import { render, screen } from '@testing-library/react';
import getLocal from '../../../api/browser/localstorage/get';
import AppLoading from '../AppLoading';
jest.mock('../../../api/browser/localstorage/get', () => ({
// Mock the localStorage API
const mockGet = jest.fn();
jest.mock('api/browser/localstorage/get', () => ({
__esModule: true,
default: jest.fn(),
default: mockGet,
}));
// Access the mocked function
const mockGet = (getLocal as unknown) as jest.Mock;
describe('AppLoading', () => {
const SIGNOZ_TEXT = 'SigNoz';
const TAGLINE_TEXT =
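A note on the new mocking pattern: jest.mock factories are hoisted above imports, and Jest's hoisting transform only lets a factory reference outer variables whose names start with "mock", which is why declaring const mockGet = jest.fn() up front works and the double cast through getLocal is no longer needed. A usage sketch (the return value is illustrative):
// The factory may reference mockGet because of the "mock" name prefix.
const mockGet = jest.fn();
jest.mock('api/browser/localstorage/get', () => ({
  __esModule: true,
  default: mockGet,
}));
// Individual tests can then configure the mock directly:
mockGet.mockReturnValue('true');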


@@ -20,15 +20,13 @@
.ant-card-body {
height: calc(100% - 18px);
.widget-graph-component-container {
.widget-graph-container {
&.bar-panel-container {
height: calc(100% - 110px);
}
.widget-graph-container {
&.bar {
height: calc(100% - 110px);
}
&.graph-panel-container {
height: calc(100% - 80px);
}
&.graph {
height: calc(100% - 80px);
}
}
}
@@ -84,11 +82,9 @@
.ant-card-body {
height: calc(100% - 18px);
.widget-graph-component-container {
.widget-graph-container {
&.bar-panel-container {
height: calc(100% - 110px);
}
.widget-graph-container {
&.bar {
height: calc(100% - 110px);
}
}
}


@@ -32,6 +32,8 @@ export const celeryAllStateWidgetData = (
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
isJSON: false,
key: '',
type: '',
},
@@ -48,6 +50,8 @@ export const celeryAllStateWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -84,6 +88,7 @@ export const celeryRetryStateWidgetData = (
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
key: '',
type: '',
},
@@ -98,6 +103,8 @@ export const celeryRetryStateWidgetData = (
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -112,6 +119,8 @@ export const celeryRetryStateWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.hostname--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
},
@@ -144,6 +153,8 @@ export const celeryFailedStateWidgetData = (
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
isJSON: false,
key: '',
type: '',
},
@@ -158,6 +169,8 @@ export const celeryFailedStateWidgetData = (
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -172,6 +185,8 @@ export const celeryFailedStateWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.hostname--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
},
@@ -204,6 +219,8 @@ export const celerySuccessStateWidgetData = (
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
isJSON: false,
key: '',
type: '',
},
@@ -218,6 +235,8 @@ export const celerySuccessStateWidgetData = (
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -232,6 +251,8 @@ export const celerySuccessStateWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.hostname--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
},
@@ -263,6 +284,7 @@ export const celeryTasksByWorkerWidgetData = (
aggregateAttribute: {
dataType: DataTypes.String,
id: '------false',
isColumn: false,
key: '',
type: '',
},
@@ -279,6 +301,8 @@ export const celeryTasksByWorkerWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.hostname--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
},
@@ -314,6 +338,8 @@ export const celeryErrorByWorkerWidgetData = (
aggregateAttribute: {
dataType: 'string',
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -327,6 +353,8 @@ export const celeryErrorByWorkerWidgetData = (
key: {
dataType: DataTypes.bool,
id: 'has_error--bool----true',
isColumn: true,
isJSON: false,
key: 'has_error',
type: '',
},
@@ -345,6 +373,8 @@ export const celeryErrorByWorkerWidgetData = (
groupBy: [
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
id: 'celery.hostname--string--tag--false',
@@ -360,6 +390,8 @@ export const celeryErrorByWorkerWidgetData = (
aggregateAttribute: {
dataType: 'string',
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -379,6 +411,8 @@ export const celeryErrorByWorkerWidgetData = (
groupBy: [
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
id: 'celery.hostname--string--tag--false',
@@ -411,6 +445,8 @@ export const celeryLatencyByWorkerWidgetData = (
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -427,6 +463,8 @@ export const celeryLatencyByWorkerWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.hostname--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.hostname',
type: 'tag',
},
@@ -460,6 +498,8 @@ export const celeryActiveTasksWidgetData = (
dataType: DataTypes.Float64,
id:
'flower_worker_number_of_currently_executing_tasks--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: 'flower_worker_number_of_currently_executing_tasks',
type: 'Gauge',
},
@@ -476,6 +516,8 @@ export const celeryActiveTasksWidgetData = (
{
dataType: DataTypes.String,
id: 'worker--string--tag--false',
isColumn: false,
isJSON: false,
key: 'worker',
type: 'tag',
},
@@ -509,6 +551,8 @@ export const celeryTaskLatencyWidgetData = (
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -525,6 +569,8 @@ export const celeryTaskLatencyWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -560,6 +606,8 @@ export const celerySlowestTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -576,6 +624,8 @@ export const celerySlowestTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -610,6 +660,8 @@ export const celeryRetryTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -624,6 +676,8 @@ export const celeryRetryTasksTableWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -638,6 +692,8 @@ export const celeryRetryTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -673,6 +729,8 @@ export const celeryFailedTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -687,6 +745,8 @@ export const celeryFailedTasksTableWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -701,6 +761,8 @@ export const celeryFailedTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -734,6 +796,8 @@ export const celerySuccessTasksTableWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -748,6 +812,8 @@ export const celerySuccessTasksTableWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -762,6 +828,8 @@ export const celerySuccessTasksTableWidgetData = getWidgetQueryBuilder(
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -801,6 +869,8 @@ export const celeryTimeSeriesTablesWidgetData = (
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'duration_nano--float64----true',
isColumn: true,
isJSON: false,
key: 'duration_nano',
type: '',
},
@@ -815,6 +885,8 @@ export const celeryTimeSeriesTablesWidgetData = (
key: {
dataType: DataTypes.String,
id: `${entity}--string--tag--false`,
isColumn: false,
isJSON: false,
key: `${entity}`,
type: 'tag',
},
@@ -829,6 +901,8 @@ export const celeryTimeSeriesTablesWidgetData = (
{
dataType: DataTypes.String,
id: 'celery.task_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.task_name',
type: 'tag',
},
@@ -859,6 +933,8 @@ export const celeryAllStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -896,6 +972,8 @@ export const celerySuccessStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -910,6 +988,8 @@ export const celerySuccessStateCountWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -945,6 +1025,8 @@ export const celeryFailedStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
isJSON: false,
key: 'span_id',
type: '',
},
@@ -959,6 +1041,8 @@ export const celeryFailedStateCountWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},
@@ -994,6 +1078,7 @@ export const celeryRetryStateCountWidgetData = getWidgetQueryBuilder(
aggregateAttribute: {
dataType: DataTypes.String,
id: 'span_id--string----true',
isColumn: true,
key: 'span_id',
type: '',
},
@@ -1008,6 +1093,8 @@ export const celeryRetryStateCountWidgetData = getWidgetQueryBuilder(
key: {
dataType: DataTypes.String,
id: 'celery.state--string--tag--false',
isColumn: false,
isJSON: false,
key: 'celery.state',
type: 'tag',
},


@@ -39,6 +39,8 @@ export function getFiltersFromQueryParams(
key,
dataType: DataTypes.String,
type: 'tag',
isColumn: false,
isJSON: false,
id: `${key}--string--tag--false`,
},
op: '=',
@@ -98,7 +100,8 @@ export const createFiltersFromData = (
key: string;
dataType: DataTypes;
type: string;
isColumn: boolean;
isJSON: boolean;
id: string;
};
op: string;
@@ -116,6 +119,8 @@ export const createFiltersFromData = (
key,
dataType: DataTypes.String,
type: 'tag',
isColumn: false,
isJSON: false,
id: `${key}--string--tag--false`,
},
op: '=',
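Here, as in the Celery fixture file above, attribute keys now carry explicit isColumn and isJSON flags, and the id string encodes `${key}--${dataType}--${type}--${isColumn}` (an empty type yields the doubled separator seen in span_id--string----true). A sketch of the resulting key shape, with values taken from this diff:
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
// Example attribute key in the new shape:
const celeryStateKey = {
  key: 'celery.state',
  dataType: DataTypes.String,
  type: 'tag',
  isColumn: false,
  isJSON: false,
  id: 'celery.state--string--tag--false', // key--dataType--type--isColumn
};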

Some files were not shown because too many files have changed in this diff.