Compare commits


6 Commits

736 changed files with 55142 additions and 87319 deletions

View File

@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS vendored (44 lines changed)
View File

@@ -2,11 +2,47 @@
# Owners are automatically requested for review for PRs that change code
# that they own.
/frontend/ @YounixM @aks07
/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
# Onboarding
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish
# Dashboard, Alert, Metrics, Service Map, Services
/frontend/src/container/ListOfDashboard/ @srikanthccv
/frontend/src/container/NewDashboard/ @srikanthccv
/frontend/src/pages/DashboardsListPage/ @srikanthccv
/frontend/src/pages/DashboardWidget/ @srikanthccv
/frontend/src/pages/NewDashboard/ @srikanthccv
/frontend/src/providers/Dashboard/ @srikanthccv
# Alerts
/frontend/src/container/AlertHistory/ @srikanthccv
/frontend/src/container/AllAlertChannels/ @srikanthccv
/frontend/src/container/AnomalyAlertEvaluationView/ @srikanthccv
/frontend/src/container/CreateAlertChannels/ @srikanthccv
/frontend/src/container/CreateAlertRule/ @srikanthccv
/frontend/src/container/EditAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertChannels/ @srikanthccv
/frontend/src/container/FormAlertRules/ @srikanthccv
/frontend/src/container/ListAlertRules/ @srikanthccv
/frontend/src/container/TriggeredAlerts/ @srikanthccv
/frontend/src/pages/AlertChannelCreate/ @srikanthccv
/frontend/src/pages/AlertDetails/ @srikanthccv
/frontend/src/pages/AlertHistory/ @srikanthccv
/frontend/src/pages/AlertList/ @srikanthccv
/frontend/src/pages/CreateAlert/ @srikanthccv
/frontend/src/providers/Alert.tsx @srikanthccv
# Metrics
/frontend/src/container/MetricsExplorer/ @srikanthccv
/frontend/src/pages/MetricsApplication/ @srikanthccv
/frontend/src/pages/MetricsExplorer/ @srikanthccv
# Services and Service Map
/frontend/src/container/ServiceApplication/ @srikanthccv
/frontend/src/container/ServiceTable/ @srikanthccv
/frontend/src/pages/Services/ @srikanthccv
/frontend/src/pages/ServiceTopLevelOperations/ @srikanthccv
/frontend/src/container/Home/Services/ @srikanthccv
/deploy/ @SigNoz/devops
.github @SigNoz/devops

View File

@@ -69,7 +69,6 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -108,6 +107,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
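For context, the `-X` entries in the `LD_FLAGS` value above use the Go linker to stamp package-level string variables at build time (the `constants.ZeusURL` default that these flags override appears later in this diff). A minimal local sketch reusing the same import-path=value pairs; the `go build ./...` invocation is an assumption, not the project's release command:

```sh
# Hedged sketch: stamp the same build-time variables in a local build.
# Import paths and values are copied from the LD_FLAGS block above.
go build -ldflags "\
  -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud \
  -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1" \
  ./...
```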

View File

@@ -68,7 +68,6 @@ jobs:
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
- name: cache-dotenv
uses: actions/cache@v4
with:
@@ -107,6 +106,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'

View File

@@ -35,7 +35,6 @@ jobs:
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact

View File

@@ -17,8 +17,6 @@ jobs:
- bootstrap
- passwordauthn
- callbackauthn
- cloudintegrations
- dashboard
- querier
- ttl
sqlstore-provider:

View File

@@ -1,63 +1,39 @@
version: "2"
linters:
default: none
default: standard
enable:
- bodyclose
- depguard
- errcheck
- forbidigo
- govet
- iface
- ineffassign
- misspell
- nilnil
- sloglint
- depguard
- iface
- unparam
- unused
settings:
depguard:
rules:
noerrors:
deny:
- pkg: errors
desc: Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead.
nozap:
deny:
- pkg: go.uber.org/zap
desc: Do not use zap logger. Use slog instead.
forbidigo:
forbid:
- pattern: fmt.Errorf
- pattern: ^(fmt\.Print.*|print|println)$
iface:
enable:
- identical
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
key-naming-case: snake
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- pkg/query-service
- ee/query-service
- scripts/
- tmp/
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$
- forbidigo
linters-settings:
sloglint:
no-mixed-args: true
kv-only: true
no-global: all
context: all
static-msg: true
msg-style: lowercased
key-naming-case: snake
depguard:
rules:
nozap:
deny:
- pkg: "go.uber.org/zap"
desc: "Do not use zap logger. Use slog instead."
noerrors:
deny:
- pkg: "errors"
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
iface:
enable:
- identical
issues:
exclude-dirs:
- "pkg/query-service"
- "ee/query-service"
- "scripts/"

View File

@@ -84,9 +84,10 @@ go-run-enterprise: ## Runs the enterprise go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go server
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
--config ./conf/prometheus.yml \
--cluster cluster
.PHONY: go-test
go-test: ## Runs go unit tests
@@ -101,9 +102,10 @@ go-run-community: ## Runs the community go backend server
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
--config ./conf/prometheus.yml \
--cluster cluster
.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -206,4 +208,4 @@ py-lint: ## Run lint for integration tests
.PHONY: py-test
py-test: ## Runs integration tests
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/

View File

@@ -5,12 +5,9 @@ import (
"log/slog"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -79,9 +76,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -31,6 +31,7 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
mod_timestamp: "{{ .CommitTimestamp }}"

View File

@@ -8,8 +8,6 @@ import (
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -19,7 +17,6 @@ import (
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -108,9 +105,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -47,10 +47,10 @@ cache:
provider: memory
# memory: Uses in-memory caching.
memory:
# Max items for the in-memory cache (10x the entries)
num_counters: 100000
# Total cost in bytes allocated to the bounded cache
max_cost: 67108864
# Time-to-live for cache entries in memory. Specify the duration in ns
ttl: 60000000000
# The interval at which the cache will be cleaned up
cleanup_interval: 1m
# redis: Uses Redis as the caching backend.
redis:
# The hostname or IP address of the Redis server.

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.104.0
image: signoz/signoz:v0.98.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.12
image: signoz/signoz-otel-collector:v0.129.7
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
deploy:
restart_policy:
condition: on-failure

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.104.0
image: signoz/signoz:v0.98.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.12
image: signoz/signoz-otel-collector:v0.129.7
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.7
deploy:
restart_policy:
condition: on-failure

View File

@@ -1,10 +1,3 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -28,10 +21,6 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -77,11 +66,6 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -93,20 +77,16 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces, signozmeter]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
exporters: [clickhouselogsexporter]

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.104.0}
image: signoz/signoz:${VERSION:-v0.98.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-async
command:
- async

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.104.0}
image: signoz/signoz:${VERSION:-v0.98.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-async
command:
- async

View File

@@ -1,10 +1,3 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -28,10 +21,6 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -77,11 +66,6 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -93,20 +77,16 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces, signozmeter]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
exporters: [clickhouselogsexporter]

View File

@@ -103,19 +103,9 @@ Remember to replace the region and ingestion key with proper values as obtained
Both SigNoz and the OTel demo app [the frontend-proxy service, to be accurate] allocate the same port, 8080. To prevent a port allocation conflict, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run the docker compose command.
Also, both SigNoz and the OTel Demo App have the same `PROMETHEUS_PORT` configured; by default both try to start on `9090`, which may cause either of them to fail depending on which one acquires the port first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.
```sh
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
ENVOY_PORT=8081 docker compose up -d
```
Alternatively, we can set these values in the `.env` file (a sketch of the entries is shown below), which reduces the command to just:
```sh
docker compose up -d
```
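A minimal sketch of the corresponding `.env` entries, assuming the same ports used in the command above:
```sh
# Hypothetical .env contents matching the ports above
ENVOY_PORT=8081
PROMETHEUS_PORT=9091
```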
This spins up multiple microservices with OpenTelemetry instrumentation enabled. You can verify this by running:
```sh
docker compose ps -a

View File

@@ -129,12 +129,6 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
return &authtypes.AuthNProviderInfo{
RelayStatePath: nil,
}
}
func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)

View File

@@ -99,14 +99,6 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
}
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
state := authtypes.NewState(&url.URL{Path: "login"}, authDomain.StorableAuthDomain().ID).URL.String()
return &authtypes.AuthNProviderInfo{
RelayStatePath: &state,
}
}
func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
certStore, err := a.getCertificateStore(authDomain)
if err != nil {

View File

@@ -48,26 +48,7 @@ func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
if err != nil {
return err
}

View File

@@ -15,18 +15,18 @@ type anonymous
type role
relations
define assignee: [user, anonymous]
define assignee: [user]
define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
type metaresources
type resources
relations
define create: [user, role#assignee]
define list: [user, role#assignee]
type metaresource
type resource
relations
define read: [user, anonymous, role#assignee]
define update: [user, role#assignee]
@@ -35,6 +35,6 @@ type metaresource
define block: [user, role#assignee]
type telemetryresource
type telemetry
relations
define read: [user, role#assignee]
define read: [user, anonymous, role#assignee]

View File

@@ -1,10 +1,10 @@
package licensing
import (
"fmt"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
)
@@ -18,7 +18,7 @@ func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
once.Do(func() {
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
if err := config.Validate(); err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid licensing config"))
panic(fmt.Errorf("invalid licensing config: %w", err))
}
})

View File

@@ -19,12 +19,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -61,7 +56,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
})
if err != nil {
@@ -105,39 +99,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost)
// dashboards
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.CreatePublic)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.GetPublic)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.UpdatePublic)).Methods(http.MethodPut)
router.HandleFunc("/api/v1/dashboards/{id}/public", am.AdminAccess(ah.Signoz.Handlers.Dashboard.DeletePublic)).Methods(http.MethodDelete)
// public access for dashboards
router.HandleFunc("/api/v1/public/dashboards/{id}", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicData,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}
return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)
router.HandleFunc("/api/v1/public/dashboards/{id}/widgets/{index}/query_range", am.CheckWithoutClaims(
ah.Signoz.Handlers.Dashboard.GetPublicWidgetQueryRange,
authtypes.RelationRead, authtypes.RelationRead,
dashboardtypes.TypeableMetaResourcePublicDashboard,
func(req *http.Request, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
id, err := valuer.NewUUID(mux.Vars(req)["id"])
if err != nil {
return nil, valuer.UUID{}, err
}
return ah.Signoz.Modules.Dashboard.GetPublicDashboardOrgAndSelectors(req.Context(), id, orgs)
})).Methods(http.MethodGet)
// v3
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost)
router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut)

View File

@@ -10,6 +10,7 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -76,7 +77,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
@@ -185,37 +186,48 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
return cloudIntegrationUser, nil
}
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, string, *basemodel.ApiError,
) {
// TODO: remove this struct from here
url := fmt.Sprintf(
"%s%s",
strings.TrimSuffix(constants.ZeusURL, "/"),
"/v2/deployments/me",
)
type deploymentResponse struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
Status string `json:"status"`
Error string `json:"error"`
Data struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
} `json:"data"`
}
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
resp, apiErr := requestAndParseResponse[deploymentResponse](
ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
)
if apiErr != nil {
return "", "", basemodel.WrapApiError(
apiErr, "couldn't query for deployment info",
)
}
if resp.Status != "success" {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: error: %w", err,
"couldn't query for deployment info: status: %s, error: %s",
resp.Status, resp.Error,
))
}
resp := new(deploymentResponse)
err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}
regionDns := resp.ClusterInfo.Region.DNS
deploymentName := resp.Name
regionDns := resp.Data.ClusterInfo.Region.DNS
deploymentName := resp.Data.Name
if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge

View File

@@ -9,7 +9,6 @@ import (
_ "net/http/pprof" // http profiler
"slices"
"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
@@ -75,26 +74,13 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
NumCounters: 10 * 10000,
MaxCost: 1 << 27, // 128 MB
},
})
if err != nil {
return nil, err
}
reader := clickhouseReader.NewReader(
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.TelemetryStore.Cluster(),
config.Querier.FluxInterval,
cacheForTraceDetail,
signoz.Cache,
nil,
)
rm, err := makeRulesManager(
@@ -206,7 +192,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
r.Use(otelmux.Middleware(
"apiserver",

View File

@@ -10,6 +10,9 @@ var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"
func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)
if len(v) == 0 {

View File

@@ -246,9 +246,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
continue
}
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
})
results, err := r.Threshold.ShouldAlert(*series, r.Unit())
if err != nil {
return nil, err
}
@@ -298,9 +296,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
continue
}
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
})
results, err := r.Threshold.ShouldAlert(*series, r.Unit())
if err != nil {
return nil, err
}
@@ -414,7 +410,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
GeneratorURL: r.GeneratorURL(),
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
Missing: smpl.IsMissing,
IsRecovering: smpl.IsRecovering,
}
}
@@ -427,9 +422,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
alert.Value = a.Value
alert.Annotations = a.Annotations
// Update the recovering and missing state of existing alert
alert.IsRecovering = a.IsRecovering
alert.Missing = a.Missing
if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
alert.Receivers = ruleReceiverMap[v]
}
@@ -488,30 +480,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
Value: a.Value,
})
}
// We need to change firing alert to recovering if the returned sample meets recovery threshold
changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
// We need to change recovering alerts to firing if the returned sample meets target threshold
changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
// in any of the above case we need to update the status of alert
if changeFiringToRecovering || changeRecoveringToFiring {
state := model.StateRecovering
if changeRecoveringToFiring {
state = model.StateFiring
}
a.State = state
r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Value: a.Value,
})
}
}
currentState := r.State()

View File

@@ -30,8 +30,6 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
return sqlschema.DataTypeBoolean
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
return sqlschema.DataTypeText
case "BYTEA":
return sqlschema.DataTypeBytea
}
return formatter.Formatter.DataTypeOf(dataType)

View File

@@ -1,153 +0,0 @@
package postgressqlstore
import (
"strings"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun/schema"
)
type formatter struct {
bunf schema.Formatter
}
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
return &formatter{bunf: schema.NewFormatter(dialect)}
}
func (f *formatter) JSONExtractString(column, path string) []byte {
var sql []byte
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgres(path)...)
return sql
}
func (f *formatter) JSONType(column, path string) []byte {
var sql []byte
sql = append(sql, "jsonb_typeof("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ')')
return sql
}
func (f *formatter) JSONIsArray(column, path string) []byte {
var sql []byte
sql = append(sql, f.JSONType(column, path)...)
sql = append(sql, " = "...)
sql = schema.Append(f.bunf, sql, "array")
return sql
}
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_array_elements("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, []byte(alias)
}
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_array_elements_text("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, append([]byte(alias), "::text"...)
}
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "jsonb_each("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)
return sql, append([]byte(alias), ".key"...)
}
func (f *formatter) JSONArrayAgg(expression string) []byte {
var sql []byte
sql = append(sql, "jsonb_agg("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
var sql []byte
sql = append(sql, "jsonb_build_array("...)
for idx, value := range values {
if idx > 0 {
sql = append(sql, ", "...)
}
sql = schema.Append(f.bunf, sql, value)
}
sql = append(sql, ')')
return sql
}
func (f *formatter) TextToJsonColumn(column string) []byte {
var sql []byte
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, "::jsonb"...)
return sql
}
func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}
func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")
if path == "" {
return nil
}
parts := strings.Split(path, ".")
var validParts []string
for _, part := range parts {
if part != "" {
validParts = append(validParts, part)
}
}
if len(validParts) == 0 {
return nil
}
var result []byte
for idx, part := range validParts {
if idx == len(validParts)-1 {
if asText {
result = append(result, "->>"...)
} else {
result = append(result, "->"...)
}
result = schema.Append(f.bunf, result, part)
return result
}
result = append(result, "->"...)
result = schema.Append(f.bunf, result, part)
}
return result
}
func (f *formatter) LowerExpression(expression string) []byte {
var sql []byte
sql = append(sql, "lower("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}

View File

@@ -1,500 +0,0 @@
package postgressqlstore
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/uptrace/bun/dialect/pgdialect"
)
func TestJSONExtractString(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.field",
expected: `"data"->>'field'`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.name",
expected: `"metadata"->'user'->>'name'`,
},
{
name: "deeply nested path",
column: "json_col",
path: "$.level1.level2.level3",
expected: `"json_col"->'level1'->'level2'->>'level3'`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `"json_col"`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `"data"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONExtractString(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONType(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.field",
expected: `jsonb_typeof("data"->'field')`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.age",
expected: `jsonb_typeof("metadata"->'user'->'age')`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `jsonb_typeof("json_col")`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data")`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONType(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONIsArray(t *testing.T) {
tests := []struct {
name string
column string
path string
expected string
}{
{
name: "simple path",
column: "data",
path: "$.items",
expected: `jsonb_typeof("data"->'items') = 'array'`,
},
{
name: "nested path",
column: "metadata",
path: "$.user.tags",
expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
},
{
name: "root path",
column: "json_col",
path: "$",
expected: `jsonb_typeof("json_col") = 'array'`,
},
{
name: "empty path",
column: "data",
path: "",
expected: `jsonb_typeof("data") = 'array'`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONIsArray(tt.column, tt.path))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONArrayElements(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "elem",
expected: `jsonb_array_elements("data") AS "elem"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "elem",
expected: `jsonb_array_elements("data") AS "elem"`,
},
{
name: "nested path",
column: "metadata",
path: "$.items",
alias: "item",
expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
},
{
name: "deeply nested path",
column: "json_col",
path: "$.user.tags",
alias: "tag",
expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONArrayOfStrings(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "str",
expected: `jsonb_array_elements_text("data") AS "str"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "str",
expected: `jsonb_array_elements_text("data") AS "str"`,
},
{
name: "nested path",
column: "metadata",
path: "$.strings",
alias: "s",
expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONKeys(t *testing.T) {
tests := []struct {
name string
column string
path string
alias string
expected string
}{
{
name: "root path with dollar sign",
column: "data",
path: "$",
alias: "k",
expected: `jsonb_each("data") AS "k"`,
},
{
name: "root path empty",
column: "data",
path: "",
alias: "k",
expected: `jsonb_each("data") AS "k"`,
},
{
name: "nested path",
column: "metadata",
path: "$.object",
alias: "key",
expected: `jsonb_each("metadata"->'object') AS "key"`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
assert.Equal(t, tt.expected, string(got))
})
}
}
func TestJSONArrayAgg(t *testing.T) {
tests := []struct {
name string
expression string
expected string
}{
{
name: "simple column",
expression: "id",
expected: "jsonb_agg(id)",
},
{
name: "expression with function",
expression: "DISTINCT name",
expected: "jsonb_agg(DISTINCT name)",
},
{
name: "complex expression",
expression: "data->>'field'",
expected: "jsonb_agg(data->>'field')",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONArrayAgg(tt.expression))
assert.Equal(t, tt.expected, got)
})
}
}
func TestJSONArrayLiteral(t *testing.T) {
tests := []struct {
name string
values []string
expected string
}{
{
name: "empty array",
values: []string{},
expected: "jsonb_build_array()",
},
{
name: "single value",
values: []string{"value1"},
expected: "jsonb_build_array('value1')",
},
{
name: "multiple values",
values: []string{"value1", "value2", "value3"},
expected: "jsonb_build_array('value1', 'value2', 'value3')",
},
{
name: "values with special characters",
values: []string{"test", "with space", "with-dash"},
expected: "jsonb_build_array('test', 'with space', 'with-dash')",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.JSONArrayLiteral(tt.values...))
assert.Equal(t, tt.expected, got)
})
}
}
func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
tests := []struct {
name string
jsonPath string
asText bool
expected string
}{
{
name: "simple path as text",
jsonPath: "$.field",
asText: true,
expected: "->>'field'",
},
{
name: "simple path as json",
jsonPath: "$.field",
asText: false,
expected: "->'field'",
},
{
name: "nested path as text",
jsonPath: "$.user.name",
asText: true,
expected: "->'user'->>'name'",
},
{
name: "nested path as json",
jsonPath: "$.user.name",
asText: false,
expected: "->'user'->'name'",
},
{
name: "deeply nested as text",
jsonPath: "$.a.b.c.d",
asText: true,
expected: "->'a'->'b'->'c'->>'d'",
},
{
name: "root path",
jsonPath: "$",
asText: true,
expected: "",
},
{
name: "empty path",
jsonPath: "",
asText: true,
expected: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New()).(*formatter)
got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
assert.Equal(t, tt.expected, got)
})
}
}
func TestTextToJsonColumn(t *testing.T) {
tests := []struct {
name string
column string
expected string
}{
{
name: "simple column name",
column: "data",
expected: `"data"::jsonb`,
},
{
name: "column with underscore",
column: "user_data",
expected: `"user_data"::jsonb`,
},
{
name: "column with special characters",
column: "json-col",
expected: `"json-col"::jsonb`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.TextToJsonColumn(tt.column))
assert.Equal(t, tt.expected, got)
})
}
}
func TestLowerExpression(t *testing.T) {
tests := []struct {
name string
expr string
expected string
}{
{
name: "simple column name",
expr: "name",
expected: "lower(name)",
},
{
name: "quoted column identifier",
expr: `"column_name"`,
expected: `lower("column_name")`,
},
{
name: "jsonb text extraction",
expr: "data->>'field'",
expected: "lower(data->>'field')",
},
{
name: "nested jsonb extraction",
expr: "metadata->'user'->>'name'",
expected: "lower(metadata->'user'->>'name')",
},
{
name: "jsonb_typeof expression",
expr: "jsonb_typeof(data->'field')",
expected: "lower(jsonb_typeof(data->'field'))",
},
{
name: "string concatenation",
expr: "first_name || ' ' || last_name",
expected: "lower(first_name || ' ' || last_name)",
},
{
name: "CAST expression",
expr: "CAST(value AS TEXT)",
expected: "lower(CAST(value AS TEXT))",
},
{
name: "COALESCE expression",
expr: "COALESCE(name, 'default')",
expected: "lower(COALESCE(name, 'default'))",
},
{
name: "subquery column",
expr: "users.email",
expected: "lower(users.email)",
},
{
name: "quoted identifier with special chars",
expr: `"user-name"`,
expected: `lower("user-name")`,
},
{
name: "jsonb to text cast",
expr: "data::text",
expected: "lower(data::text)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := newFormatter(pgdialect.New())
got := string(f.LowerExpression(tt.expr))
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -15,11 +15,10 @@ import (
)
type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
formatter sqlstore.SQLFormatter
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
}
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -56,14 +55,11 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
sqldb := stdlib.OpenDBFromPool(pool)
pgDialect := pgdialect.New()
bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
return &provider{
settings: settings,
sqldb: sqldb,
bundb: bunDB,
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
dialect: new(dialect),
}, nil
}
@@ -79,10 +75,6 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}
func (provider *provider) Formatter() sqlstore.SQLFormatter {
return provider.formatter
}
func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
return provider.bundb.BunDBCtx(ctx)
}

View File

@@ -1,10 +1,10 @@
package zeus
import (
"fmt"
neturl "net/url"
"sync"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/zeus"
)
@@ -24,17 +24,17 @@ func Config() zeus.Config {
once.Do(func() {
parsedURL, err := neturl.Parse(url)
if err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus URL"))
panic(fmt.Errorf("invalid zeus URL: %w", err))
}
deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
if err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus deprecated URL"))
panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
}
config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
if err := config.Validate(); err != nil {
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus config"))
panic(fmt.Errorf("invalid zeus config: %w", err))
}
})

View File

@@ -1,5 +1,5 @@
module.exports = {
ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
ignorePatterns: ['src/parser/*.ts'],
env: {
browser: true,
es2021: true,

View File

@@ -3,6 +3,5 @@ BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
PYLON_APP_ID="pylon-app-id"
APPCUES_APP_ID="appcess-app-id"
PYLON_IDENTITY_SECRET="pylon-identity-secret"
CI="1"

View File

@@ -14,7 +14,7 @@
"jest": "jest",
"jest:coverage": "jest --coverage",
"jest:watch": "jest --watch",
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure) && node scripts/update-registry.js",
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1",
"test": "jest",
@@ -38,7 +38,7 @@
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
"@monaco-editor/react": "^4.3.1",
"@playwright/test": "1.55.1",
"@playwright/test": "1.54.1",
"@radix-ui/react-tabs": "1.0.4",
"@radix-ui/react-tooltip": "1.0.7",
"@sentry/react": "8.41.0",
@@ -47,8 +47,6 @@
"@signozhq/button": "0.0.2",
"@signozhq/calendar": "0.0.0",
"@signozhq/callout": "0.0.2",
"@signozhq/checkbox": "0.0.2",
"@signozhq/command": "0.0.0",
"@signozhq/design-tokens": "1.1.4",
"@signozhq/input": "0.0.2",
"@signozhq/popover": "0.0.0",
@@ -71,7 +69,7 @@
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"antlr4": "4.13.2",
"axios": "1.12.0",
"axios": "1.8.2",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",
@@ -85,7 +83,6 @@
"color": "^4.2.1",
"color-alpha": "1.1.3",
"cross-env": "^7.0.3",
"crypto-js": "4.2.0",
"css-loader": "5.0.0",
"css-minimizer-webpack-plugin": "5.0.1",
"d3-hierarchy": "3.1.2",
@@ -105,6 +102,7 @@
"i18next-http-backend": "^1.3.2",
"jest": "^27.5.1",
"js-base64": "^3.7.2",
"kbar": "0.1.0-beta.48",
"less": "^4.1.2",
"less-loader": "^10.2.0",
"lodash-es": "^4.17.21",
@@ -114,7 +112,7 @@
"overlayscrollbars": "^2.8.1",
"overlayscrollbars-react": "^0.5.6",
"papaparse": "5.4.1",
"posthog-js": "1.298.0",
"posthog-js": "1.215.5",
"rc-tween-one": "3.0.6",
"react": "18.2.0",
"react-addons-update": "15.6.3",
@@ -151,6 +149,7 @@
"tsconfig-paths-webpack-plugin": "^3.5.1",
"typescript": "^4.0.5",
"uplot": "1.6.31",
"userpilot": "1.3.9",
"uuid": "^8.3.2",
"web-vitals": "^0.2.4",
"webpack": "5.94.0",
@@ -187,7 +186,6 @@
"@types/color": "^3.0.3",
"@types/compression-webpack-plugin": "^9.0.0",
"@types/copy-webpack-plugin": "^8.0.1",
"@types/crypto-js": "4.2.2",
"@types/dompurify": "^2.4.0",
"@types/event-source-polyfill": "^1.0.0",
"@types/fontfaceobserver": "2.1.0",
@@ -282,7 +280,6 @@
"got": "11.8.5",
"form-data": "4.0.4",
"brace-expansion": "^2.0.2",
"on-headers": "^1.1.0",
"tmp": "0.2.4"
"on-headers": "^1.1.0"
}
}

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" fill="currentColor" fill-rule="evenodd" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>AWS</title><path d="M6.763 11.212q.002.446.088.71c.064.176.144.368.256.576.04.063.056.127.056.183q.002.12-.152.24l-.503.335a.4.4 0 0 1-.208.072q-.12-.002-.239-.112a2.5 2.5 0 0 1-.287-.375 6 6 0 0 1-.248-.471q-.934 1.101-2.347 1.101c-.67 0-1.205-.191-1.596-.574-.39-.384-.59-.894-.59-1.533 0-.678.24-1.23.726-1.644.487-.415 1.133-.623 1.955-.623.272 0 .551.024.846.064.296.04.6.104.918.176v-.583q-.001-.908-.375-1.277c-.255-.248-.686-.367-1.3-.367-.28 0-.568.031-.863.103s-.583.16-.862.272a2 2 0 0 1-.28.104.5.5 0 0 1-.127.023q-.168.002-.168-.247v-.391c0-.128.016-.224.056-.28a.6.6 0 0 1 .224-.167 4.6 4.6 0 0 1 1.005-.36 4.8 4.8 0 0 1 1.246-.151c.95 0 1.644.216 2.091.647q.661.646.662 1.963v2.586zm-3.24 1.214c.263 0 .534-.048.822-.144a1.8 1.8 0 0 0 .758-.51 1.3 1.3 0 0 0 .272-.512c.047-.191.08-.423.08-.694v-.335a7 7 0 0 0-.735-.136 6 6 0 0 0-.75-.048c-.535 0-.926.104-1.19.32-.263.215-.39.518-.39.917 0 .375.095.655.295.846.191.2.47.296.838.296m6.41.862c-.144 0-.24-.024-.304-.08-.064-.048-.12-.16-.168-.311L7.586 6.726a1.4 1.4 0 0 1-.072-.32c0-.128.064-.2.191-.2h.783q.227-.001.31.08c.065.048.113.16.16.312l1.342 5.284 1.245-5.284q.058-.24.151-.312a.55.55 0 0 1 .32-.08h.638c.152 0 .256.025.32.08.063.048.12.16.151.312l1.261 5.348 1.381-5.348q.074-.24.16-.312a.52.52 0 0 1 .311-.08h.743c.127 0 .2.065.2.2 0 .04-.009.08-.017.128a1 1 0 0 1-.056.2l-1.923 6.17q-.072.24-.168.311a.5.5 0 0 1-.303.08h-.687c-.15 0-.255-.024-.32-.08-.063-.056-.119-.16-.15-.32L12.32 7.747l-1.23 5.14c-.04.16-.087.264-.15.32-.065.056-.177.08-.32.08zm10.256.215c-.415 0-.83-.048-1.229-.143-.399-.096-.71-.2-.918-.32-.128-.071-.215-.151-.247-.223a.6.6 0 0 1-.048-.224v-.407c0-.167.064-.247.183-.247q.072 0 .144.024c.048.016.12.048.2.08q.408.181.878.279c.32.064.63.096.95.096.502 0 .894-.088 1.165-.264a.86.86 0 0 0 .415-.758.78.78 0 0 0-.215-.559c-.144-.151-.416-.287-.807-.415l-1.157-.36c-.583-.183-1.014-.454-1.277-.813a1.9 1.9 0 0 1-.4-1.158q0-.502.216-.886c.144-.255.335-.479.575-.654.24-.184.51-.32.83-.415.32-.096.655-.136 1.006-.136.175 0 .36.008.535.032.183.024.35.056.518.088q.24.058.455.127.216.072.336.144a.7.7 0 0 1 .24.2.43.43 0 0 1 .071.263v.375q-.002.254-.184.256a.8.8 0 0 1-.303-.096 3.65 3.65 0 0 0-1.532-.311c-.455 0-.815.071-1.062.223s-.375.383-.375.71c0 .224.08.416.24.567.16.152.454.304.877.44l1.134.358c.574.184.99.44 1.237.767s.367.702.367 1.117c0 .343-.072.655-.207.926a2.2 2.2 0 0 1-.583.703c-.248.2-.543.343-.886.447-.36.111-.734.167-1.142.167"/><path fill="#f90" d="M.378 15.475c3.384 1.963 7.56 3.153 11.877 3.153 2.914 0 6.114-.607 9.06-1.852.44-.2.814.287.383.607-2.626 1.94-6.442 2.969-9.722 2.969-4.598 0-8.74-1.7-11.87-4.526-.247-.223-.024-.527.272-.351m23.531-.2c.287.36-.08 2.826-1.485 4.007-.215.184-.423.088-.327-.151l.175-.439c.343-.88.802-2.198.52-2.555-.336-.43-2.22-.207-3.074-.103-.255.032-.295-.192-.063-.36 1.5-1.053 3.967-.75 4.254-.399"/></svg>

Before: 3.0 KiB

File diff suppressed because one or more lines are too long (before: 20 KiB)

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>Azure</title><path fill="url(#a)" d="M7.242 1.613A1.11 1.11 0 0 1 8.295.857h6.977L8.03 22.316a1.11 1.11 0 0 1-1.052.755h-5.43a1.11 1.11 0 0 1-1.053-1.466z"/><path fill="#0078d4" d="M18.397 15.296H7.4a.51.51 0 0 0-.347.882l7.066 6.595c.206.192.477.298.758.298h6.226z"/><path fill="url(#b)" d="M15.272.857H7.497L0 23.071h7.775l1.596-4.73 5.068 4.73h6.665l-2.707-7.775h-7.998z"/><path fill="url(#c)" d="M17.193 1.613a1.11 1.11 0 0 0-1.052-.756h-7.81.035c.477 0 .9.304 1.052.756l6.748 19.992a1.11 1.11 0 0 1-1.052 1.466h-.12 7.895a1.11 1.11 0 0 0 1.052-1.466z"/><defs><linearGradient id="a" x1="8.247" x2="1.002" y1="1.626" y2="23.03" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0669bc"/></linearGradient><linearGradient id="b" x1="14.042" x2="12.324" y1="15.302" y2="15.888" gradientUnits="userSpaceOnUse"><stop stop-opacity=".3"/><stop offset=".071" stop-opacity=".2"/><stop offset=".321" stop-opacity=".1"/><stop offset=".623" stop-opacity=".05"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" x1="12.841" x2="20.793" y1="1.626" y2="22.814" gradientUnits="userSpaceOnUse"><stop stop-color="#3ccbf4"/><stop offset="1" stop-color="#2892df"/></linearGradient></defs></svg>

Before: 1.3 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>CrewAI</title><path fill="#461816" d="M19.41 10.783a2.75 2.75 0 0 1 2.471 1.355c.483.806.622 1.772.385 2.68l-.136.522a10 10 0 0 1-3.156 5.058c-.605.517-1.283 1.062-2.083 1.524l-.028.017c-.402.232-.884.511-1.398.756-1.19.602-2.475.997-3.798 1.167-.854.111-1.716.155-2.577.132h-.018a8.6 8.6 0 0 1-5.046-1.87l-.012-.01-.012-.01A8.02 8.02 0 0 1 1.22 17.42a10.9 10.9 0 0 1-.102-3.779A15.6 15.6 0 0 1 2.88 8.4a21.8 21.8 0 0 1 2.432-3.678 15.4 15.4 0 0 1 3.56-3.182A10 10 0 0 1 12.44.104h.004l.003-.002c2.057-.384 3.743.374 5.024 1.26a8.3 8.3 0 0 1 2.395 2.513l.024.04.023.042a5.47 5.47 0 0 1 .508 4.012c-.239.97-.577 1.914-1.01 2.814z"/><path fill="#fff" d="M18.861 13.165a.748.748 0 0 1 1.256.031c.199.332.256.73.159 1.103l-.137.522a7.94 7.94 0 0 1-2.504 4.014c-.572.49-1.138.939-1.774 1.306-.427.247-.857.496-1.303.707a9.6 9.6 0 0 1-3.155.973 14.3 14.3 0 0 1-2.257.116 6.53 6.53 0 0 1-3.837-1.422 5.97 5.97 0 0 1-2.071-3.494 8.9 8.9 0 0 1-.085-3.08 13.6 13.6 0 0 1 1.54-4.568 19.7 19.7 0 0 1 2.212-3.348 13.4 13.4 0 0 1 3.088-2.76 7.9 7.9 0 0 1 2.832-1.14c1.307-.245 2.434.207 3.481.933a6.2 6.2 0 0 1 1.806 1.892c.423.767.536 1.668.314 2.515a12.4 12.4 0 0 1-.99 2.67l-.223.497q-.48 1.07-.97 2.137a.76.76 0 0 1-.97.467 3.39 3.39 0 0 1-2.283-2.49c-.095-.83.04-1.669.39-2.426.288-.746.61-1.477.933-2.208l.248-.563a.53.53 0 0 0-.204-.742 2.35 2.35 0 0 0-1.2.702 25 25 0 0 0-1.614 1.767 21.6 21.6 0 0 0-2.619 4.184 7.6 7.6 0 0 0-.816 2.753 7 7 0 0 0 .07 2.219 2.055 2.055 0 0 0 1.934 1.715c1.801.1 3.59-.363 5.116-1.328a19 19 0 0 0 1.675-1.294c.752-.71 1.376-1.519 1.958-2.36"/></svg>

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><g transform="translate(25 25)"><rect width="60" height="55" fill="#fcd34d" rx="6"/><path stroke="#d97706" stroke-linecap="round" stroke-linejoin="round" stroke-width="4" d="m10 40 15-15 15 8 15-23"/><rect width="35" height="55" x="65" fill="#6ee7b7" rx="6"/><rect width="20" height="6" x="73" y="10" fill="#059669" rx="3"/><rect width="15" height="6" x="73" y="22" fill="#059669" opacity=".7" rx="3"/><rect width="18" height="6" x="73" y="34" fill="#059669" opacity=".7" rx="3"/><rect width="100" height="40" y="60" fill="#a5b4fc" rx="6"/><rect width="80" height="8" x="10" y="70" fill="#4f46e5" rx="4"/><rect width="50" height="8" x="20" y="83" fill="#6366f1" rx="4"/></g></svg>

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#b31aab" d="m33.172 61.48.176 7.325 7.797 4.785-.176-7.324Zm19.031 30.504-.176-7.18-6.84-4.206c-.085-.059-.203-.145-.289-.203l.176 7.207Zm-24.355 9.688L10.012 90.715l-.438-18.367 8.758-3.746-.172-7.352-13.969 5.969c-1.074.46-1.714 1.441-1.687 2.566l.523 22.055c.032 1.125.73 2.25 1.836 2.941l21.383 13.149c.992.605 2.215.78 3.203.46a.8.8 0 0 0 .29-.113l13.124-5.593-7.129-4.383Zm0 0"/><path fill="#d163ce" d="M85.488 61.047c-.031-1.328-.843-2.625-2.125-3.43L57.38 41.672l-.813.344.176 7.726 20.57 12.63.493 20.648 7.86 4.812.433-.172ZM54.383 97.289 30.262 82.47l-.582-24.883 11-4.7-.203-8.562-17.082 7.293c-1.25.547-2.008 1.672-1.977 3l.668 29.18c.031 1.324.844 2.625 2.125 3.402l28.281 17.387c1.164.723 2.563.894 3.754.547.117-.028.234-.086.348-.145l16.703-7.12-8.32-5.102Zm0 0"/><path fill="#e13eaf" d="M122.234 40.633 85.98 18.343c-1.335-.808-2.937-1.038-4.304-.605-.145.028-.262.086-.406.145l-35.383 15.11c-1.426.605-2.297 1.902-2.27 3.429l.903 37.371c.03 1.527.96 2.996 2.445 3.89l36.254 22.262c1.336.805 2.937 1.035 4.277.606.145-.031.262-.09.406-.145l35.383-15.11c1.426-.605 2.297-1.933 2.27-3.433l-.875-37.367c-.028-1.473-.961-2.969-2.446-3.863M85.398 91.64 53.891 72.293l-.79-32.496 30.727-13.121 31.512 19.347.785 32.497Zm0 0"/></svg>

File diff suppressed because one or more lines are too long

Before: 6.0 KiB

File diff suppressed because one or more lines are too long

Before: 5.5 KiB

File diff suppressed because one or more lines are too long

Before: 19 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><g transform="translate(25 40)"><rect width="12" height="12" fill="#059669" opacity=".8" rx="3"/><rect width="80" height="12" x="20" fill="#10b981" rx="3"/><rect width="12" height="12" y="22" fill="#059669" opacity=".8" rx="3"/><rect width="65" height="12" x="20" y="22" fill="#10b981" rx="3"/><rect width="12" height="12" y="44" fill="#059669" opacity=".8" rx="3"/><rect width="75" height="12" x="20" y="44" fill="#10b981" rx="3"/><rect width="12" height="12" y="66" fill="#059669" opacity=".8" rx="3"/><rect width="50" height="12" x="20" y="66" fill="#10b981" rx="3"/></g></svg>

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><defs><linearGradient id="a" x1="0" x2="0" y1="0" y2="1"><stop offset="0%" stop-color="#f59e0b" stop-opacity=".5"/><stop offset="100%" stop-color="#f59e0b" stop-opacity=".05"/></linearGradient></defs><g transform="translate(25 35)"><path stroke="#d1d5db" stroke-linecap="round" stroke-width="2" d="M0 80h100M0 80V0"/><path fill="url(#a)" d="M2 78c18 0 28-28 48-23s30-35 48-40v63z"/><path stroke="#d97706" stroke-linecap="round" stroke-width="5" d="M2 78c18 0 28-28 48-23s30-35 48-40"/><circle cx="50" cy="55" r="4" fill="#f59e0b"/><circle cx="98" cy="15" r="4" fill="#f59e0b"/></g></svg>

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>PydanticAI</title><path fill="#e72564" d="M13.223 22.86c-.605.83-1.844.83-2.448 0L5.74 15.944a1.514 1.514 0 0 1 .73-2.322l5.035-1.738c.32-.11.668-.11.988 0l5.035 1.738c.962.332 1.329 1.5.73 2.322zm-1.224-1.259 4.688-6.439-4.688-1.618-4.688 1.618L12 21.602z"/><path fill="#e723a0" d="M23.71 13.463c.604.832.221 2.01-.756 2.328l-8.133 2.652a1.514 1.514 0 0 1-1.983-1.412l-.097-5.326c-.006-.338.101-.67.305-.94l3.209-4.25a1.514 1.514 0 0 1 2.434.022l5.022 6.926zm-1.574.775L17.46 7.79l-2.988 3.958.09 4.959z"/><path fill="#e520e9" d="M18.016.591a1.514 1.514 0 0 1 1.98 1.44l.009 8.554a1.514 1.514 0 0 1-1.956 1.45l-5.095-1.554a1.5 1.5 0 0 1-.8-.58l-3.05-4.366a1.514 1.514 0 0 1 .774-2.308zm.25 1.738L10.69 4.783l2.841 4.065 4.744 1.446-.008-7.965z"/><path fill="#e520e9" d="M5.99.595a1.514 1.514 0 0 0-1.98 1.44L4 10.588a1.514 1.514 0 0 0 1.956 1.45l5.095-1.554c.323-.098.605-.303.799-.58l3.052-4.366a1.514 1.514 0 0 0-.775-2.308zm-.25 1.738 7.577 2.454-2.842 4.065-4.743 1.446.007-7.965z"/><path fill="#e723a0" d="M.29 13.461a1.514 1.514 0 0 0 .756 2.329l8.133 2.651a1.514 1.514 0 0 0 1.983-1.412l.097-5.325a1.5 1.5 0 0 0-.305-.94L7.745 6.513a1.514 1.514 0 0 0-2.434.023L.289 13.461zm1.574.776L6.54 7.788l2.988 3.959-.09 4.958z"/><path fill="#ff96d1" d="m16.942 17.751 1.316-1.806q.178-.248.245-.523l-2.63.858-1.627 2.235a1.5 1.5 0 0 0 .575-.072zm-4.196-5.78.033 1.842 1.742.602-.034-1.843-1.741-.6zm7.257-3.622-1.314-1.812a1.5 1.5 0 0 0-.419-.393l.003 2.767 1.624 2.24q.107-.261.108-.566zm-5.038 2.746-1.762-.537 1.11-1.471 1.762.537zm-2.961-1.41 1.056-1.51-1.056-1.51-1.056 1.51zM9.368 3.509c.145-.122.316-.219.51-.282l2.12-.686 2.13.69c.191.062.36.157.503.276l-2.634.853zm1.433 7.053L9.691 9.09l-1.762.537 1.11 1.47 1.762-.537zm-6.696.584L5.733 8.9l.003-2.763c-.16.1-.305.232-.425.398L4.003 8.339l-.002 2.25q.002.299.104.557m7.149.824-1.741.601-.034 1.843 1.742-.601zM9.75 18.513l-1.628-2.237-2.629-.857q.068.276.247.525l1.313 1.804 2.126.693c.192.062.385.085.571.072"/></svg>

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><rect width="100" height="14" x="25" y="45" fill="#4f46e5" rx="4"/><rect width="60" height="14" x="40" y="68" fill="#6366f1" rx="4"/><rect width="20" height="14" x="105" y="91" fill="#818cf8" rx="4"/><path stroke="#c7d2fe" stroke-width="2" d="M35 59v16m5 0h-5M100 59v39m5 0h-5"/></svg>

View File

@@ -1,50 +0,0 @@
/* eslint-disable @typescript-eslint/no-var-requires, import/no-dynamic-require, simple-import-sort/imports, simple-import-sort/exports */
const fs = require('fs');
const path = require('path');
// 1. Define paths
const packageJsonPath = path.resolve(__dirname, '../package.json');
const registryPath = path.resolve(
__dirname,
'../src/auto-import-registry.d.ts',
);
// 2. Read package.json
const packageJson = require(packageJsonPath);
// 3. Combine dependencies and devDependencies
const allDeps = {
...packageJson.dependencies,
...packageJson.devDependencies,
};
// 4. Filter for @signozhq packages
const signozPackages = Object.keys(allDeps).filter((dep) =>
dep.startsWith('@signozhq/'),
);
// 5. Generate file content
const fileContent = `// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------
${signozPackages.map((pkg) => `import '${pkg}';`).join('\n')}
`;
// 6. Write the file
try {
fs.writeFileSync(registryPath, fileContent);
console.log(
`✅ Auto-import registry updated with ${signozPackages.length} @signozhq packages.`,
);
} catch (err) {
console.error('❌ Failed to update auto-import registry:', err);
}

View File

@@ -245,14 +245,6 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
history.replace(newLocation);
return;
}
// if the current route is public dashboard then don't redirect to login
const isPublicDashboard = currentRoute?.path === ROUTES.PUBLIC_DASHBOARD;
if (isPublicDashboard) {
return;
}
// if the current route
if (currentRoute) {
const { isPrivate, key } = currentRoute;

View File

@@ -4,15 +4,14 @@ import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import AppLoading from 'components/AppLoading/AppLoading';
import { CmdKPalette } from 'components/cmdKPalette/cmdKPalette';
import KBarCommandPalette from 'components/KBarCommandPalette/KBarCommandPalette';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import Hex from 'crypto-js/enc-hex';
import HmacSHA256 from 'crypto-js/hmac-sha256';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -22,17 +21,19 @@ import { StatusCodes } from 'http-status-codes';
import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { CmdKProvider } from 'providers/cmdKProvider';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { KBarCommandPaletteProvider } from 'providers/KBarCommandPaletteProvider';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { LicenseStatus } from 'types/api/licensesV3/getActive';
import { Userpilot } from 'userpilot';
import { extractDomain } from 'utils/app';
import { Home } from './pageComponents';
@@ -83,9 +84,9 @@ function App(): JSX.Element {
email,
name: displayName,
company_name: orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
role,
@@ -93,9 +94,9 @@ function App(): JSX.Element {
const groupTraits = {
name: orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
};
@@ -110,23 +111,37 @@ function App(): JSX.Element {
if (window && window.Appcues) {
window.Appcues.identify(id, {
name: displayName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
companyName: orgName,
email,
paidUser: !!trialInfo?.trialConvertedToSubscription,
});
}
Userpilot.identify(email, {
email,
name: displayName,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.identify(id, {
email,
name: displayName,
orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -134,9 +149,9 @@ function App(): JSX.Element {
posthog?.group('company', orgId, {
name: orgName,
deployment_name: hostNameParts[0],
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
deployment_url: hostname,
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -213,10 +228,7 @@ function App(): JSX.Element {
]);
useEffect(() => {
if (
pathname === ROUTES.ONBOARDING ||
pathname.startsWith('/public/dashboard/')
) {
if (pathname === ROUTES.ONBOARDING) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.Pylon('hideChatBubble');
@@ -258,20 +270,11 @@ function App(): JSX.Element {
!showAddCreditCardModal &&
(isCloudUser || isEnterpriseSelfHostedUser)
) {
const email = user.email || '';
const secret = process.env.PYLON_IDENTITY_SECRET || '';
let emailHash = '';
if (email && secret) {
emailHash = HmacSHA256(email, Hex.parse(secret)).toString(Hex);
}
window.pylon = {
chat_settings: {
app_id: process.env.PYLON_APP_ID,
email: user.email,
name: user.displayName || user.email,
email_hash: emailHash,
name: user.displayName,
},
};
}
@@ -305,6 +308,10 @@ function App(): JSX.Element {
});
}
if (process.env.USERPILOT_KEY) {
Userpilot.initialize(process.env.USERPILOT_KEY);
}
if (!isSentryInitialized) {
Sentry.init({
dsn: process.env.SENTRY_DSN,
@@ -364,33 +371,36 @@ function App(): JSX.Element {
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<CompatRouter>
<CmdKProvider>
<KBarCommandPaletteProvider>
<UserpilotRouteTracker />
<KBarCommandPalette />
<NotificationProvider>
<ErrorModalProvider>
{isLoggedInState && <CmdKPalette userRole={user.role} />}
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
<AlertRuleProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
@@ -398,7 +408,7 @@ function App(): JSX.Element {
</PrivateRoute>
</ErrorModalProvider>
</NotificationProvider>
</CmdKProvider>
</KBarCommandPaletteProvider>
</CompatRouter>
</Router>
</ConfigProvider>

View File

@@ -295,10 +295,3 @@ export const MetricsExplorer = Loadable(
export const ApiMonitoring = Loadable(
() => import(/* webpackChunkName: "ApiMonitoring" */ 'pages/ApiMonitoring'),
);
export const PublicDashboardPage = Loadable(
() =>
import(
/* webpackChunkName: "Public Dashboard Page" */ 'pages/PublicDashboard'
),
);

View File

@@ -34,7 +34,6 @@ import {
OrgOnboarding,
PasswordReset,
PipelinePage,
PublicDashboardPage,
ServiceMapPage,
ServiceMetricsPage,
ServicesTablePage,
@@ -170,13 +169,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'DASHBOARD',
},
{
path: ROUTES.PUBLIC_DASHBOARD,
exact: false,
component: PublicDashboardPage,
isPrivate: false,
key: 'PUBLIC_DASHBOARD',
},
{
path: ROUTES.DASHBOARD_WIDGET,
exact: true,

View File

@@ -1,8 +1,6 @@
import { LogEventAxiosInstance as axios } from 'api';
import getLocalStorageApi from 'api/browser/localstorage/get';
import { ApiBaseInstance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { EventSuccessPayloadProps } from 'types/api/events/types';
@@ -13,14 +11,9 @@ const logEvent = async (
rateLimited?: boolean,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
try {
// add deployment_url and user_email to attributes
// add tenant_url to attributes
const { hostname } = window.location;
const userEmail = getLocalStorageApi(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
const updatedAttributes = {
...attributes,
deployment_url: hostname,
user_email: userEmail,
};
const updatedAttributes = { ...attributes, tenant_url: hostname };
const response = await axios.post('/event', {
eventName,
attributes: updatedAttributes,

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { CreatePublicDashboardProps } from 'types/api/dashboard/public/create';
const createPublicDashboard = async (
props: CreatePublicDashboardProps,
): Promise<SuccessResponseV2<CreatePublicDashboardProps>> => {
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = '30m' } = props;
try {
const response = await axios.post(
`/dashboards/${dashboardId}/public`,
{ timeRangeEnabled, defaultTimeRange },
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default createPublicDashboard;

View File

@@ -1,20 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetPublicDashboardDataProps, PayloadProps,PublicDashboardDataProps } from 'types/api/dashboard/public/get';
const getPublicDashboardData = async (props: GetPublicDashboardDataProps): Promise<SuccessResponseV2<PublicDashboardDataProps>> => {
try {
const response = await axios.get<PayloadProps>(`/public/dashboards/${props.id}`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getPublicDashboardData;

View File

@@ -1,20 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetPublicDashboardMetaProps, PayloadProps,PublicDashboardMetaProps } from 'types/api/dashboard/public/getMeta';
const getPublicDashboardMeta = async (props: GetPublicDashboardMetaProps): Promise<SuccessResponseV2<PublicDashboardMetaProps>> => {
try {
const response = await axios.get<PayloadProps>(`/dashboards/${props.id}/public`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getPublicDashboardMeta;

View File

@@ -1,27 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { MetricRangePayloadV5 } from 'api/v5/v5';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetPublicDashboardWidgetDataProps } from 'types/api/dashboard/public/getWidgetData';
const getPublicDashboardWidgetData = async (props: GetPublicDashboardWidgetDataProps): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
try {
const response = await axios.get(`/public/dashboards/${props.id}/widgets/${props.index}/query_range`, {
params: {
startTime: props.startTime,
endTime: props.endTime,
},
});
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getPublicDashboardWidgetData;

View File

@@ -1,22 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps,RevokePublicDashboardAccessProps } from 'types/api/dashboard/public/delete';
const revokePublicDashboardAccess = async (
props: RevokePublicDashboardAccessProps,
): Promise<SuccessResponseV2<PayloadProps>> => {
try {
const response = await axios.delete<PayloadProps>(`/dashboards/${props.id}/public`);
return {
httpStatusCode: response.status,
data: response.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default revokePublicDashboardAccess;

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { UpdatePublicDashboardProps } from 'types/api/dashboard/public/update';
const updatePublicDashboard = async (
props: UpdatePublicDashboardProps,
): Promise<SuccessResponseV2<UpdatePublicDashboardProps>> => {
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = '30m' } = props;
try {
const response = await axios.put(
`/dashboards/${dashboardId}/public`,
{ timeRangeEnabled, defaultTimeRange },
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default updatePublicDashboard;

View File

@@ -1,11 +1,13 @@
/* eslint-disable sonarjs/no-duplicate-string */
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { getFieldKeys } from '../getFieldKeys';
// Mock the API instance
jest.mock('api', () => ({
get: jest.fn(),
ApiBaseInstance: {
get: jest.fn(),
},
}));
describe('getFieldKeys API', () => {
@@ -29,33 +31,33 @@ describe('getFieldKeys API', () => {
it('should call API with correct parameters when no args provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call function with no parameters
await getFieldKeys();
// Verify API was called correctly with empty params object
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: {},
});
});
it('should call API with signal parameter when provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call function with signal parameter
await getFieldKeys('traces');
// Verify API was called with signal parameter
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'traces' },
});
});
it('should call API with name parameter when provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -70,14 +72,14 @@ describe('getFieldKeys API', () => {
await getFieldKeys(undefined, 'service');
// Verify API was called with name parameter
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { name: 'service' },
});
});
it('should call API with both signal and name when provided', async () => {
// Mock successful API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -92,14 +94,14 @@ describe('getFieldKeys API', () => {
await getFieldKeys('logs', 'service');
// Verify API was called with both parameters
expect(axios.get).toHaveBeenCalledWith('/fields/keys', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/keys', {
params: { signal: 'logs', name: 'service' },
});
});
it('should return properly formatted response', async () => {
// Mock API to return our response
(axios.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockSuccessResponse);
// Call the function
const result = await getFieldKeys('traces');

View File

@@ -1,11 +1,13 @@
/* eslint-disable sonarjs/no-duplicate-string */
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { getFieldValues } from '../getFieldValues';
// Mock the API instance
jest.mock('api', () => ({
get: jest.fn(),
ApiBaseInstance: {
get: jest.fn(),
},
}));
describe('getFieldValues API', () => {
@@ -15,7 +17,7 @@ describe('getFieldValues API', () => {
it('should call the API with correct parameters (no options)', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -32,14 +34,14 @@ describe('getFieldValues API', () => {
await getFieldValues();
// Verify API was called correctly with empty params
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: {},
});
});
it('should call the API with signal parameter', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -56,14 +58,14 @@ describe('getFieldValues API', () => {
await getFieldValues('traces');
// Verify API was called with signal parameter
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { signal: 'traces' },
});
});
it('should call the API with name parameter', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -80,14 +82,14 @@ describe('getFieldValues API', () => {
await getFieldValues(undefined, 'service.name');
// Verify API was called with name parameter
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name' },
});
});
it('should call the API with value parameter', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -104,14 +106,14 @@ describe('getFieldValues API', () => {
await getFieldValues(undefined, 'service.name', 'front');
// Verify API was called with value parameter
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: { name: 'service.name', searchText: 'front' },
});
});
it('should call the API with time range parameters', async () => {
// Mock API response
(axios.get as jest.Mock).mockResolvedValueOnce({
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce({
status: 200,
data: {
status: 'success',
@@ -136,7 +138,7 @@ describe('getFieldValues API', () => {
);
// Verify API was called with time range parameters (converted to milliseconds)
expect(axios.get).toHaveBeenCalledWith('/fields/values', {
expect(ApiBaseInstance.get).toHaveBeenCalledWith('/fields/values', {
params: {
signal: 'logs',
name: 'service.name',
@@ -163,7 +165,7 @@ describe('getFieldValues API', () => {
},
};
(axios.get as jest.Mock).mockResolvedValueOnce(mockResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockResponse);
// Call the function
const result = await getFieldValues('traces', 'mixed.values');
@@ -194,7 +196,7 @@ describe('getFieldValues API', () => {
};
// Mock API to return our response
(axios.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
(ApiBaseInstance.get as jest.Mock).mockResolvedValueOnce(mockApiResponse);
// Call the function
const result = await getFieldValues('traces', 'service.name');

View File

@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -24,7 +24,7 @@ export const getFieldKeys = async (
}
try {
const response = await axios.get('/fields/keys', { params });
const response = await ApiBaseInstance.get('/fields/keys', { params });
return {
httpStatusCode: response.status,

View File

@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -47,7 +47,7 @@ export const getFieldValues = async (
}
try {
const response = await axios.get('/fields/values', { params });
const response = await ApiBaseInstance.get('/fields/values', { params });
// Normalize values from different types (stringValues, boolValues, etc.)
if (response.data?.data?.values) {

View File

@@ -86,9 +86,8 @@ const interceptorRejected = async (
if (
response.status === 401 &&
// if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
response.config.url !== '/sessions/rotate' &&
response.config.url !== '/sessions/email_password' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
)
@@ -200,15 +199,15 @@ ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
//
// axios Base
export const LogEventAxiosInstance = axios.create({
export const ApiBaseInstance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});
LogEventAxiosInstance.interceptors.response.use(
ApiBaseInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejectedBase,
);
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1

View File

@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
@@ -17,7 +17,7 @@ export const getHostAttributeKeys = async (
try {
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await axios.get(
}> = await ApiBaseInstance.get(
`/${entity}/attribute_keys?dataSource=metrics&searchText=${searchText}`,
{
params: {

View File

@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -20,7 +20,7 @@ const getOnboardingStatus = async (props: {
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
const { endpointService, ...rest } = props;
try {
const response = await axios.post(
const response = await ApiBaseInstance.post(
`/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
rest,
);

View File

@@ -1,20 +1,13 @@
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import axios from 'api';
import { PayloadProps, Props } from 'types/api/metrics/getService';
const getService = async (props: Props): Promise<PayloadProps> => {
try {
const response = await ApiV2Instance.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
const response = await axios.post(`/services`, {
start: `${props.start}`,
end: `${props.end}`,
tags: props.selectedTags,
});
return response.data;
};
export default getService;

View File

@@ -1,27 +1,22 @@
import { ApiV2Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import axios from 'api';
import { PayloadProps, Props } from 'types/api/metrics/getTopOperations';
const getTopOperations = async (props: Props): Promise<PayloadProps> => {
try {
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';
const endpoint = props.isEntryPoint
? '/service/entry_point_operations'
: '/service/top_operations';
const response = await ApiV2Instance.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
limit: 5000,
});
const response = await axios.post(endpoint, {
start: `${props.start}`,
end: `${props.end}`,
service: props.service,
tags: props.selectedTags,
});
if (props.isEntryPoint) {
return response.data.data;
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
return response.data;
};
export default getTopOperations;

View File

@@ -9,7 +9,6 @@ export interface UpdateMetricMetadataProps {
metricType: MetricType;
temporality?: Temporality;
isMonotonic?: boolean;
unit?: string;
}
export interface UpdateMetricMetadataResponse {

View File

@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -9,7 +9,7 @@ const getCustomFilters = async (
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const { signal } = props;
try {
const response = await axios.get(`/orgs/me/filters/${signal}`);
const response = await ApiBaseInstance.get(`orgs/me/filters/${signal}`);
return {
statusCode: 200,

View File

@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFilters';
@@ -6,7 +6,7 @@ import { UpdateCustomFiltersProps } from 'types/api/quickFilters/updateCustomFil
const updateCustomFiltersAPI = async (
props: UpdateCustomFiltersProps,
): Promise<SuccessResponse<void> | AxiosError> =>
axios.put(`/orgs/me/filters`, {
ApiBaseInstance.put(`orgs/me/filters`, {
...props.data,
});

View File

@@ -8,7 +8,7 @@ const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDurationDays,
coldStorageDuration,
ttlConditions,
}: PropsV2): Promise<SuccessResponseV2<PayloadPropsV2>> => {
try {
@@ -16,7 +16,7 @@ const setRetentionV2 = async ({
type,
defaultTTLDays,
coldStorageVolume,
coldStorageDurationDays,
coldStorageDuration,
ttlConditions,
});

View File

@@ -1,4 +1,4 @@
import axios from 'api';
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
@@ -9,12 +9,15 @@ const listOverview = async (
): Promise<SuccessResponseV2<PayloadProps>> => {
const { start, end, show_ip: showIp, filter } = props;
try {
const response = await axios.post(`/third-party-apis/overview/list`, {
start,
end,
show_ip: showIp,
filter,
});
const response = await ApiBaseInstance.post(
`/third-party-apis/overview/list`,
{
start,
end,
show_ip: showIp,
filter,
},
);
return {
httpStatusCode: response.status,

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
GetSpanPercentilesProps,
GetSpanPercentilesResponseDataProps,
} from 'types/api/trace/getSpanPercentiles';
const getSpanPercentiles = async (
props: GetSpanPercentilesProps,
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
try {
const response = await axios.post('/span_percentile', {
...props,
});
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
throw error;
}
};
export default getSpanPercentiles;

View File

@@ -11,7 +11,7 @@ import {
export const getQueryRangeV5 = async (
props: QueryRangePayloadV5,
version: string,
signal?: AbortSignal,
signal: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
try {

View File

@@ -1,30 +1,30 @@
interface ConfigureIconProps {
width?: number;
height?: number;
color?: string;
fill?: string;
}
function ConfigureIcon({
width,
height,
color,
fill,
}: ConfigureIconProps): JSX.Element {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width={width}
height={height}
fill="none"
fill={fill}
>
<path
stroke={color}
stroke="#C0C1C3"
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="1.333"
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
/>
<path
stroke={color}
stroke="#C0C1C3"
strokeLinecap="round"
strokeWidth="1.333"
d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3m5.417 7.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"
@@ -36,6 +36,6 @@ function ConfigureIcon({
ConfigureIcon.defaultProps = {
width: 16,
height: 16,
color: 'currentColor',
fill: 'none',
};
export default ConfigureIcon;

View File

@@ -1,25 +0,0 @@
// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------
import '@signozhq/badge';
import '@signozhq/button';
import '@signozhq/calendar';
import '@signozhq/callout';
import '@signozhq/checkbox';
import '@signozhq/command';
import '@signozhq/design-tokens';
import '@signozhq/input';
import '@signozhq/popover';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/table';
import '@signozhq/tooltip';

View File

@@ -62,8 +62,6 @@ interface CustomTimePickerProps {
showLiveLogs?: boolean;
onGoLive?: () => void;
onExitLiveLogs?: () => void;
/** When false, hides the "Recently Used" time ranges section */
showRecentlyUsed?: boolean;
}
function CustomTimePicker({
@@ -83,7 +81,6 @@ function CustomTimePicker({
onGoLive,
onExitLiveLogs,
showLiveLogs,
showRecentlyUsed = true,
}: CustomTimePickerProps): JSX.Element {
const [
selectedTimePlaceholderValue,
@@ -398,7 +395,6 @@ function CustomTimePicker({
setActiveView={setActiveView}
setIsOpenedFromFooter={setIsOpenedFromFooter}
isOpenedFromFooter={isOpenedFromFooter}
showRecentlyUsed={showRecentlyUsed}
/>
) : (
content
@@ -468,5 +464,4 @@ CustomTimePicker.defaultProps = {
onCustomTimeStatusUpdate: noop,
onExitLiveLogs: noop,
showLiveLogs: false,
showRecentlyUsed: true,
};

View File

@@ -47,7 +47,6 @@ interface CustomTimePickerPopoverContentProps {
isOpenedFromFooter: boolean;
setIsOpenedFromFooter: Dispatch<SetStateAction<boolean>>;
onExitLiveLogs: () => void;
showRecentlyUsed: boolean;
}
interface RecentlyUsedDateTimeRange {
@@ -73,7 +72,6 @@ function CustomTimePickerPopoverContent({
isOpenedFromFooter,
setIsOpenedFromFooter,
onExitLiveLogs,
showRecentlyUsed = true,
}: CustomTimePickerPopoverContentProps): JSX.Element {
const { pathname } = useLocation();
@@ -226,35 +224,33 @@ function CustomTimePickerPopoverContent({
<div>{getTimeChips(RelativeDurationSuggestionOptions)}</div>
</div>
{showRecentlyUsed && (
<div className="recently-used-container">
<div className="time-heading">RECENTLY USED</div>
<div className="recently-used-range">
{recentlyUsedTimeRanges.map((range: RecentlyUsedDateTimeRange) => (
<div
className="recently-used-range-item"
role="button"
tabIndex={0}
onKeyDown={(e): void => {
if (e.key === 'Enter' || e.key === ' ') {
handleExitLiveLogs();
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
setIsOpen(false);
}
}}
key={range.value}
onClick={(): void => {
<div className="recently-used-container">
<div className="time-heading">RECENTLY USED</div>
<div className="recently-used-range">
{recentlyUsedTimeRanges.map((range: RecentlyUsedDateTimeRange) => (
<div
className="recently-used-range-item"
role="button"
tabIndex={0}
onKeyDown={(e): void => {
if (e.key === 'Enter' || e.key === ' ') {
handleExitLiveLogs();
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
setIsOpen(false);
}}
>
{range.label}
</div>
))}
</div>
}
}}
key={range.value}
onClick={(): void => {
handleExitLiveLogs();
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
setIsOpen(false);
}}
>
{range.label}
</div>
))}
</div>
)}
</div>
</div>
)}
</div>

View File

@@ -1,372 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { PrecisionOptionsEnum } from '../types';
import { getYAxisFormattedValue } from '../yAxisConfig';
const testFullPrecisionGetYAxisFormattedValue = (
value: string,
format: string,
): string => getYAxisFormattedValue(value, format, PrecisionOptionsEnum.FULL);
describe('getYAxisFormattedValue - none (full precision legacy assertions)', () => {
test('large integers and decimals', () => {
expect(testFullPrecisionGetYAxisFormattedValue('250034', 'none')).toBe(
'250034',
);
expect(
testFullPrecisionGetYAxisFormattedValue('250034897.12345', 'none'),
).toBe('250034897.12345');
expect(
testFullPrecisionGetYAxisFormattedValue('250034897.02354', 'none'),
).toBe('250034897.02354');
expect(testFullPrecisionGetYAxisFormattedValue('9999999.9999', 'none')).toBe(
'9999999.9999',
);
});
test('preserves leading zeros after decimal until first non-zero', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1.0000234', 'none')).toBe(
'1.0000234',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.00003', 'none')).toBe(
'0.00003',
);
});
test('trims to three significant decimals and removes trailing zeros', () => {
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'none'),
).toBe('0.000000250034');
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'none')).toBe(
'0.00000025',
);
// Big precision, limiting the javascript precision (~16 digits)
expect(
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'none'),
).toBe('1');
expect(
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'none'),
).toBe('1.005555555595958');
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
'0.000000001',
);
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250000', 'none'),
).toBe('0.00000025');
});
test('whole numbers normalize', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'none')).toBe('1000');
expect(testFullPrecisionGetYAxisFormattedValue('99.5458', 'none')).toBe(
'99.5458',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.234567', 'none')).toBe(
'1.234567',
);
expect(testFullPrecisionGetYAxisFormattedValue('99.998', 'none')).toBe(
'99.998',
);
});
test('strip redundant decimal zeros', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000.000', 'none')).toBe(
'1000',
);
expect(testFullPrecisionGetYAxisFormattedValue('99.500', 'none')).toBe(
'99.5',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.000', 'none')).toBe('1');
});
test('edge values', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'none')).toBe('0');
expect(testFullPrecisionGetYAxisFormattedValue('-0', 'none')).toBe('0');
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'none')).toBe('∞');
expect(testFullPrecisionGetYAxisFormattedValue('-Infinity', 'none')).toBe(
'-∞',
);
expect(testFullPrecisionGetYAxisFormattedValue('invalid', 'none')).toBe(
'NaN',
);
expect(testFullPrecisionGetYAxisFormattedValue('', 'none')).toBe('NaN');
expect(testFullPrecisionGetYAxisFormattedValue('abc123', 'none')).toBe('NaN');
});
test('small decimals keep precision as-is', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'none')).toBe(
'0.0001',
);
expect(testFullPrecisionGetYAxisFormattedValue('-0.0001', 'none')).toBe(
'-0.0001',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.000000001', 'none')).toBe(
'0.000000001',
);
});
test('simple decimals preserved', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.1', 'none')).toBe('0.1');
expect(testFullPrecisionGetYAxisFormattedValue('0.2', 'none')).toBe('0.2');
expect(testFullPrecisionGetYAxisFormattedValue('0.3', 'none')).toBe('0.3');
expect(testFullPrecisionGetYAxisFormattedValue('1.0000000001', 'none')).toBe(
'1.0000000001',
);
});
});
describe('getYAxisFormattedValue - units (full precision legacy assertions)', () => {
test('ms', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'ms')).toBe('1.5 s');
expect(testFullPrecisionGetYAxisFormattedValue('500', 'ms')).toBe('500 ms');
expect(testFullPrecisionGetYAxisFormattedValue('60000', 'ms')).toBe('1 min');
expect(testFullPrecisionGetYAxisFormattedValue('295.429', 'ms')).toBe(
'295.429 ms',
);
expect(testFullPrecisionGetYAxisFormattedValue('4353.81', 'ms')).toBe(
'4.35381 s',
);
});
test('s', () => {
expect(testFullPrecisionGetYAxisFormattedValue('90', 's')).toBe('1.5 mins');
expect(testFullPrecisionGetYAxisFormattedValue('30', 's')).toBe('30 s');
expect(testFullPrecisionGetYAxisFormattedValue('3600', 's')).toBe('1 hour');
});
test('m', () => {
expect(testFullPrecisionGetYAxisFormattedValue('90', 'm')).toBe('1.5 hours');
expect(testFullPrecisionGetYAxisFormattedValue('30', 'm')).toBe('30 min');
expect(testFullPrecisionGetYAxisFormattedValue('1440', 'm')).toBe('1 day');
});
test('bytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'bytes')).toBe(
'1 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'bytes')).toBe('512 B');
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'bytes')).toBe(
'1.5 KiB',
);
});
test('mbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'mbytes')).toBe(
'1 GiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'mbytes')).toBe(
'512 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'mbytes')).toBe(
'1.5 GiB',
);
});
test('kbytes', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1024', 'kbytes')).toBe(
'1 MiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('512', 'kbytes')).toBe(
'512 KiB',
);
expect(testFullPrecisionGetYAxisFormattedValue('1536', 'kbytes')).toBe(
'1.5 MiB',
);
});
test('short', () => {
expect(testFullPrecisionGetYAxisFormattedValue('1000', 'short')).toBe('1 K');
expect(testFullPrecisionGetYAxisFormattedValue('1500', 'short')).toBe(
'1.5 K',
);
expect(testFullPrecisionGetYAxisFormattedValue('999', 'short')).toBe('999');
expect(testFullPrecisionGetYAxisFormattedValue('1000000', 'short')).toBe(
'1 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1555600', 'short')).toBe(
'1.5556 Mil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999', 'short')).toBe(
'999.999 K',
);
expect(testFullPrecisionGetYAxisFormattedValue('1000000000', 'short')).toBe(
'1 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('1500000000', 'short')).toBe(
'1.5 Bil',
);
expect(testFullPrecisionGetYAxisFormattedValue('999999999', 'short')).toBe(
'999.999999 Mil',
);
});
test('percent', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.15', 'percent')).toBe(
'0.15%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.1234', 'percent')).toBe(
'0.1234%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.123499', 'percent')).toBe(
'0.123499%',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.5', 'percent')).toBe(
'1.5%',
);
expect(testFullPrecisionGetYAxisFormattedValue('0.0001', 'percent')).toBe(
'0.0001%',
);
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000001', 'percent'),
).toBe('1e-9%');
expect(
testFullPrecisionGetYAxisFormattedValue('0.000000250034', 'percent'),
).toBe('0.000000250034%');
expect(testFullPrecisionGetYAxisFormattedValue('0.00000025', 'percent')).toBe(
'0.00000025%',
);
// Big precision, limiting the javascript precision (~16 digits)
expect(
testFullPrecisionGetYAxisFormattedValue('1.0000000000000001', 'percent'),
).toBe('1%');
expect(
testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'percent'),
).toBe('1.005555555595959%');
});
test('ratio', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0.5', 'ratio')).toBe(
'0.5 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('1.25', 'ratio')).toBe(
'1.25 ratio',
);
expect(testFullPrecisionGetYAxisFormattedValue('2.0', 'ratio')).toBe(
'2 ratio',
);
});
test('temperature units', () => {
expect(testFullPrecisionGetYAxisFormattedValue('25', 'celsius')).toBe(
'25 °C',
);
expect(testFullPrecisionGetYAxisFormattedValue('0', 'celsius')).toBe('0 °C');
expect(testFullPrecisionGetYAxisFormattedValue('-10', 'celsius')).toBe(
'-10 °C',
);
expect(testFullPrecisionGetYAxisFormattedValue('77', 'fahrenheit')).toBe(
'77 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('32', 'fahrenheit')).toBe(
'32 °F',
);
expect(testFullPrecisionGetYAxisFormattedValue('14', 'fahrenheit')).toBe(
'14 °F',
);
});
test('ms edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'ms')).toBe('0 ms');
expect(testFullPrecisionGetYAxisFormattedValue('-1500', 'ms')).toBe('-1.5 s');
expect(testFullPrecisionGetYAxisFormattedValue('Infinity', 'ms')).toBe('∞');
});
test('bytes edge cases', () => {
expect(testFullPrecisionGetYAxisFormattedValue('0', 'bytes')).toBe('0 B');
expect(testFullPrecisionGetYAxisFormattedValue('-1024', 'bytes')).toBe(
'-1 KiB',
);
});
});
describe('getYAxisFormattedValue - precision option tests', () => {
test('precision 0 drops decimal part', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 0)).toBe('1');
expect(getYAxisFormattedValue('0.9999', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('12345.6789', 'none', 0)).toBe('12345');
expect(getYAxisFormattedValue('0.0000123456', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1000.000', 'none', 0)).toBe('1000');
expect(getYAxisFormattedValue('0.000000250034', 'none', 0)).toBe('0');
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 0)).toBe('1');
// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 0)).toBe('4 s');
});
test('precision 1,2,3,4 decimals', () => {
expect(getYAxisFormattedValue('1.2345', 'none', 1)).toBe('1.2');
expect(getYAxisFormattedValue('1.2345', 'none', 2)).toBe('1.23');
expect(getYAxisFormattedValue('1.2345', 'none', 3)).toBe('1.234');
expect(getYAxisFormattedValue('1.2345', 'none', 4)).toBe('1.2345');
expect(getYAxisFormattedValue('0.0000123456', 'none', 1)).toBe('0.00001');
expect(getYAxisFormattedValue('0.0000123456', 'none', 2)).toBe('0.000012');
expect(getYAxisFormattedValue('0.0000123456', 'none', 3)).toBe('0.0000123');
expect(getYAxisFormattedValue('0.0000123456', 'none', 4)).toBe('0.00001234');
expect(getYAxisFormattedValue('1000.000', 'none', 1)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 2)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 3)).toBe('1000');
expect(getYAxisFormattedValue('1000.000', 'none', 4)).toBe('1000');
expect(getYAxisFormattedValue('0.000000250034', 'none', 1)).toBe('0.0000002');
expect(getYAxisFormattedValue('0.000000250034', 'none', 2)).toBe(
'0.00000025',
); // leading zeros + 2 significant => same trimmed
expect(getYAxisFormattedValue('0.000000250034', 'none', 3)).toBe(
'0.00000025',
);
expect(getYAxisFormattedValue('0.000000250304', 'none', 4)).toBe(
'0.0000002503',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 1)).toBe(
'1.005',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 2)).toBe(
'1.0055',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 3)).toBe(
'1.00555',
);
expect(getYAxisFormattedValue('1.00555555559595876', 'none', 4)).toBe(
'1.005555',
);
// with unit
expect(getYAxisFormattedValue('4353.81', 'ms', 1)).toBe('4.4 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 2)).toBe('4.35 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 3)).toBe('4.354 s');
expect(getYAxisFormattedValue('4353.81', 'ms', 4)).toBe('4.3538 s');
// Percentages
expect(getYAxisFormattedValue('0.123456', 'percent', 2)).toBe('0.12%');
expect(getYAxisFormattedValue('0.123456', 'percent', 4)).toBe('0.1235%'); // approximation
});
test('precision full uses up to DEFAULT_SIGNIFICANT_DIGITS significant digits', () => {
expect(
getYAxisFormattedValue(
'0.00002625429914148441',
'none',
PrecisionOptionsEnum.FULL,
),
).toBe('0.000026254299141');
expect(
getYAxisFormattedValue(
'0.000026254299141484417',
's',
PrecisionOptionsEnum.FULL,
),
).toBe('26.254299141484417 µs');
expect(
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
).toBe('4.35381 s');
expect(getYAxisFormattedValue('500', 'ms', PrecisionOptionsEnum.FULL)).toBe(
'500 ms',
);
});
});

View File

@@ -78,18 +78,3 @@ export interface ITimeRange {
minTime: number | null;
maxTime: number | null;
}
export const DEFAULT_SIGNIFICANT_DIGITS = 15;
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
export const MAX_DECIMALS = 15;
export enum PrecisionOptionsEnum {
ZERO = 0,
ONE = 1,
TWO = 2,
THREE = 3,
FOUR = 4,
FULL = 'full',
}
export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
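
For context on how these precision options are consumed, here is a minimal sketch based on the expectations in the deleted yAxisConfig.test.ts earlier in this diff; the relative import paths mirror that test file and are otherwise an assumption.

import { PrecisionOptionsEnum } from '../types';
import { getYAxisFormattedValue } from '../yAxisConfig';

// A numeric precision caps the significant decimal digits kept after any
// leading zeros; FULL keeps up to DEFAULT_SIGNIFICANT_DIGITS (15) of them.
getYAxisFormattedValue('1.2345', 'none', PrecisionOptionsEnum.TWO); // '1.23'
getYAxisFormattedValue('0.0000123456', 'none', 2); // '0.000012'
getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL); // '4.35381 s'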

View File

@@ -16,12 +16,8 @@ import {
} from './Plugin/IntersectionCursor';
import {
CustomChartOptions,
DEFAULT_SIGNIFICANT_DIGITS,
GraphOnClickHandler,
IAxisTimeConfig,
MAX_DECIMALS,
PrecisionOption,
PrecisionOptionsEnum,
StaticLineProps,
} from './types';
import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';
@@ -153,7 +149,6 @@ export const getGraphOptions = (
scales: {
x: {
stacked: isStacked,
offset: false,
grid: {
display: true,
color: getGridColor(),
@@ -246,68 +241,3 @@ declare module 'chart.js' {
custom: TooltipPositionerFunction<ChartType>;
}
}
/**
* Formats a number for display, preserving leading zeros after the decimal point
* and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
* It avoids scientific notation and removes unnecessary trailing zeros.
*
* @example
* formatDecimalWithLeadingZeros(1.2345); // "1.2345"
* formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
* formatDecimalWithLeadingZeros(5.0); // "5"
*
* @param value The number to format.
* @returns The formatted string.
*/
export const formatDecimalWithLeadingZeros = (
value: number,
precision: PrecisionOption,
): string => {
if (value === 0) {
return '0';
}
// Use toLocaleString to get a full decimal representation without scientific notation.
const numStr = value.toLocaleString('en-US', {
useGrouping: false,
maximumFractionDigits: 20,
});
const [integerPart, decimalPart = ''] = numStr.split('.');
// If there's no decimal part, the integer part is the result.
if (!decimalPart) {
return integerPart;
}
// Find the index of the first non-zero digit in the decimal part.
const firstNonZeroIndex = decimalPart.search(/[^0]/);
// If the decimal part consists only of zeros, return just the integer part.
if (firstNonZeroIndex === -1) {
return integerPart;
}
// Determine the number of decimals to keep: leading zeros + up to N significant digits.
const significantDigits =
precision === PrecisionOptionsEnum.FULL
? DEFAULT_SIGNIFICANT_DIGITS
: precision;
const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
// If precision is 0, we drop the decimal part entirely.
if (precision === 0) {
return integerPart;
}
// Remove any trailing zeros from the result to keep it clean.
const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
// Return the integer part, or the integer and decimal parts combined.
return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
};
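A quick usage sketch (illustrative calls against the function above; the expected outputs follow directly from its logic and are not taken from the diff):

// Illustrative only — not part of the diff.
formatDecimalWithLeadingZeros(0.0012345, 2); // "0.0012"  (2 significant digits kept after the leading zeros)
formatDecimalWithLeadingZeros(0.0012345, PrecisionOptionsEnum.FULL); // "0.0012345"
formatDecimalWithLeadingZeros(4.50001, 2); // "4.5"  (trailing zeros trimmed after truncation)
formatDecimalWithLeadingZeros(123.456, 0); // "123"  (precision 0 drops the decimal part)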

View File

@@ -1,97 +1,58 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { formattedValueToString, getValueFormat } from '@grafana/data';
import * as Sentry from '@sentry/react';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { isUniversalUnit } from 'components/YAxisUnitSelector/utils';
import { isNaN } from 'lodash-es';
import { formatUniversalUnit } from '../YAxisUnitSelector/formatter';
import {
DEFAULT_SIGNIFICANT_DIGITS,
PrecisionOption,
PrecisionOptionsEnum,
} from './types';
import { formatDecimalWithLeadingZeros } from './utils';
/**
* Formats a Y-axis value based on a given format string.
*
* @param value The string value from the axis.
* @param format The format identifier (e.g. 'none', 'ms', 'bytes', 'short').
* @param precision Precision option controlling the decimal digits shown: 0 to 4, or 'full' (defaults to 2).
* @returns A formatted string ready for display.
*/
export const getYAxisFormattedValue = (
value: string,
format: string,
precision: PrecisionOption = 2, // default precision requested
): string => {
const numValue = parseFloat(value);
// Handle non-numeric or special values first.
if (isNaN(numValue)) return 'NaN';
if (numValue === Infinity) return '∞';
if (numValue === -Infinity) return '-∞';
// For all other standard formats, delegate to grafana/data's built-in formatter.
const computeDecimals = (): number | undefined => {
if (precision === PrecisionOptionsEnum.FULL) {
return DEFAULT_SIGNIFICANT_DIGITS;
}
return precision;
};
const fallbackFormat = (): string => {
if (precision === PrecisionOptionsEnum.FULL) return numValue.toString();
if (precision === 0) return Math.round(numValue).toString();
return precision !== undefined
? numValue
.toFixed(precision)
.replace(/(\.[0-9]*[1-9])0+$/, '$1') // trimming zeros
.replace(/\.$/, '')
: numValue.toString();
};
let decimalPrecision: number | undefined;
const parsedValue = getValueFormat(format)(
parseFloat(value),
undefined,
undefined,
undefined,
);
try {
// Use custom formatter for the 'none' format honoring precision
if (format === 'none') {
return formatDecimalWithLeadingZeros(numValue, precision);
const decimalSplitted = parsedValue.text.split('.');
if (decimalSplitted.length === 1) {
decimalPrecision = 0;
} else {
const decimalDigits = decimalSplitted[1].split('');
decimalPrecision = decimalDigits.length;
let nonZeroCtr = 0;
for (let idx = 0; idx < decimalDigits.length; idx += 1) {
if (decimalDigits[idx] !== '0') {
nonZeroCtr += 1;
if (nonZeroCtr >= 2) {
decimalPrecision = idx + 1;
}
} else if (nonZeroCtr) {
decimalPrecision = idx;
break;
}
}
}
// Separate logic for universal units
if (format && isUniversalUnit(format)) {
const decimals = computeDecimals();
return formatUniversalUnit(
numValue,
format as UniversalYAxisUnit,
precision,
decimals,
);
}
const formatter = getValueFormat(format);
const formattedValue = formatter(numValue, computeDecimals(), undefined);
if (formattedValue.text && formattedValue.text.includes('.')) {
formattedValue.text = formatDecimalWithLeadingZeros(
parseFloat(formattedValue.text),
precision,
);
}
return formattedValueToString(formattedValue);
return formattedValueToString(
getValueFormat(format)(
parseFloat(value),
decimalPrecision,
undefined,
undefined,
),
);
} catch (error) {
Sentry.captureEvent({
message: `Error applying formatter: ${
error instanceof Error ? error.message : 'Unknown error'
}`,
level: 'error',
});
return fallbackFormat();
console.error(error);
}
return `${parseFloat(value)}`;
};
export const getToolTipValue = (
value: string | number,
format?: string,
precision?: PrecisionOption,
): string =>
getYAxisFormattedValue(value?.toString(), format || 'none', precision);
export const getToolTipValue = (value: string, format?: string): string => {
try {
return formattedValueToString(
getValueFormat(format)(parseFloat(value), undefined, undefined, undefined),
);
} catch (error) {
console.error(error);
}
return `${value}`;
};
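For orientation, a standalone restatement (illustrative, assuming the same semantics as the restored scan above) of how decimalPrecision is chosen: the scan walks the decimal digits and stops at the first zero that follows a non-zero digit.

// Illustrative only — mirrors the restored decimalPrecision scan in getYAxisFormattedValue above.
function decimalPrecisionOf(text: string): number {
	const [, decimals = ''] = text.split('.');
	if (!decimals) return 0;
	let precision = decimals.length;
	let nonZeroCtr = 0;
	for (let idx = 0; idx < decimals.length; idx += 1) {
		if (decimals[idx] !== '0') {
			nonZeroCtr += 1;
			if (nonZeroCtr >= 2) precision = idx + 1;
		} else if (nonZeroCtr) {
			precision = idx;
			break;
		}
	}
	return precision;
}
decimalPrecisionOf('0.1204'); // 2 (stops at the zero after "12")
decimalPrecisionOf('0.05007'); // 2 (first non-zero is "5", then a zero -> stop)
decimalPrecisionOf('0.333'); // 3 (no zero after the non-zero digits, keep them all)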

View File

@@ -60,14 +60,6 @@ function Metrics({
setElement,
} = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
}>({
scrollTop: 0,
scrollLeft: 0,
});
const queryPayloads = useMemo(
() =>
getHostQueryPayload(
@@ -155,13 +147,6 @@ function Metrics({
maxTimeScale: graphTimeIntervals[idx].end,
onDragSelect: (start, end) => onDragSelect(start, end, idx),
query: currentQuery,
legendScrollPosition: legendScrollPositionRef.current,
setLegendScrollPosition: (position: {
scrollTop: number;
scrollLeft: number;
}) => {
legendScrollPositionRef.current = position;
},
}),
),
[

View File

@@ -37,6 +37,7 @@
border-radius: 2px 0px 0px 2px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
border-right: none;
border-left: none;
@@ -44,12 +45,6 @@
border-bottom-right-radius: 0px;
border-top-left-radius: 0px;
border-bottom-left-radius: 0px;
font-size: 12px !important;
line-height: 27px;
&::placeholder {
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
}
.close-btn {

View File

@@ -0,0 +1,152 @@
.kbar-command-palette__positioner {
position: fixed;
inset: 0;
display: flex;
align-items: flex-start;
justify-content: center;
padding: 1rem;
background: rgba(0, 0, 0, 0.5);
backdrop-filter: blur(6px);
z-index: 50;
}
.kbar-command-palette__animator {
width: 100%;
max-width: 600px;
}
.kbar-command-palette__card {
background: var(--bg-ink-500);
color: var(--text-vanilla-100);
border-radius: 3px;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.15);
overflow: hidden;
display: flex;
flex-direction: column;
}
.kbar-command-palette__search {
padding: 12px 16px;
font-size: 13px;
border: none;
border-bottom: 1px solid var(--border-ink-200);
color: var(--text-vanilla-100);
outline: none;
background-color: var(--bg-ink-500);
}
.kbar-command-palette__section {
padding: 8px 16px 4px;
font-size: 12px;
font-weight: 600;
color: var(--text-robin-500);
font-family: 'Inter', sans-serif;
text-transform: uppercase;
letter-spacing: 0.02em;
}
.kbar-command-palette__item {
display: flex;
align-items: center;
gap: 8px;
padding: 10px 16px;
font-size: 13px;
cursor: pointer;
transition: background 0.15s ease;
}
.kbar-command-palette__item:hover,
.kbar-command-palette__item--active {
background: var(--bg-ink-400);
}
.kbar-command-palette__icon {
flex-shrink: 0;
width: 18px;
height: 18px;
color: #444;
}
.kbar-command-palette__shortcut {
margin-left: auto;
display: flex;
gap: 4px;
}
.kbar-command-palette__key {
padding: 2px 6px;
font-size: 12px;
border-radius: 4px;
background: var(--bg-ink-300);
color: var(--text-vanilla-300);
text-transform: uppercase;
font-family: 'Space Mono', monospace;
}
.kbar-command-palette__results-container {
div {
&::-webkit-scrollbar {
width: 0.3rem;
height: 0.3rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-300);
}
&::-webkit-scrollbar-thumb:hover {
background: var(--bg-slate-200);
}
}
}
.lightMode {
.kbar-command-palette__positioner {
background: rgba(0, 0, 0, 0.5);
}
.kbar-command-palette__card {
background: #fff;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.15);
}
.kbar-command-palette__search {
border-bottom: 1px solid #e5e5e5;
color: var(--text-ink-500);
background-color: var(--bg-vanilla-100);
}
.kbar-command-palette__item {
color: var(--text-ink-500);
}
.kbar-command-palette__item:hover,
.kbar-command-palette__item--active {
background: #f5f5f5;
}
.kbar-command-palette__icon {
color: #444;
}
.kbar-command-palette__key {
background: #eee;
color: #555;
}
.kbar-command-palette__results-container {
div {
&::-webkit-scrollbar-thumb {
background: var(--bg-vanilla-300);
}
&::-webkit-scrollbar-thumb:hover {
background: var(--bg-vanilla-300);
}
}
}
}

View File

@@ -0,0 +1,69 @@
import './KBarCommandPalette.scss';
import {
KBarAnimator,
KBarPortal,
KBarPositioner,
KBarResults,
KBarSearch,
useMatches,
} from 'kbar';
function Results(): JSX.Element {
const { results } = useMatches();
const renderResults = ({
item,
active,
}: {
item: any;
active: boolean;
}): JSX.Element =>
typeof item === 'string' ? (
<div className="kbar-command-palette__section">{item}</div>
) : (
<div
className={`kbar-command-palette__item ${
active ? 'kbar-command-palette__item--active' : ''
}`}
>
{item.icon}
<span>{item.name}</span>
{item.shortcut?.length ? (
<span className="kbar-command-palette__shortcut">
{item.shortcut.map((sc: string) => (
<kbd key={sc} className="kbar-command-palette__key">
{sc}
</kbd>
))}
</span>
) : null}
</div>
);
return (
<div className="kbar-command-palette__results-container">
<KBarResults items={results} onRender={renderResults} />
</div>
);
}
function KBarCommandPalette(): JSX.Element {
return (
<KBarPortal>
<KBarPositioner className="kbar-command-palette__positioner">
<KBarAnimator className="kbar-command-palette__animator">
<div className="kbar-command-palette__card">
<KBarSearch
className="kbar-command-palette__search"
placeholder="Search or type a command..."
/>
<Results />
</div>
</KBarAnimator>
</KBarPositioner>
</KBarPortal>
);
}
export default KBarCommandPalette;
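A minimal mounting sketch (illustrative; the provider wiring, action id, and route below are hypothetical and not taken from this diff) showing how the palette could be rendered under kbar's KBarProvider:

// Illustrative only — KBarProvider is kbar's public API; the action and route are made up.
import { ReactNode } from 'react';
import { KBarProvider } from 'kbar';
import KBarCommandPalette from './KBarCommandPalette';

function AppWithCommandPalette({ children }: { children: ReactNode }): JSX.Element {
	const actions = [
		{
			id: 'go-dashboards',
			name: 'Go to Dashboards',
			shortcut: ['g', 'd'],
			keywords: 'dashboards navigate',
			perform: (): void => window.location.assign('/dashboard'),
		},
	];
	return (
		<KBarProvider actions={actions}>
			{children}
			<KBarCommandPalette />
		</KBarProvider>
	);
}
export default AppWithCommandPalette;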

View File

@@ -132,9 +132,9 @@
justify-content: center;
}
.log-detail-drawer__actions {
.json-action-btn {
display: flex;
gap: 4px;
gap: 8px;
}
}

View File

@@ -319,35 +319,31 @@ function LogDetailInner({
</Radio.Button>
</Radio.Group>
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title="Show Filters"
placement="topLeft"
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
)}
<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
>
{selectedView === VIEW_TYPES.JSON && (
<div className="json-action-btn">
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
onClick={handleJSONCopy}
/>
</Tooltip>
</div>
</div>
)}
{selectedView === VIEW_TYPES.CONTEXT && (
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
)}
<Tooltip title="Copy Log Link" placement="left" aria-label="Copy Log Link">
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={onLogCopy}
/>
</Tooltip>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
@@ -387,8 +383,7 @@ function LogDetailInner({
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
logLineTimestamp={log.timestamp.toString()}
/>
)}
</Drawer>

View File

@@ -6,7 +6,6 @@ import { useCopyToClipboard } from 'react-use';
function CopyClipboardHOC({
entityKey,
textToCopy,
tooltipText = 'Copy to clipboard',
children,
}: CopyClipboardHOCProps): JSX.Element {
const [value, setCopy] = useCopyToClipboard();
@@ -32,7 +31,7 @@ function CopyClipboardHOC({
<span onClick={onClick} role="presentation" tabIndex={-1}>
<Popover
placement="top"
content={<span style={{ fontSize: '0.9rem' }}>{tooltipText}</span>}
content={<span style={{ fontSize: '0.9rem' }}>Copy to clipboard</span>}
>
{children}
</Popover>
@@ -43,11 +42,7 @@ function CopyClipboardHOC({
interface CopyClipboardHOCProps {
entityKey: string | undefined;
textToCopy: string;
tooltipText?: string;
children: ReactNode;
}
export default CopyClipboardHOC;
CopyClipboardHOC.defaultProps = {
tooltipText: 'Copy to clipboard',
};

View File

@@ -57,8 +57,8 @@ export const RawLogViewContainer = styled(Row)<{
transition: background-color 2s ease-in;`
: ''}
${({ $isCustomHighlighted }): string =>
getCustomHighlightBackground($isCustomHighlighted)}
${({ $isCustomHighlighted, $isDarkMode, $logType }): string =>
getCustomHighlightBackground($isCustomHighlighted, $isDarkMode, $logType)}
`;
export const InfoIconWrapper = styled(Info)`

View File

@@ -153,9 +153,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
children: (
<TableBodyContent
dangerouslySetInnerHTML={{
__html: getSanitizedLogBody(field as string, {
shouldEscapeHtml: true,
}),
__html: getSanitizedLogBody(field as string),
}}
fontSize={fontSize}
linesPerRow={linesPerRow}

View File

@@ -471,13 +471,11 @@ function LogsFormatOptionsMenu({
rootClassName="format-options-popover"
destroyTooltipOnHide
>
<Tooltip title="Options">
<Button
className="periscope-btn ghost"
icon={<Sliders size={14} />}
data-testid="periscope-btn-format-options"
/>
</Tooltip>
<Button
className="periscope-btn ghost"
icon={<Sliders size={14} />}
data-testid="periscope-btn-format-options"
/>
</Popover>
);
}

View File

@@ -32,7 +32,6 @@ import { popupContainer } from 'utils/selectPopupContainer';
import { CustomMultiSelectProps, CustomTagProps, OptionData } from './types';
import {
ALL_SELECTED_VALUE,
filterOptionsBySearch,
handleScrollToBottom,
prioritizeOrAddOptionForMultiSelect,
@@ -44,6 +43,8 @@ enum ToggleTagValue {
All = 'All',
}
const ALL_SELECTED_VALUE = '__ALL__'; // Constant for the special value
const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
placeholder = 'Search...',
className,

View File

@@ -5,8 +5,6 @@ import { OptionData } from './types';
export const SPACEKEY = ' ';
export const ALL_SELECTED_VALUE = '__ALL__'; // Constant for the special value
export const prioritizeOrAddOptionForSingleSelect = (
options: OptionData[],
value: string,

View File

@@ -1,16 +0,0 @@
.overflow-input {
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
}
.overflow-input-mirror {
position: absolute;
visibility: hidden;
white-space: pre;
pointer-events: none;
font: inherit;
letter-spacing: inherit;
height: 0;
overflow: hidden;
}

View File

@@ -1,119 +0,0 @@
import { render, screen, userEvent, waitFor, within } from 'tests/test-utils';
import OverflowInputToolTip from './OverflowInputToolTip';
const TOOLTIP_INNER_SELECTOR = '.ant-tooltip-inner';
// Utility to mock overflow behaviour on inputs / elements.
// Stubs HTMLElement.prototype.clientWidth, scrollWidth and offsetWidth used by component.
function mockOverflow(clientWidth: number, scrollWidth: number): void {
Object.defineProperty(HTMLElement.prototype, 'clientWidth', {
configurable: true,
value: clientWidth,
});
Object.defineProperty(HTMLElement.prototype, 'scrollWidth', {
configurable: true,
value: scrollWidth,
});
// mirror.offsetWidth is used to compute mirrorWidth = offsetWidth + 24.
// Use clientWidth so the mirror measurement aligns with the mocked client width in tests.
Object.defineProperty(HTMLElement.prototype, 'offsetWidth', {
configurable: true,
value: clientWidth,
});
}
function queryTooltipInner(): HTMLElement | null {
// find element that has role="tooltip" (could be the inner itself)
const tooltip = document.querySelector<HTMLElement>('[role="tooltip"]');
if (!tooltip) return document.querySelector(TOOLTIP_INNER_SELECTOR);
// if the role element is already the inner, return it; otherwise return its descendant
if (tooltip.classList.contains('ant-tooltip-inner')) return tooltip;
return (
(tooltip.querySelector(TOOLTIP_INNER_SELECTOR) as HTMLElement) ??
document.querySelector(TOOLTIP_INNER_SELECTOR)
);
}
describe('OverflowInputToolTip', () => {
beforeEach(() => {
jest.restoreAllMocks();
});
test('shows tooltip when content overflows and input is clamped at maxAutoWidth', async () => {
mockOverflow(150, 250); // clientWidth >= maxAutoWidth (150), scrollWidth > clientWidth
render(<OverflowInputToolTip value="Very long overflowing text" />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).not.toBeNull();
});
const tooltipInner = queryTooltipInner();
if (!tooltipInner) throw new Error('Tooltip inner not found');
expect(
within(tooltipInner).getByText('Very long overflowing text'),
).toBeInTheDocument();
});
test('does NOT show tooltip when content does not overflow', async () => {
mockOverflow(150, 100); // content fits (scrollWidth <= clientWidth)
render(<OverflowInputToolTip value="Short text" />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).toBeNull();
});
});
test('does NOT show tooltip when content overflows but input is NOT at maxAutoWidth', async () => {
mockOverflow(100, 250); // clientWidth < maxAutoWidth (150), scrollWidth > clientWidth
render(<OverflowInputToolTip value="Long but input not clamped" />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).toBeNull();
});
});
test('uncontrolled input allows typing', async () => {
render(<OverflowInputToolTip defaultValue="Init" />);
const input = screen.getByRole('textbox') as HTMLInputElement;
await userEvent.type(input, 'ABC');
expect(input).toHaveValue('InitABC');
});
test('disabled input never shows tooltip even if overflowing', async () => {
mockOverflow(150, 300);
render(<OverflowInputToolTip value="Overflowing!" disabled />);
await userEvent.hover(screen.getByRole('textbox'));
await waitFor(() => {
expect(queryTooltipInner()).toBeNull();
});
});
test('renders mirror span and input correctly (structural assertions instead of snapshot)', () => {
const { container } = render(<OverflowInputToolTip value="Snapshot" />);
const mirror = container.querySelector('.overflow-input-mirror');
const input = container.querySelector('input') as HTMLInputElement | null;
expect(mirror).toBeTruthy();
expect(mirror?.textContent).toBe('Snapshot');
expect(input).toBeTruthy();
expect(input?.value).toBe('Snapshot');
// width should be set inline (component calculates width on mount)
expect(input?.getAttribute('style')).toContain('width:');
});
});

Some files were not shown because too many files have changed in this diff.