Compare commits


2 Commits

Author          SHA1        Message                                                      Date
vikrantgupta25  b2af2a1cf4  chore(analytics): associate user events with license group  2025-06-11 23:14:56 +05:30
vikrantgupta25  d901b60aa4  chore(analytics): associate user events with license group  2025-06-11 23:09:09 +05:30
522 changed files with 11098 additions and 30986 deletions


@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.128.0
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.128.0
image: signoz/signoz-schema-migrator:v0.111.42
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS (vendored, 7 changes)

@@ -12,9 +12,4 @@
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
.golangci.yml @grandwizard28
/pkg/zeus/ @vikrantgupta25
/pkg/licensing/ @vikrantgupta25
/pkg/sqlmigration/ @vikrantgupta25
/ee/zeus/ @vikrantgupta25
/ee/licensing/ @vikrantgupta25
/ee/sqlmigration/ @vikrantgupta25
**/(zeus|licensing|sqlmigration)/ @vikrantgupta25


@@ -22,7 +22,7 @@ jobs:
- 24.1.2-alpine
- 24.12-alpine
schema-migrator-version:
- v0.128.0
- v0.111.38
postgres-version:
- 15
if: |


@@ -7,7 +7,6 @@ linters:
- sloglint
- depguard
- iface
- unparam
linters-settings:
sloglint:


@@ -90,15 +90,6 @@ apiserver:
- /api/v1/version
- /
##################### Querier #####################
querier:
# The TTL for cached query results.
cache_ttl: 168h
# The interval for recent data that should not be cached.
flux_interval: 5m
# The maximum number of concurrent queries for missing ranges.
max_concurrent_queries: 4
##################### TelemetryStore #####################
telemetrystore:
# Maximum number of idle connections in the connection pool.
@@ -112,15 +103,13 @@ telemetrystore:
clickhouse:
# The DSN to use for clickhouse.
dsn: tcp://localhost:9000
# The cluster name to use for clickhouse.
cluster: cluster
# The query settings for clickhouse.
settings:
max_execution_time: 0
max_execution_time_leaf: 0
timeout_before_checking_execution_speed: 0
max_bytes_to_read: 0
max_result_rows: 0
max_result_rows_for_ch_query: 0
##################### Prometheus #####################
prometheus:
@@ -235,12 +224,3 @@ statsreporter:
enabled: true
# The interval at which the stats are collected.
interval: 6h
collect:
# Whether to collect identities and traits (emails).
identities: true
##################### Gateway (License only) #####################
gateway:
# The URL of the gateway's api.
url: http://localhost:8080


@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.89.0
image: signoz/signoz:v0.87.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -194,7 +194,6 @@ services:
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- SIGNOZ_JWT_SECRET=secret
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -207,7 +206,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.128.0
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -231,7 +230,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.128.0
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure


@@ -100,32 +100,26 @@ services:
# - "9000:9000"
# - "8123:8123"
# - "9181:9181"
configs:
- source: clickhouse-config
target: /etc/clickhouse-server/config.xml
- source: clickhouse-users
target: /etc/clickhouse-server/users.xml
- source: clickhouse-custom-function
target: /etc/clickhouse-server/custom-function.xml
- source: clickhouse-cluster
target: /etc/clickhouse-server/config.d/cluster.xml
volumes:
- ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
- ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
- ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
- ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
- ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
- clickhouse:/var/lib/clickhouse/
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.89.0
image: signoz/signoz:v0.87.0
command:
- --config=/root/config/prometheus.yml
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port
volumes:
- ../common/signoz/prometheus.yml:/root/config/prometheus.yml
- ../common/dashboards:/root/config/dashboards
- sqlite:/var/lib/signoz/
configs:
- source: signoz-prometheus-config
target: /root/config/prometheus.yml
environment:
- SIGNOZ_ALERTMANAGER_PROVIDER=signoz
- SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
@@ -135,7 +129,6 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -148,17 +141,15 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.128.0
image: signoz/signoz-otel-collector:v0.111.42
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
- --copy-path=/var/tmp/collector-config.yaml
- --feature-gates=-pkg.translator.prometheus.NormalizeName
configs:
- source: otel-collector-config
target: /etc/otel-collector-config.yaml
- source: otel-manager-config
target: /etc/manager-config.yaml
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
- LOW_CARDINAL_EXCEPTION_GROUPING=false
@@ -174,7 +165,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.128.0
image: signoz/signoz-schema-migrator:v0.111.42
deploy:
restart_policy:
condition: on-failure
@@ -195,24 +186,3 @@ volumes:
name: signoz-sqlite
zookeeper-1:
name: signoz-zookeeper-1
configs:
clickhouse-config:
file: ../common/clickhouse/config.xml
clickhouse-users:
file: ../common/clickhouse/users.xml
clickhouse-custom-function:
file: ../common/clickhouse/custom-function.xml
clickhouse-cluster:
file: ../common/clickhouse/cluster.xml
signoz-prometheus-config:
file: ../common/signoz/prometheus.yml
# If you have multiple dashboard files, you can list them individually:
# dashboard-foo:
# file: ../common/dashboards/foo.json
# dashboard-bar:
# file: ../common/dashboards/bar.json
otel-collector-config:
file: ./otel-collector-config.yaml
otel-manager-config:
file: ../common/signoz/otel-collector-opamp-config.yaml


@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: signozclickhousemetrics
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,16 +60,27 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
enabled: true
disable_v2: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -81,11 +92,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]


@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.89.0}
image: signoz/signoz:${VERSION:-v0.87.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -197,7 +197,6 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -211,7 +210,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -237,7 +236,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -248,7 +247,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async


@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.89.0}
image: signoz/signoz:${VERSION:-v0.87.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -130,7 +130,6 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -143,7 +142,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -165,7 +164,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-sync
command:
- sync
@@ -177,7 +176,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
container_name: schema-migrator-async
command:
- async


@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: signozclickhousemetrics
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,16 +60,27 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -81,11 +92,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]


@@ -16,7 +16,7 @@ __Table of Contents__
- [Prerequisites](#prerequisites-1)
- [Install Helm Repo and Charts](#install-helm-repo-and-charts)
- [Start the OpenTelemetry Demo App](#start-the-opentelemetry-demo-app-1)
- [Monitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [Moniitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [What's next](#whats-next)


@@ -6,13 +6,11 @@ import (
"time"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/analyticstypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
@@ -25,17 +23,16 @@ type provider struct {
config licensing.Config
settings factory.ScopedProviderSettings
orgGetter organization.Getter
analytics analytics.Analytics
stopChan chan struct{}
}
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
return New(ctx, providerSettings, config, store, zeus, orgGetter, analytics)
return New(ctx, providerSettings, config, store, zeus, orgGetter)
})
}
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) (licensing.Licensing, error) {
func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) (licensing.Licensing, error) {
settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
licensestore := sqllicensingstore.New(sqlstore)
return &provider{
@@ -45,7 +42,6 @@ func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Conf
settings: settings,
orgGetter: orgGetter,
stopChan: make(chan struct{}),
analytics: analytics,
}, nil
}
@@ -163,25 +159,6 @@ func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUI
return err
}
stats := licensetypes.NewStatsFromLicense(activeLicense)
provider.analytics.Send(ctx,
analyticstypes.Track{
UserId: "stats_" + organizationID.String(),
Event: "License Updated",
Properties: analyticstypes.NewPropertiesFromMap(stats),
Context: &analyticstypes.Context{
Extra: map[string]interface{}{
analyticstypes.KeyGroupID: organizationID.String(),
},
},
},
analyticstypes.Group{
UserId: "stats_" + organizationID.String(),
GroupId: organizationID.String(),
Traits: analyticstypes.NewTraitsFromMap(stats),
},
)
return nil
}


@@ -11,9 +11,11 @@ RUN apk update && \
COPY ./target/${OS}-${TARGETARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]


@@ -12,9 +12,11 @@ RUN apk update && \
rm -rf /var/cache/apk/*
COPY ./target/${OS}-${ARCH}/signoz /root/signoz
COPY ./conf/prometheus.yml /root/config/prometheus.yml
COPY ./templates/email /root/templates
COPY frontend/build/ /etc/signoz/web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["./signoz"]
CMD ["-config", "/root/config/prometheus.yml"]


@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/interfaces"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
@@ -16,7 +17,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -26,7 +26,8 @@ import (
)
type APIHandlerOptions struct {
DataConnector interfaces.Reader
DataConnector interfaces.DataConnector
PreferSpanMetrics bool
RulesManager *rules.Manager
UsageManager *usage.Manager
IntegrationsController *integrations.Controller
@@ -50,6 +51,7 @@ type APIHandler struct {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
PreferSpanMetrics: opts.PreferSpanMetrics,
RuleManager: opts.RulesManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsController: opts.CloudIntegrationsController,
@@ -59,7 +61,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier),
QuerierAPI: querierAPI.NewAPI(signoz.Querier),
})
if err != nil {


@@ -96,7 +96,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
return
}
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email)
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email, ah.opts.JWT)
if err != nil {
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)


@@ -59,7 +59,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
}
}
if constants.IsPreferSpanMetrics {
if ah.opts.PreferSpanMetrics {
for idx, feature := range featureSet {
if feature.Name == licensetypes.UseSpanMetrics {
featureSet[idx].Active = true


@@ -0,0 +1,39 @@
package db
import (
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/prometheus"
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
)
type ClickhouseReader struct {
conn clickhouse.Conn
appdb sqlstore.SQLStore
*basechr.ClickHouseReader
}
func NewDataConnector(
sqlDB sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
cluster string,
fluxIntervalForTraceDetail time.Duration,
cache cache.Cache,
) *ClickhouseReader {
chReader := basechr.NewReader(sqlDB, telemetryStore, prometheus, cluster, fluxIntervalForTraceDetail, cache)
return &ClickhouseReader{
conn: telemetryStore.ClickhouseDB(),
appdb: sqlDB,
ClickHouseReader: chReader,
}
}
func (r *ClickhouseReader) GetSQLStore() sqlstore.SQLStore {
return r.appdb
}


@@ -6,10 +6,14 @@ import (
"net"
"net/http"
_ "net/http/pprof" // http profiler
"time"
"github.com/gorilla/handlers"
"github.com/jmoiron/sqlx"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/app/db"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
@@ -28,7 +32,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
@@ -38,6 +41,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
@@ -55,55 +59,62 @@ type ServerOptions struct {
Jwt *authtypes.JWT
}
// Server runs HTTP, Mux and a grpc server
// Server runs HTTP api service
type Server struct {
config signoz.Config
signoz *signoz.SigNoz
jwt *authtypes.JWT
ruleManager *baserules.Manager
serverOptions *ServerOptions
ruleManager *baserules.Manager
// public http router
httpConn net.Listener
httpServer *http.Server
httpHostPort string
httpConn net.Listener
httpServer *http.Server
// private http
privateConn net.Listener
privateHTTP *http.Server
privateHostPort string
opampServer *opamp.Server
privateConn net.Listener
privateHTTP *http.Server
// Usage manager
usageManager *usage.Manager
opampServer *opamp.Server
unavailableChannel chan healthcheck.Status
}
// HealthCheckStatus returns health check status channel a client can subscribe to
func (s Server) HealthCheckStatus() chan healthcheck.Status {
return s.unavailableChannel
}
// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
func NewServer(serverOptions *ServerOptions) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
return nil, err
}
reader := clickhouseReader.NewReader(
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.TelemetryStore.Cluster(),
config.Querier.FluxInterval,
signoz.Cache,
fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail)
if err != nil {
return nil, err
}
reader := db.NewDataConnector(
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
serverOptions.Cluster,
fluxIntervalForTraceDetail,
serverOptions.SigNoz.Cache,
)
rm, err := makeRulesManager(
serverOptions.SigNoz.SQLStore.SQLxDB(),
reader,
signoz.Cache,
signoz.Alertmanager,
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
signoz.Modules.OrgGetter,
serverOptions.SigNoz.Cache,
serverOptions.SigNoz.Alertmanager,
serverOptions.SigNoz.SQLStore,
serverOptions.SigNoz.TelemetryStore,
serverOptions.SigNoz.Prometheus,
serverOptions.SigNoz.Modules.OrgGetter,
)
if err != nil {
@@ -111,16 +122,19 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
}
// initiate opamp
opAmpModel.Init(signoz.SQLStore, signoz.Instrumentation.Logger(), signoz.Modules.OrgGetter)
_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
if err != nil {
return nil, err
}
integrationsController, err := integrations.NewController(signoz.SQLStore)
integrationsController, err := integrations.NewController(serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create integrations controller: %w", err,
)
}
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
cloudIntegrationsController, err := cloudintegrations.NewController(serverOptions.SigNoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
@@ -129,8 +143,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
signoz.SQLStore,
integrationsController.GetPipelinesForInstalledIntegrations,
serverOptions.SigNoz.SQLStore, integrationsController.GetPipelinesForInstalledIntegrations,
)
if err != nil {
return nil, err
@@ -138,7 +151,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
// initiate agent config handler
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
Store: signoz.SQLStore,
DB: serverOptions.SigNoz.SQLStore.SQLxDB(),
AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
})
if err != nil {
@@ -146,7 +159,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
}
// start the usagemanager
usageManager, err := usage.New(signoz.Licensing, signoz.TelemetryStore.ClickhouseDB(), signoz.Zeus, signoz.Modules.OrgGetter)
usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.OrgGetter)
if err != nil {
return nil, err
}
@@ -155,36 +168,47 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
return nil, err
}
telemetry.GetInstance().SetReader(reader)
telemetry.GetInstance().SetSqlStore(serverOptions.SigNoz.SQLStore)
telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey)
telemetry.GetInstance().SetSavedViewsInfoCallback(telemetry.GetSavedViewsInfo)
telemetry.GetInstance().SetAlertsInfoCallback(telemetry.GetAlertsInfo)
telemetry.GetInstance().SetGetUsersCallback(telemetry.GetUsers)
telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount)
telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo)
fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval)
if err != nil {
return nil, err
}
apiOpts := api.APIHandlerOptions{
DataConnector: reader,
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
RulesManager: rm,
UsageManager: usageManager,
IntegrationsController: integrationsController,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
FluxInterval: fluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
JWT: jwt,
GatewayUrl: serverOptions.GatewayUrl,
JWT: serverOptions.Jwt,
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
apiHandler, err := api.NewAPIHandler(apiOpts, serverOptions.SigNoz)
if err != nil {
return nil, err
}
s := &Server{
config: config,
signoz: signoz,
jwt: jwt,
ruleManager: rm,
httpHostPort: baseconst.HTTPHostPort,
privateHostPort: baseconst.PrivateHostPort,
serverOptions: serverOptions,
unavailableChannel: make(chan healthcheck.Status),
usageManager: usageManager,
}
httpServer, err := s.createPublicServer(apiHandler, signoz.Web)
httpServer, err := s.createPublicServer(apiHandler, serverOptions.SigNoz.Web)
if err != nil {
return nil, err
@@ -200,28 +224,35 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
s.privateHTTP = privateServer
s.opampServer = opamp.InitializeServer(
&opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation,
&opAmpModel.AllAgents, agentConfMgr,
)
return s, nil
}
orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
if err != nil {
return nil, err
}
for _, org := range orgs {
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
}
// HealthCheckStatus returns health check status channel a client can subscribe to
func (s Server) HealthCheckStatus() chan healthcheck.Status {
return s.unavailableChannel
return s, nil
}
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterPrivateRoutes(r)
@@ -243,16 +274,17 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger())
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.serverOptions.SigNoz.Sharder, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.SigNoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(),
s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes,
s.serverOptions.Config.APIServer.Timeout.Default,
s.serverOptions.Config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewAnalytics().Wrap)
r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap)
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
@@ -293,7 +325,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
func (s *Server) initListeners() error {
// listen on public port
var err error
publicHostPort := s.httpHostPort
publicHostPort := s.serverOptions.HTTPHostPort
if publicHostPort == "" {
return fmt.Errorf("baseconst.HTTPHostPort is required")
}
@@ -303,10 +335,10 @@ func (s *Server) initListeners() error {
return err
}
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
// listen on private port to support internal services
privateHostPort := s.privateHostPort
privateHostPort := s.serverOptions.PrivateHostPort
if privateHostPort == "" {
return fmt.Errorf("baseconst.PrivateHostPort is required")
@@ -316,7 +348,7 @@ func (s *Server) initListeners() error {
if err != nil {
return err
}
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
return nil
}
@@ -336,7 +368,7 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
switch err := s.httpServer.Serve(s.httpConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
@@ -362,7 +394,7 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort))
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
switch err := s.privateHTTP.Serve(s.privateConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
@@ -414,6 +446,7 @@ func (s *Server) Stop(ctx context.Context) error {
}
func makeRulesManager(
db *sqlx.DB,
ch baseint.Reader,
cache cache.Cache,
alertmanager alertmanager.Alertmanager,
@@ -426,6 +459,7 @@ func makeRulesManager(
managerOpts := &baserules.ManagerOptions{
TelemetryStore: telemetryStore,
Prometheus: prometheus,
DBConn: db,
Context: context.Background(),
Logger: zap.L(),
Reader: ch,


@@ -37,14 +37,9 @@ func GetDefaultSiteURL() string {
const DotMetricsEnabled = "DOT_METRICS_ENABLED"
var IsDotMetricsEnabled = false
var IsPreferSpanMetrics = false
func init() {
if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
IsDotMetricsEnabled = true
}
if GetOrDefaultEnv("USE_SPAN_METRICS", "false") == "true" {
IsPreferSpanMetrics = true
}
}


@@ -0,0 +1,11 @@
package interfaces
import (
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
)
// Connector defines methods for interaction
// with o11y data. for example - clickhouse
type DataConnector interface {
baseint.Reader
}


@@ -9,11 +9,9 @@ import (
"github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
@@ -22,7 +20,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -104,14 +101,10 @@ func main() {
fileprovider.NewFactory(),
},
}, signoz.DeprecatedFlags{
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
MaxIdleConns: maxIdleConns,
MaxOpenConns: maxOpenConns,
DialTimeout: dialTimeout,
Config: promConfigPath,
})
if err != nil {
zap.L().Fatal("Failed to create config", zap.Error(err))
@@ -141,20 +134,12 @@ func main() {
zeus.Config(),
httpzeus.NewProviderFactory(),
licensing.Config(24*time.Hour, 3),
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter)
},
signoz.NewEmailingProviderFactories(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] {
existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore)
if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil {
zap.L().Fatal("Failed to add postgressqlschema factory", zap.Error(err))
}
return existingFactories
},
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
@@ -162,7 +147,20 @@ func main() {
zap.L().Fatal("Failed to create signoz", zap.Error(err))
}
server, err := app.NewServer(config, signoz, jwt)
serverOptions := &app.ServerOptions{
Config: config,
SigNoz: signoz,
HTTPHostPort: baseconst.HTTPHostPort,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
FluxInterval: fluxInterval,
FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
Cluster: cluster,
GatewayUrl: gatewayUrl,
Jwt: jwt,
}
server, err := app.NewServer(serverOptions)
if err != nil {
zap.L().Fatal("Failed to create server", zap.Error(err))
}


@@ -1,36 +0,0 @@
package postgressqlschema
import (
"strings"
"github.com/SigNoz/signoz/pkg/sqlschema"
)
type Formatter struct {
sqlschema.Formatter
}
func (formatter Formatter) SQLDataTypeOf(dataType sqlschema.DataType) string {
if dataType == sqlschema.DataTypeTimestamp {
return "TIMESTAMPTZ"
}
return strings.ToUpper(dataType.String())
}
func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
switch strings.ToUpper(dataType) {
case "TIMESTAMPTZ", "TIMESTAMP", "TIMESTAMP WITHOUT TIME ZONE", "TIMESTAMP WITH TIME ZONE":
return sqlschema.DataTypeTimestamp
case "INT8":
return sqlschema.DataTypeBigInt
case "INT2", "INT4", "SMALLINT", "INTEGER":
return sqlschema.DataTypeInteger
case "BOOL", "BOOLEAN":
return sqlschema.DataTypeBoolean
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
return sqlschema.DataTypeText
}
return formatter.Formatter.DataTypeOf(dataType)
}


@@ -1,285 +0,0 @@
package postgressqlschema
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type provider struct {
settings factory.ScopedProviderSettings
fmter sqlschema.SQLFormatter
sqlstore sqlstore.SQLStore
operator sqlschema.SQLOperator
}
func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config] {
return factory.NewProviderFactory(factory.MustNewName("postgres"), func(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config) (sqlschema.SQLSchema, error) {
return New(ctx, providerSettings, config, sqlstore)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config, sqlstore sqlstore.SQLStore) (sqlschema.SQLSchema, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/sqlschema/postgressqlschema")
fmter := Formatter{Formatter: sqlschema.NewFormatter(sqlstore.BunDB().Dialect())}
return &provider{
sqlstore: sqlstore,
fmter: fmter,
settings: settings,
operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
DropConstraint: true,
ColumnIfNotExistsExists: true,
AlterColumnSetNotNull: true,
}),
}, nil
}
func (provider *provider) Formatter() sqlschema.SQLFormatter {
return provider.fmter
}
func (provider *provider) Operator() sqlschema.SQLOperator {
return provider.operator
}
func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
rows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
c.column_name,
c.is_nullable = 'YES',
c.udt_name,
c.column_default
FROM
information_schema.columns AS c
WHERE
c.table_name = ?`, string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
columns := make([]*sqlschema.Column, 0)
for rows.Next() {
var (
name string
sqlDataType string
nullable bool
defaultVal *string
)
if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
return nil, nil, err
}
columnDefault := ""
if defaultVal != nil {
columnDefault = *defaultVal
}
columns = append(columns, &sqlschema.Column{
Name: sqlschema.ColumnName(name),
Nullable: nullable,
DataType: provider.fmter.DataTypeOf(sqlDataType),
Default: columnDefault,
})
}
constraintsRows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
c.column_name,
constraint_name,
constraint_type
FROM
information_schema.table_constraints tc
JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_catalog, table_name, constraint_name)
JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name
WHERE
c.table_name = ?`, string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := constraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
var primaryKeyConstraint *sqlschema.PrimaryKeyConstraint
uniqueConstraintsMap := make(map[string]*sqlschema.UniqueConstraint)
for constraintsRows.Next() {
var (
name string
constraintName string
constraintType string
)
if err := constraintsRows.Scan(&name, &constraintName, &constraintType); err != nil {
return nil, nil, err
}
if constraintType == "PRIMARY KEY" {
if primaryKeyConstraint == nil {
primaryKeyConstraint = (&sqlschema.PrimaryKeyConstraint{
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
}).Named(constraintName).(*sqlschema.PrimaryKeyConstraint)
} else {
primaryKeyConstraint.ColumnNames = append(primaryKeyConstraint.ColumnNames, sqlschema.ColumnName(name))
}
}
if constraintType == "UNIQUE" {
if _, ok := uniqueConstraintsMap[constraintName]; !ok {
uniqueConstraintsMap[constraintName] = (&sqlschema.UniqueConstraint{
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
}).Named(constraintName).(*sqlschema.UniqueConstraint)
} else {
uniqueConstraintsMap[constraintName].ColumnNames = append(uniqueConstraintsMap[constraintName].ColumnNames, sqlschema.ColumnName(name))
}
}
}
foreignKeyConstraintsRows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
tc.constraint_name,
kcu.table_name AS referencing_table,
kcu.column_name AS referencing_column,
ccu.table_name AS referenced_table,
ccu.column_name AS referenced_column
FROM
information_schema.key_column_usage kcu
JOIN information_schema.table_constraints tc ON kcu.constraint_name = tc.constraint_name AND kcu.table_schema = tc.table_schema
JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name AND ccu.table_schema = tc.table_schema
WHERE
tc.constraint_type = ?
AND kcu.table_name = ?`, "FOREIGN KEY", string(tableName))
if err != nil {
return nil, nil, err
}
defer func() {
if err := foreignKeyConstraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
foreignKeyConstraints := make([]*sqlschema.ForeignKeyConstraint, 0)
for foreignKeyConstraintsRows.Next() {
var (
constraintName string
referencingTable string
referencingColumn string
referencedTable string
referencedColumn string
)
if err := foreignKeyConstraintsRows.Scan(&constraintName, &referencingTable, &referencingColumn, &referencedTable, &referencedColumn); err != nil {
return nil, nil, err
}
foreignKeyConstraints = append(foreignKeyConstraints, (&sqlschema.ForeignKeyConstraint{
ReferencingColumnName: sqlschema.ColumnName(referencingColumn),
ReferencedTableName: sqlschema.TableName(referencedTable),
ReferencedColumnName: sqlschema.ColumnName(referencedColumn),
}).Named(constraintName).(*sqlschema.ForeignKeyConstraint))
}
uniqueConstraints := make([]*sqlschema.UniqueConstraint, 0)
for _, uniqueConstraint := range uniqueConstraintsMap {
uniqueConstraints = append(uniqueConstraints, uniqueConstraint)
}
return &sqlschema.Table{
Name: tableName,
Columns: columns,
PrimaryKeyConstraint: primaryKeyConstraint,
ForeignKeyConstraints: foreignKeyConstraints,
}, uniqueConstraints, nil
}
func (provider *provider) GetIndices(ctx context.Context, name sqlschema.TableName) ([]sqlschema.Index, error) {
rows, err := provider.
sqlstore.
BunDB().
QueryContext(ctx, `
SELECT
ct.relname AS table_name,
ci.relname AS index_name,
i.indisunique AS unique,
i.indisprimary AS primary,
a.attname AS column_name
FROM
pg_index i
LEFT JOIN pg_class ct ON ct.oid = i.indrelid
LEFT JOIN pg_class ci ON ci.oid = i.indexrelid
LEFT JOIN pg_attribute a ON a.attrelid = ct.oid
LEFT JOIN pg_constraint con ON con.conindid = i.indexrelid
WHERE
a.attnum = ANY(i.indkey)
AND con.oid IS NULL
AND ct.relkind = 'r'
AND ct.relname = ?`, string(name))
if err != nil {
return nil, err
}
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex)
for rows.Next() {
var (
tableName string
indexName string
unique bool
primary bool
columnName string
)
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
return nil, err
}
if unique {
if _, ok := uniqueIndicesMap[indexName]; !ok {
uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{
TableName: name,
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
}
} else {
uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName))
}
}
}
indices := make([]sqlschema.Index, 0)
for _, index := range uniqueIndicesMap {
indices = append(indices, index)
}
return indices, nil
}
func (provider *provider) ToggleFKEnforcement(_ context.Context, _ bun.IDB, _ bool) error {
return nil
}


@@ -17,21 +17,19 @@ var (
)
var (
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
AgentConfigVersion = "agent_config_version"
Org = "org"
User = "user"
UserNoCascade = "user_no_cascade"
FactorPassword = "factor_password"
CloudIntegration = "cloud_integration"
)
var (
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
AgentConfigVersionReference = `("version_id") REFERENCES "agent_config_version" ("id")`
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
UserReferenceNoCascade = `("user_id") REFERENCES "users" ("id")`
FactorPasswordReference = `("password_id") REFERENCES "factor_password" ("id")`
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
)
type dialect struct{}
@@ -276,8 +274,6 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
fkReferences = append(fkReferences, FactorPasswordReference)
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
fkReferences = append(fkReferences, CloudIntegrationReference)
} else if reference == AgentConfigVersion && !slices.Contains(fkReferences, AgentConfigVersionReference) {
fkReferences = append(fkReferences, AgentConfigVersionReference)
}
}


@@ -10,6 +10,7 @@ import (
"github.com/jackc/pgx/v5/pgconn"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/jackc/pgx/v5/stdlib"
"github.com/jmoiron/sqlx"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect/pgdialect"
)
@@ -18,6 +19,7 @@ type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
sqlxdb *sqlx.DB
dialect *dialect
}
@@ -59,6 +61,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
sqlxdb: sqlx.NewDb(sqldb, "postgres"),
dialect: new(dialect),
}, nil
}
@@ -71,6 +74,10 @@ func (provider *provider) SQLDB() *sql.DB {
return provider.sqldb
}
func (provider *provider) SQLxDB() *sqlx.DB {
return provider.sqlxdb
}
func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}


@@ -78,7 +78,7 @@
"fontfaceobserver": "2.3.0",
"history": "4.10.1",
"html-webpack-plugin": "5.5.0",
"http-proxy-middleware": "3.0.5",
"http-proxy-middleware": "3.0.3",
"http-status-codes": "2.3.0",
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
@@ -213,9 +213,7 @@
"eslint-plugin-simple-import-sort": "^7.0.0",
"eslint-plugin-sonarjs": "^0.12.0",
"husky": "^7.0.4",
"image-minimizer-webpack-plugin": "^4.0.0",
"imagemin": "^8.0.1",
"imagemin-svgo": "^10.0.1",
"image-webpack-loader": "8.1.0",
"is-ci": "^3.0.1",
"jest-styled-components": "^7.0.8",
"lint-staged": "^12.5.0",
@@ -232,7 +230,6 @@
"redux-mock-store": "1.5.4",
"sass": "1.66.1",
"sass-loader": "13.3.2",
"sharp": "^0.33.4",
"ts-jest": "^27.1.5",
"ts-node": "^10.2.1",
"typescript-plugin-css-modules": "5.0.1",
@@ -253,11 +250,10 @@
"xml2js": "0.5.0",
"phin": "^3.7.1",
"body-parser": "1.20.3",
"http-proxy-middleware": "3.0.5",
"http-proxy-middleware": "3.0.3",
"cross-spawn": "7.0.5",
"cookie": "^0.7.1",
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0",
"got": "11.8.5"
"prismjs": "1.30.0"
}
}


@@ -14,8 +14,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",


@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",
@@ -62,8 +62,5 @@
"channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly",
"channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again",
"webhook_url_required": "Webhook URL is mandatory",
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)",
"api_key_required": "API Key is mandatory",
"to_required": "To field is mandatory",
"channel_name_required": "Channel name is mandatory"
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)"
}


@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",


@@ -129,6 +129,5 @@
"text_num_points": "data points in each result group",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query",
"alert_rule_not_found": "Alert Rule not found"
"selected_query_placeholder": "Select query"
}


@@ -9,8 +9,8 @@
"tooltip_notification_channels": "More details on how to setting notification channels",
"sending_channels_note": "The alerts will be sent to all the configured channels.",
"loading_channels_message": "Loading Channels..",
"page_title_create": "New Notification Channel",
"page_title_edit": "Edit Notification Channel",
"page_title_create": "New Notification Channels",
"page_title_edit": "Edit Notification Channels",
"button_save_channel": "Save",
"button_test_channel": "Test",
"button_return": "Back",
@@ -77,8 +77,5 @@
"channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly",
"channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again",
"webhook_url_required": "Webhook URL is mandatory",
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)",
"api_key_required": "API Key is mandatory",
"to_required": "To field is mandatory",
"channel_name_required": "Channel name is mandatory"
"slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)"
}


@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",


@@ -3,7 +3,6 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import getAll from 'api/v1/user/get';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import ROUTES from 'constants/routes';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import history from 'lib/history';
@@ -15,7 +14,6 @@ import { matchPath, useLocation } from 'react-router-dom';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
import { OrgPreference } from 'types/api/preferences/preference';
import { Organization } from 'types/api/user/getOrganization';
import { UserResponse } from 'types/api/user/getUser';
import { USER_ROLES } from 'types/roles';
@@ -97,8 +95,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
usersData.data
) {
const isOnboardingComplete = orgPreferences?.find(
(preference: OrgPreference) =>
preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
(preference: Record<string, any>) => preference.name === 'org_onboarding',
)?.value;
const isFirstUser = checkFirstTimeUser();
@@ -126,8 +123,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const isRouteEnabledForWorkspaceBlockedState =
isAdmin &&
(path === ROUTES.SETTINGS ||
path === ROUTES.ORG_SETTINGS ||
(path === ROUTES.ORG_SETTINGS ||
path === ROUTES.BILLING ||
path === ROUTES.MY_SETTINGS);


@@ -71,7 +71,7 @@ function App(): JSX.Element {
const orgName =
org && Array.isArray(org) && org.length > 0 ? org[0].displayName : '';
const { displayName, email, role, id, orgId } = user;
const { displayName, email, role } = user;
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
@@ -105,7 +105,7 @@ function App(): JSX.Element {
logEvent('Domain Identified', groupTraits, 'group');
}
if (window && window.Appcues) {
window.Appcues.identify(id, {
window.Appcues.identify(email, {
name: displayName,
tenant_id: hostNameParts[0],
@@ -131,7 +131,7 @@ function App(): JSX.Element {
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.identify(id, {
posthog?.identify(email, {
email,
name: displayName,
orgName,
@@ -143,7 +143,7 @@ function App(): JSX.Element {
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.group('company', orgId, {
posthog?.group('company', domain, {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
@@ -191,22 +191,19 @@ function App(): JSX.Element {
// if the user is on basic plan then remove billing
if (isOnBasicPlan) {
updatedRoutes = updatedRoutes.filter(
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
(route) => route?.path !== ROUTES.BILLING,
);
}
if (isEnterpriseSelfHostedUser) {
updatedRoutes.push(LIST_LICENSES);
if (isEnterpriseSelfHostedUser) {
updatedRoutes.push(LIST_LICENSES);
}
}
// always add support route for cloud users
updatedRoutes = [...updatedRoutes, SUPPORT_ROUTE];
} else {
// if not a cloud user then remove billing and add list licenses route
updatedRoutes = updatedRoutes.filter(
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
(route) => route?.path !== ROUTES.BILLING,
);
updatedRoutes = [...updatedRoutes, LIST_LICENSES];
}
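
Editor note: the analytics hunks above switch the identity key from the stable user id to the email address, and the PostHog company group key from orgId to the email domain. A minimal sketch of the id/orgId variant, assuming posthog-js and an Appcues snippet on window; the trait names are illustrative, not the repository's exact set:

import posthog from 'posthog-js';

interface AppUser {
	id: string;
	email: string;
	displayName: string;
	orgId: string;
}

declare global {
	interface Window {
		Appcues?: {
			identify: (userId: string, traits: Record<string, unknown>) => void;
		};
	}
}

function identifyUser(user: AppUser, orgName: string): void {
	// Identify by the stable user id so events survive email changes.
	posthog.identify(user.id, {
		email: user.email,
		name: user.displayName,
		orgName,
	});

	// Group events under the organization id rather than the email domain.
	posthog.group('company', user.orgId, { name: orgName });

	window.Appcues?.identify(user.id, { name: user.displayName });
}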

View File

@@ -128,7 +128,12 @@ export const AlertOverview = Loadable(
);
export const CreateAlertChannelAlerts = Loadable(
() => import(/* webpackChunkName: "Create Channels" */ 'pages/Settings'),
() =>
import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'),
);
export const EditAlertChannelsAlerts = Loadable(
() => import(/* webpackChunkName: "Edit Channels" */ 'pages/ChannelsEdit'),
);
export const AllAlertChannels = Loadable(
@@ -160,7 +165,7 @@ export const APIKeys = Loadable(
);
export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/Settings'),
() => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'),
);
export const CustomDomainSettings = Loadable(
@@ -217,7 +222,7 @@ export const LogsIndexToFields = Loadable(
);
export const BillingPage = Loadable(
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Settings'),
() => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'),
);
export const SupportPage = Loadable(
@@ -244,7 +249,7 @@ export const WorkspaceAccessRestricted = Loadable(
);
export const ShortcutsPage = Loadable(
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Settings'),
() => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'),
);
export const InstalledIntegrations = Loadable(

View File

@@ -7,15 +7,20 @@ import {
AlertOverview,
AllAlertChannels,
AllErrors,
APIKeys,
ApiMonitoring,
BillingPage,
CreateAlertChannelAlerts,
CreateNewAlerts,
CustomDomainSettings,
DashboardPage,
DashboardWidget,
EditAlertChannelsAlerts,
EditRulesPage,
ErrorDetails,
Home,
InfrastructureMonitoring,
IngestionSettings,
InstalledIntegrations,
LicensePage,
ListAllALertsPage,
@@ -26,10 +31,12 @@ import {
LogsIndexToFields,
LogsSaveViews,
MetricsExplorer,
MySettings,
NewDashboardPage,
OldLogsExplorer,
Onboarding,
OnboardingV2,
OrganizationSettings,
OrgOnboarding,
PasswordReset,
PipelinePage,
@@ -38,6 +45,7 @@ import {
ServicesTablePage,
ServiceTopLevelOperationsPage,
SettingsPage,
ShortcutsPage,
SignupPage,
SomethingWentWrong,
StatusPage,
@@ -142,7 +150,7 @@ const routes: AppRoutes[] = [
},
{
path: ROUTES.SETTINGS,
exact: false,
exact: true,
component: SettingsPage,
isPrivate: true,
key: 'SETTINGS',
@@ -252,6 +260,13 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'CHANNELS_NEW',
},
{
path: ROUTES.CHANNELS_EDIT,
exact: true,
component: EditAlertChannelsAlerts,
isPrivate: true,
key: 'CHANNELS_EDIT',
},
{
path: ROUTES.ALL_CHANNELS,
exact: true,
@@ -280,6 +295,41 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'VERSION',
},
{
path: ROUTES.ORG_SETTINGS,
exact: true,
component: OrganizationSettings,
isPrivate: true,
key: 'ORG_SETTINGS',
},
{
path: ROUTES.INGESTION_SETTINGS,
exact: true,
component: IngestionSettings,
isPrivate: true,
key: 'INGESTION_SETTINGS',
},
{
path: ROUTES.API_KEYS,
exact: true,
component: APIKeys,
isPrivate: true,
key: 'API_KEYS',
},
{
path: ROUTES.MY_SETTINGS,
exact: true,
component: MySettings,
isPrivate: true,
key: 'MY_SETTINGS',
},
{
path: ROUTES.CUSTOM_DOMAIN_SETTINGS,
exact: true,
component: CustomDomainSettings,
isPrivate: true,
key: 'CUSTOM_DOMAIN_SETTINGS',
},
{
path: ROUTES.LOGS,
exact: true,
@@ -343,6 +393,13 @@ const routes: AppRoutes[] = [
key: 'SOMETHING_WENT_WRONG',
isPrivate: false,
},
{
path: ROUTES.BILLING,
exact: true,
component: BillingPage,
key: 'BILLING',
isPrivate: true,
},
{
path: ROUTES.WORKSPACE_LOCKED,
exact: true,
@@ -364,6 +421,13 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'WORKSPACE_ACCESS_RESTRICTED',
},
{
path: ROUTES.SHORTCUTS,
exact: true,
component: ShortcutsPage,
isPrivate: true,
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS,
exact: true,

View File

@@ -1,29 +0,0 @@
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import axios, { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
const getChangelogByVersion = async (
versionId: string,
): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> => {
try {
const response = await axios.get(`
https://cms.signoz.cloud/api/release-changelogs?filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText
`);
if (!Array.isArray(response.data.data) || response.data.data.length === 0) {
throw new Error('No changelog found!');
}
return {
statusCode: 200,
error: null,
message: response.statusText,
payload: response.data.data[0],
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default getChangelogByVersion;

View File

@@ -119,7 +119,6 @@ export const updateFunnelSteps = async (
export interface ValidateFunnelPayload {
start_time: number;
end_time: number;
steps: FunnelStepData[];
}
export interface ValidateFunnelResponse {
@@ -133,11 +132,12 @@ export interface ValidateFunnelResponse {
}
export const validateFunnelSteps = async (
funnelId: string,
payload: ValidateFunnelPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<ValidateFunnelResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/validate`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
payload,
{ signal },
);
@@ -185,7 +185,6 @@ export interface FunnelOverviewPayload {
end_time: number;
step_start?: number;
step_end?: number;
steps: FunnelStepData[];
}
export interface FunnelOverviewResponse {
@@ -203,11 +202,12 @@ export interface FunnelOverviewResponse {
}
export const getFunnelOverview = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/overview`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/overview`,
payload,
{
signal,
@@ -235,11 +235,12 @@ export interface SlowTraceData {
}
export const getFunnelSlowTraces = async (
funnelId: string,
payload: FunnelOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<SlowTraceData> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/slow-traces`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/slow-traces`,
payload,
{
signal,
@@ -272,7 +273,7 @@ export const getFunnelErrorTraces = async (
signal?: AbortSignal,
): Promise<SuccessResponse<ErrorTraceData> | ErrorResponse> => {
const response: AxiosResponse = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/error-traces`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/error-traces`,
payload,
{
signal,
@@ -290,7 +291,6 @@ export const getFunnelErrorTraces = async (
export interface FunnelStepsPayload {
start_time: number;
end_time: number;
steps: FunnelStepData[];
}
export interface FunnelStepGraphMetrics {
@@ -307,11 +307,12 @@ export interface FunnelStepsResponse {
}
export const getFunnelSteps = async (
funnelId: string,
payload: FunnelStepsPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/steps`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps`,
payload,
{ signal },
);
@@ -329,7 +330,6 @@ export interface FunnelStepsOverviewPayload {
end_time: number;
step_start?: number;
step_end?: number;
steps: FunnelStepData[];
}
export interface FunnelStepsOverviewResponse {
@@ -341,11 +341,12 @@ export interface FunnelStepsOverviewResponse {
}
export const getFunnelStepsOverview = async (
funnelId: string,
payload: FunnelStepsOverviewPayload,
signal?: AbortSignal,
): Promise<SuccessResponse<FunnelStepsOverviewResponse> | ErrorResponse> => {
const response = await axios.post(
`${FUNNELS_BASE_PATH}/analytics/steps/overview`,
`${FUNNELS_BASE_PATH}/${funnelId}/analytics/steps/overview`,
payload,
{ signal },
);
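
Editor note: the hunks above change the funnel analytics endpoints between two shapes: one keeps the URL funnel-agnostic and sends a steps array in the payload, the other puts funnelId in the URL and drops steps from the body. A minimal sketch contrasting the two call shapes, assuming an axios client; the base path value and FunnelStepData fields are illustrative guesses, not the repository's constants:

import axios from 'axios';

// Assumed base path; the real constant lives elsewhere in the repository.
const FUNNELS_BASE_PATH = '/api/v1/trace-funnels';

// Illustrative step shape only.
interface FunnelStepData {
	service_name: string;
	span_name: string;
}

// Shape A: funnel-agnostic endpoint, steps travel in the request body.
async function validateSteps(
	steps: FunnelStepData[],
	startTime: number,
	endTime: number,
): Promise<unknown> {
	const response = await axios.post(`${FUNNELS_BASE_PATH}/analytics/validate`, {
		start_time: startTime,
		end_time: endTime,
		steps,
	});
	return response.data;
}

// Shape B: funnel id in the URL; the payload carries only the time range,
// presumably resolved against the funnel's stored steps on the server.
async function validateFunnel(
	funnelId: string,
	startTime: number,
	endTime: number,
): Promise<unknown> {
	const response = await axios.post(
		`${FUNNELS_BASE_PATH}/${funnelId}/analytics/validate`,
		{ start_time: startTime, end_time: endTime },
	);
	return response.data;
}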

View File

@@ -0,0 +1,21 @@
import axios from 'api';
import { Props } from 'types/api/userFeedback/sendResponse';
const sendFeedback = async (props: Props): Promise<number> => {
const response = await axios.post(
'/feedback',
{
email: props.email,
message: props.message,
},
{
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
},
},
);
return response.status;
};
export default sendFeedback;
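
Editor note: a usage sketch for the sendFeedback helper added above; the import path is a guess and the surrounding function is illustrative. Callers only get back the HTTP status code, so success is derived from it:

import sendFeedback from 'api/userFeedback/sendResponse'; // assumed module path

async function submitFeedback(email: string, message: string): Promise<boolean> {
	try {
		const status = await sendFeedback({ email, message });
		return status >= 200 && status < 300;
	} catch {
		// axios rejects on network errors and non-2xx responses
		return false;
	}
}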

View File

@@ -1,161 +0,0 @@
.changelog-modal {
.ant-modal-content {
padding: unset;
background-color: var(--bg-ink-400, #121317);
.ant-modal-header {
margin-bottom: unset;
}
.ant-modal-footer {
margin-top: unset;
}
}
&-title {
display: flex;
align-items: center;
gap: 8px;
background-color: var(--bg-ink-400, #121317);
padding: 16px;
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-100, #fff);
border-bottom: 1px solid var(--bg-slate-500, #161922);
}
&-footer.scroll-available {
.scroll-btn-container {
display: block;
}
}
&-footer {
position: relative;
border: 1px solid var(--bg-slate-500, #161922);
padding: 12px;
display: flex;
align-items: center;
justify-content: space-between;
&-label {
color: var(--text-robin-400, #7190f9);
font-size: 14px;
line-height: 24px;
position: relative;
padding-left: 14px;
&::before {
content: '';
position: absolute;
top: 50%;
left: 0;
transform: translateY(-50%);
width: 6px;
height: 6px;
border-radius: 100%;
background-color: var(--bg-robin-500, #7190f9);
}
}
&-ctas {
display: flex;
& svg {
font-size: 14px;
}
}
.scroll-btn-container {
display: none;
position: absolute;
top: -40px;
left: 50%;
transform: translateX(-50%);
.scroll-btn {
all: unset;
padding: 4px 12px 4px 10px;
background-color: var(--bg-slate-400, #1d212d);
border-radius: 20px;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
gap: 4px;
transition: background-color 0.1s;
&:hover {
background-color: var(--bg-slate-200, #2c3140);
}
&:active {
background-color: var(--bg-slate-600, #1c1f2a);
}
span {
font-size: 12px;
line-height: 18px;
color: var(--text-vanilla-400, #c0c1c3);
}
// add animation to the chevrons down icon
svg {
animation: pulse 1s infinite;
}
}
}
}
&-content {
max-height: calc(100vh - 300px);
overflow-y: auto;
padding: 16px;
border: 1px solid var(--bg-slate-500, #161922);
border-top-width: 0;
border-bottom-width: 0;
}
}
// pulse for the scroll for more icon
@keyframes pulse {
0% {
opacity: 1;
}
50% {
opacity: 0.5;
}
}
.lightMode {
.changelog-modal {
.ant-modal-content {
background-color: var(--bg-vanilla-100);
border-color: var(--bg-vanilla-300);
}
&-title {
background: var(--bg-vanilla-100);
color: var(--bg-ink-500);
border-color: var(--bg-vanilla-300);
}
&-content {
border-color: var(--bg-vanilla-300);
}
&-footer {
border-color: var(--bg-vanilla-300);
.scroll-btn-container {
.scroll-btn {
background-color: var(--bg-vanilla-300);
span {
color: var(--text-ink-500);
}
}
}
}
}
}

View File

@@ -1,131 +0,0 @@
import './ChangelogModal.styles.scss';
import { CheckOutlined, CloseOutlined } from '@ant-design/icons';
import { Button, Modal } from 'antd';
import cx from 'classnames';
import dayjs from 'dayjs';
import { ChevronsDown, ScrollText } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useCallback, useEffect, useRef, useState } from 'react';
import ChangelogRenderer from './components/ChangelogRenderer';
interface Props {
onClose: () => void;
}
function ChangelogModal({ onClose }: Props): JSX.Element {
const [hasScroll, setHasScroll] = useState(false);
const changelogContentSectionRef = useRef<HTMLDivElement>(null);
const { changelog } = useAppContext();
const formattedReleaseDate = dayjs(changelog?.release_date).format(
'MMMM D, YYYY',
);
const checkScroll = useCallback((): void => {
if (changelogContentSectionRef.current) {
const {
scrollHeight,
clientHeight,
scrollTop,
} = changelogContentSectionRef.current;
const isAtBottom = scrollHeight - clientHeight - scrollTop <= 8;
setHasScroll(scrollHeight > clientHeight + 24 && !isAtBottom); // 24px - buffer height to show the 'Scroll for more' button
}
}, []);
useEffect(() => {
checkScroll();
const changelogContentSection = changelogContentSectionRef.current;
if (changelogContentSection) {
changelogContentSection.addEventListener('scroll', checkScroll);
}
return (): void => {
if (changelogContentSection) {
changelogContentSection.removeEventListener('scroll', checkScroll);
}
};
}, [checkScroll]);
const onClickUpdateWorkspace = (): void => {
window.open(
'https://github.com/SigNoz/signoz/releases',
'_blank',
'noopener,noreferrer',
);
};
const onClickScrollForMore = (): void => {
if (changelogContentSectionRef.current) {
changelogContentSectionRef.current.scrollTo({
top: changelogContentSectionRef.current.scrollTop + 600, // Scroll 600px from the current position
behavior: 'smooth',
});
}
};
return (
<Modal
className={cx('changelog-modal')}
title={
<div className="changelog-modal-title">
<ScrollText size={16} />
Whats New ⎯ Changelog : {formattedReleaseDate}
</div>
}
width={820}
open
onCancel={onClose}
footer={
<div
className={cx('changelog-modal-footer', hasScroll && 'scroll-available')}
>
{changelog?.features && changelog.features.length > 0 && (
<span className="changelog-modal-footer-label">
{changelog.features.length} new&nbsp;
{changelog.features.length > 1 ? 'features' : 'feature'}
</span>
)}
<div className="changelog-modal-footer-ctas">
<Button type="default" icon={<CloseOutlined />} onClick={onClose}>
Skip for now
</Button>
<Button
type="primary"
icon={<CheckOutlined />}
onClick={onClickUpdateWorkspace}
>
Update my workspace
</Button>
</div>
{changelog && (
<div className="scroll-btn-container">
<button
data-testid="scroll-more-btn"
type="button"
className="scroll-btn"
onClick={onClickScrollForMore}
>
<ChevronsDown size={14} />
<span>Scroll for more</span>
</button>
</div>
)}
</div>
}
>
<div
className="changelog-modal-content"
data-testid="changelog-content"
ref={changelogContentSectionRef}
>
{changelog && <ChangelogRenderer changelog={changelog} />}
</div>
</Modal>
);
}
export default ChangelogModal;

View File

@@ -1,79 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable sonarjs/no-identical-functions */
/* eslint-disable @typescript-eslint/explicit-function-return-type */
import { fireEvent, render, screen } from '@testing-library/react';
import ChangelogModal from '../ChangelogModal';
const mockChangelog = {
release_date: '2025-06-10',
features: [
{
id: 1,
title: 'Feature 1',
description: 'Description for feature 1',
media: null,
},
],
bug_fixes: 'Bug fix details',
maintenance: 'Maintenance details',
};
// Mock react-markdown to just render children as plain text
jest.mock(
'react-markdown',
() =>
function ReactMarkdown({ children }: any) {
return <div>{children}</div>;
},
);
// mock useAppContext
jest.mock('providers/App/App', () => ({
useAppContext: jest.fn(() => ({ changelog: mockChangelog })),
}));
describe('ChangelogModal', () => {
it('renders modal with changelog data', () => {
render(<ChangelogModal onClose={jest.fn()} />);
expect(
screen.getByText('Whats New ⎯ Changelog : June 10, 2025'),
).toBeInTheDocument();
expect(screen.getByText('Feature 1')).toBeInTheDocument();
expect(screen.getByText('Description for feature 1')).toBeInTheDocument();
expect(screen.getByText('Bug fix details')).toBeInTheDocument();
expect(screen.getByText('Maintenance details')).toBeInTheDocument();
});
it('calls onClose when Skip for now is clicked', () => {
const onClose = jest.fn();
render(<ChangelogModal onClose={onClose} />);
fireEvent.click(screen.getByText('Skip for now'));
expect(onClose).toHaveBeenCalled();
});
it('opens migration docs when Update my workspace is clicked', () => {
window.open = jest.fn();
render(<ChangelogModal onClose={jest.fn()} />);
fireEvent.click(screen.getByText('Update my workspace'));
expect(window.open).toHaveBeenCalledWith(
'https://github.com/SigNoz/signoz/releases',
'_blank',
'noopener,noreferrer',
);
});
it('scrolls for more when Scroll for more is clicked', () => {
render(<ChangelogModal onClose={jest.fn()} />);
const scrollBtn = screen.getByTestId('scroll-more-btn');
const contentDiv = screen.getByTestId('changelog-content');
if (contentDiv) {
contentDiv.scrollTo = jest.fn();
}
fireEvent.click(scrollBtn);
if (contentDiv) {
expect(contentDiv.scrollTo).toHaveBeenCalled();
}
});
});

View File

@@ -1,63 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable sonarjs/no-identical-functions */
/* eslint-disable @typescript-eslint/explicit-function-return-type */
import { render, screen } from '@testing-library/react';
import ChangelogRenderer from '../components/ChangelogRenderer';
// Mock react-markdown to just render children as plain text
jest.mock(
'react-markdown',
() =>
function ReactMarkdown({ children }: any) {
return <div>{children}</div>;
},
);
const mockChangelog = {
id: 1,
documentId: 'changelog-doc-1',
version: '1.0.0',
createdAt: '2025-06-09T12:00:00Z',
release_date: '2025-06-10',
features: [
{
id: 1,
documentId: '1',
title: 'Feature 1',
description: 'Description for feature 1',
sort_order: 1,
createdAt: '',
updatedAt: '',
publishedAt: '',
deployment_type: 'All',
media: {
id: 1,
documentId: 'doc1',
ext: '.webp',
url: '/uploads/feature1.webp',
mime: 'image/webp',
alternativeText: null,
},
},
],
bug_fixes: 'Bug fix details',
updatedAt: '2025-06-09T12:00:00Z',
publishedAt: '2025-06-09T12:00:00Z',
maintenance: 'Maintenance details',
};
describe('ChangelogRenderer', () => {
it('renders release date', () => {
render(<ChangelogRenderer changelog={mockChangelog} />);
expect(screen.getByText('June 10, 2025')).toBeInTheDocument();
});
it('renders features, media, and description', () => {
render(<ChangelogRenderer changelog={mockChangelog} />);
expect(screen.getByText('Feature 1')).toBeInTheDocument();
expect(screen.getByAltText('Media')).toBeInTheDocument();
expect(screen.getByText('Description for feature 1')).toBeInTheDocument();
});
});

View File

@@ -1,141 +0,0 @@
.changelog-renderer {
position: relative;
padding-left: 20px;
.changelog-release-date {
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-400, #c0c1c3);
}
&-list {
display: flex;
flex-direction: column;
gap: 28px;
}
&-line {
position: absolute;
left: 0;
top: 6px;
bottom: -30px;
width: 1px;
background-color: var(--bg-slate-400, #1d212d);
.inner-ball {
position: absolute;
left: 50%;
width: 6px;
height: 6px;
border-radius: 100%;
transform: translateX(-50%);
background-color: var(--bg-robin-500, #7190f9);
}
}
ul,
ol {
list-style: none;
display: flex;
flex-direction: column;
gap: 16px;
padding-left: 30px;
li {
position: relative;
&::before {
content: '';
position: absolute;
left: -10px;
top: 10px;
width: 20px;
height: 2px;
background-color: var(--bg-robin-500, #7190f9);
transform: translate(-100%, -50%);
}
}
}
li,
p {
font-size: 14px;
line-height: 20px;
color: var(--text-vanilla-400, #c0c1c3);
}
code {
padding: 2px 4px;
background-color: var(--bg-slate-500, #161922);
border-radius: 6px;
font-size: 95%;
vertical-align: middle;
border: 1px solid var(--bg-slate-600, #1c1f2a);
}
a {
color: var(--text-robin-500, #7190f9);
font-weight: 600;
text-decoration: underline;
&:hover {
text-decoration: none;
}
}
h1,
h2,
h3,
h4,
h5,
h6 {
font-weight: 600;
color: var(--text-vanilla-100, #fff);
}
h1 {
font-size: 24px;
line-height: 32px;
}
h2 {
font-size: 20px;
line-height: 28px;
}
.changelog-media-image,
.changelog-media-video {
height: auto;
width: 100%;
overflow: hidden;
border-radius: 4px;
border: 1px solid var(--bg-slate-400, #1d212d);
}
.changelog-media-video {
margin: 12px 0;
}
}
.lightMode {
.changelog-renderer {
.changelog-release-date {
color: var(--text-ink-500);
}
&-line {
background-color: var(--bg-vanilla-300);
}
li,
p {
color: var(--text-ink-500);
}
h1,
h2,
h3,
h4,
h5,
h6 {
color: var(--text-ink-500);
}
}
}

View File

@@ -1,89 +0,0 @@
import './ChangelogRenderer.styles.scss';
import dayjs from 'dayjs';
import ReactMarkdown from 'react-markdown';
import {
ChangelogSchema,
Media,
SupportedImageTypes,
SupportedVideoTypes,
} from 'types/api/changelog/getChangelogByVersion';
interface Props {
changelog: ChangelogSchema;
}
function renderMedia(media: Media): JSX.Element | null {
if (SupportedImageTypes.includes(media.ext)) {
return (
<img
src={media.url}
alt={media.alternativeText || 'Media'}
width={800}
height={450}
className="changelog-media-image"
/>
);
}
if (SupportedVideoTypes.includes(media.ext)) {
return (
<video
autoPlay
controls
controlsList="nodownload noplaybackrate"
loop
className="changelog-media-video"
>
<source src={media.url} type={media.mime} />
<track kind="captions" src="" label="No captions available" default />
Your browser does not support the video tag.
</video>
);
}
return null;
}
function ChangelogRenderer({ changelog }: Props): JSX.Element {
const formattedReleaseDate = dayjs(changelog.release_date).format(
'MMMM D, YYYY',
);
return (
<div className="changelog-renderer">
<div className="changelog-renderer-line">
<div className="inner-ball" />
</div>
<span className="changelog-release-date">{formattedReleaseDate}</span>
{changelog.features && changelog.features.length > 0 && (
<div className="changelog-renderer-list">
{changelog.features.map((feature) => (
<div key={feature.id}>
<h2>{feature.title}</h2>
{feature.media && renderMedia(feature.media)}
<ReactMarkdown>{feature.description}</ReactMarkdown>
</div>
))}
</div>
)}
{changelog.bug_fixes && changelog.bug_fixes.length > 0 && (
<div>
<h2>Bug Fixes</h2>
{changelog.bug_fixes && (
<ReactMarkdown>{changelog.bug_fixes}</ReactMarkdown>
)}
</div>
)}
{changelog.maintenance && changelog.maintenance.length > 0 && (
<div>
<h2>Maintenance</h2>
{changelog.maintenance && (
<ReactMarkdown>{changelog.maintenance}</ReactMarkdown>
)}
</div>
)}
</div>
);
}
export default ChangelogRenderer;

View File

@@ -74,7 +74,6 @@ const formatMap = {
'MM/dd HH:mm': DATE_TIME_FORMATS.SLASH_SHORT,
'MM/DD': DATE_TIME_FORMATS.DATE_SHORT,
'YY-MM': DATE_TIME_FORMATS.YEAR_MONTH,
'MMM d, yyyy, h:mm:ss aaaa': DATE_TIME_FORMATS.DASH_DATETIME,
YY: DATE_TIME_FORMATS.YEAR_SHORT,
};

View File

@@ -194,7 +194,7 @@ function HostMetricTraces({
{!isError && traces.length > 0 && (
<div className="host-metric-traces-table">
<TraceExplorerControls
isLoading={isFetching && traces.length === 0}
isLoading={isFetching}
totalCount={totalCount}
perPageOptions={PER_PAGE_OPTIONS}
showSizeChanger={false}
@@ -203,7 +203,7 @@ function HostMetricTraces({
tableLayout="fixed"
pagination={false}
scroll={{ x: true }}
loading={isFetching && traces.length === 0}
loading={isFetching}
dataSource={traces}
columns={traceListColumns}
onRow={(): Record<string, unknown> => ({
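
Editor note: the hunk above changes the loading flags from `isFetching && traces.length === 0` to plain `isFetching`. A minimal sketch of the guarded variant, which only shows the loading state on the initial fetch and keeps already-rendered rows visible during background refetches; the types are illustrative:

interface TraceRow {
	traceID: string;
	durationNano: number;
}

function tableLoadingState(isFetching: boolean, traces: TraceRow[]): boolean {
	// Spinner only when there is nothing to show yet; later refetches leave the
	// previously rendered rows on screen instead of flashing a loader.
	return isFetching && traces.length === 0;
}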

View File

@@ -37,7 +37,7 @@ import {
ScrollText,
X,
} from 'lucide-react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { AppState } from 'store/reducers';
@@ -86,12 +86,8 @@ function HostMetricsDetails({
endTime: endMs,
}));
const lastSelectedInterval = useRef<Time | null>(null);
const [selectedInterval, setSelectedInterval] = useState<Time>(
lastSelectedInterval.current
? lastSelectedInterval.current
: (selectedTime as Time),
selectedTime as Time,
);
const [selectedView, setSelectedView] = useState<VIEWS>(
@@ -154,11 +150,10 @@ function HostMetricsDetails({
}, [initialFilters]);
useEffect(() => {
const currentSelectedInterval = lastSelectedInterval.current || selectedTime;
setSelectedInterval(currentSelectedInterval as Time);
setSelectedInterval(selectedTime as Time);
if (currentSelectedInterval !== 'custom') {
const { maxTime, minTime } = GetMinMax(currentSelectedInterval);
if (selectedTime !== 'custom') {
const { maxTime, minTime } = GetMinMax(selectedTime);
setModalTimeRange({
startTime: Math.floor(minTime / 1000000000),
@@ -186,7 +181,6 @@ function HostMetricsDetails({
const handleTimeChange = useCallback(
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
lastSelectedInterval.current = interval as Time;
setSelectedInterval(interval as Time);
if (interval === 'custom' && dateTimeRange) {
@@ -362,7 +356,6 @@ function HostMetricsDetails({
const handleClose = (): void => {
setSelectedInterval(selectedTime as Time);
lastSelectedInterval.current = null;
setSearchParams({});
if (selectedTime !== 'custom') {
@@ -437,13 +430,9 @@ function HostMetricsDetails({
>
{host.active ? 'ACTIVE' : 'INACTIVE'}
</Tag>
{host.os ? (
<Tag className="infra-monitoring-tags" bordered>
{host.os}
</Tag>
) : (
<Typography.Text>-</Typography.Text>
)}
<Tag className="infra-monitoring-tags" bordered>
{host.os}
</Tag>
<div className="progress-container">
<Progress
percent={Number((host.cpu * 100).toFixed(1))}
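
Editor note: the hunk above removes a lastSelectedInterval ref that kept the user's manually chosen time range from being overwritten by the global selectedTime on re-render. A minimal sketch of that ref pattern as a standalone hook, assuming a Time string union; this is illustrative, not the repository's exact implementation:

import { useCallback, useEffect, useRef, useState } from 'react';

type Time = '5m' | '30m' | '1h' | 'custom'; // illustrative union

function useStickyInterval(
	globalTime: Time,
): { interval: Time; onChange: (next: Time) => void; reset: () => void } {
	// Remembers the interval the user explicitly picked inside the drawer.
	const lastSelected = useRef<Time | null>(null);
	const [interval, setIntervalValue] = useState<Time>(globalTime);

	// When the global time changes, prefer the user's explicit choice if any.
	useEffect(() => {
		setIntervalValue(lastSelected.current ?? globalTime);
	}, [globalTime]);

	const onChange = useCallback((next: Time): void => {
		lastSelected.current = next;
		setIntervalValue(next);
	}, []);

	// Called on close: forget the local choice and fall back to the global time.
	const reset = useCallback((): void => {
		lastSelected.current = null;
		setIntervalValue(globalTime);
	}, [globalTime]);

	return { interval, onChange, reset };
}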

View File

@@ -15,12 +15,11 @@ import {
} from 'container/TopNav/DateTimeSelectionV2/config';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useMultiIntersectionObserver } from 'hooks/useMultiIntersectionObserver';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { QueryFunctionContext, useQueries, UseQueryResult } from 'react-query';
import { useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
@@ -54,11 +53,6 @@ function Metrics({
featureFlags?.find((flag) => flag.name === FeatureKeys.DOT_METRICS_ENABLED)
?.active || false;
const {
visibilities,
setElement,
} = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });
const queryPayloads = useMemo(
() =>
getHostQueryPayload(
@@ -71,15 +65,11 @@ function Metrics({
);
const queries = useQueries(
queryPayloads.map((payload, index) => ({
queryPayloads.map((payload) => ({
queryKey: ['host-metrics', payload, ENTITY_VERSION_V4, 'HOST'],
queryFn: ({
signal,
}: QueryFunctionContext): Promise<
SuccessResponse<MetricRangePayloadProps>
> => GetMetricQueryRange(payload, ENTITY_VERSION_V4, signal),
enabled: !!payload && visibilities[index],
keepPreviousData: true,
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(payload, ENTITY_VERSION_V4),
enabled: !!payload,
})),
);
@@ -153,7 +143,7 @@ function Metrics({
query: UseQueryResult<SuccessResponse<MetricRangePayloadProps>, unknown>,
idx: number,
): JSX.Element => {
if ((!query.data && query.isLoading) || !visibilities[idx]) {
if (query.isLoading) {
return <Skeleton />;
}
@@ -191,7 +181,7 @@ function Metrics({
</div>
<Row gutter={24} className="host-metrics-container">
{queries.map((query, idx) => (
<Col ref={setElement(idx)} span={12} key={hostWidgetInfo[idx].title}>
<Col span={12} key={hostWidgetInfo[idx].title}>
<Typography.Text>{hostWidgetInfo[idx].title}</Typography.Text>
<Card bordered className="host-metrics-card" ref={graphRef}>
{renderCardContent(query, idx)}
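
Editor note: the hunk above drops the intersection-observer gating (`enabled: !!payload && visibilities[index]` with `keepPreviousData: true`), so every chart query fires immediately on mount. A minimal sketch of gating a react-query request on element visibility with a plain IntersectionObserver, assuming react-query v3; the payload type and fetcher are placeholders:

import { useCallback, useEffect, useRef, useState } from 'react';
import { useQuery } from 'react-query';

function useIsVisible<T extends Element>(
	threshold = 0.1,
): { ref: (node: T | null) => void; visible: boolean } {
	const [visible, setVisible] = useState(false);
	const observer = useRef<IntersectionObserver | null>(null);

	// Callback ref: re-observe whenever the node changes.
	const ref = useCallback(
		(node: T | null): void => {
			observer.current?.disconnect();
			if (!node) return;
			observer.current = new IntersectionObserver(
				([entry]) => setVisible(entry.isIntersecting),
				{ threshold },
			);
			observer.current.observe(node);
		},
		[threshold],
	);

	useEffect(
		() => (): void => {
			observer.current?.disconnect();
		},
		[],
	);

	return { ref, visible };
}

// The query only runs once the card has scrolled into view; keepPreviousData
// avoids flashing a skeleton on later refetches.
function useLazyChartData(
	payload: object,
	fetcher: (signal?: AbortSignal) => Promise<unknown>,
): { ref: (node: HTMLDivElement | null) => void; query: ReturnType<typeof useQuery> } {
	const { ref, visible } = useIsVisible<HTMLDivElement>();
	const query = useQuery(['host-metrics', payload], ({ signal }) => fetcher(signal), {
		enabled: !!payload && visible,
		keepPreviousData: true,
	});
	return { ref, query };
}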

View File

@@ -71,7 +71,7 @@ function LogDetail({
const [contextQuery, setContextQuery] = useState<Query | undefined>();
const [filters, setFilters] = useState<TagFilter | null>(null);
const [isEdit, setIsEdit] = useState<boolean>(false);
const { stagedQuery } = useQueryBuilder();
const { initialDataSource, stagedQuery } = useQueryBuilder();
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
@@ -81,7 +81,7 @@ function LogDetail({
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: DataSource.LOGS,
dataSource: initialDataSource || DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});

View File

@@ -5,19 +5,17 @@ import cx from 'classnames';
import { OPERATORS } from 'constants/queryBuilder';
import { FontSize } from 'container/OptionsMenu/types';
import { memo, MouseEvent, ReactNode, useMemo } from 'react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
function AddToQueryHOC({
fieldKey,
fieldValue,
onAddToQuery,
fontSize,
dataType = DataTypes.EMPTY,
children,
}: AddToQueryHOCProps): JSX.Element {
const handleQueryAdd = (event: MouseEvent<HTMLDivElement>): void => {
event.stopPropagation();
onAddToQuery(fieldKey, fieldValue, OPERATORS['='], undefined, dataType);
onAddToQuery(fieldKey, fieldValue, OPERATORS['=']);
};
const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
@@ -37,20 +35,9 @@ function AddToQueryHOC({
export interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
onAddToQuery: (
fieldKey: string,
fieldValue: string,
operator: string,
isJSON?: boolean,
dataType?: DataTypes,
) => void;
onAddToQuery: (fieldKey: string, fieldValue: string, operator: string) => void;
fontSize: FontSize;
dataType?: DataTypes;
children: ReactNode;
}
AddToQueryHOC.defaultProps = {
dataType: DataTypes.EMPTY,
};
export default memo(AddToQueryHOC);
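
Editor note: the hunk above narrows onAddToQuery from a five-argument callback (with optional isJSON and dataType) back to three arguments. A minimal sketch of extending a callback type with trailing optional parameters so existing three-argument callers keep compiling; DataTypes is reduced to an illustrative enum:

enum DataTypes {
	EMPTY = '',
	String = 'string',
	Float64 = 'float64',
}

type OnAddToQuery = (
	fieldKey: string,
	fieldValue: string,
	operator: string,
	isJSON?: boolean,
	dataType?: DataTypes,
) => void;

// An existing three-argument handler still satisfies the widened type, because
// the extra parameters are optional and trailing.
const legacyHandler: OnAddToQuery = (fieldKey, fieldValue, operator) => {
	console.log(fieldKey, fieldValue, operator);
};

const newHandler: OnAddToQuery = (fieldKey, fieldValue, operator, isJSON, dataType) => {
	console.log(fieldKey, fieldValue, operator, isJSON, dataType ?? DataTypes.EMPTY);
};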

View File

@@ -20,7 +20,6 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties {
export const defaultTableStyle: CSSProperties = {
minWidth: '40rem',
maxWidth: '60rem',
};
export const defaultListViewPanelStyle: CSSProperties = {

View File

@@ -1,17 +1,7 @@
import { Tabs, TabsProps } from 'antd';
import {
generatePath,
matchPath,
useLocation,
useParams,
} from 'react-router-dom';
import { RouteTabProps } from './types';
interface Params {
[key: string]: string;
}
function RouteTab({
routes,
activeKey,
@@ -19,18 +9,6 @@ function RouteTab({
history,
...rest
}: RouteTabProps & TabsProps): JSX.Element {
const params = useParams<Params>();
const location = useLocation();
// Find the matching route for the current pathname
const currentRoute = routes.find((route) => {
const routePath = route.route.split('?')[0];
return matchPath(location.pathname, {
path: routePath,
exact: true,
});
});
const onChange = (activeRoute: string): void => {
if (onChangeHandler) {
onChangeHandler(activeRoute);
@@ -39,8 +17,7 @@ function RouteTab({
const selectedRoute = routes.find((e) => e.key === activeRoute);
if (selectedRoute) {
const resolvedRoute = generatePath(selectedRoute.route, params);
history.push(resolvedRoute);
history.push(selectedRoute.route);
}
};
@@ -55,8 +32,8 @@ function RouteTab({
<Tabs
onChange={onChange}
destroyInactiveTabPane
activeKey={currentRoute?.key || activeKey}
defaultActiveKey={currentRoute?.key || activeKey}
activeKey={activeKey}
defaultActiveKey={activeKey}
animated
items={items}
// eslint-disable-next-line react/jsx-props-no-spreading
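
Editor note: the hunk above removes the matchPath/generatePath logic that let RouteTab resolve parameterized routes such as '/settings/channels/edit/:channelId'. A minimal sketch of that pattern with the react-router v5 utilities; the route list and helper names are illustrative:

import { generatePath, matchPath } from 'react-router-dom';

interface TabRoute {
	key: string;
	route: string; // may contain :params and an optional query string
}

// Resolve which tab is active for the current pathname, ignoring query strings.
function findActiveTab(routes: TabRoute[], pathname: string): TabRoute | undefined {
	return routes.find((route) =>
		matchPath(pathname, { path: route.route.split('?')[0], exact: true }),
	);
}

// Fill path params (e.g. :channelId) before navigating to the selected tab.
function resolveTabPath(route: TabRoute, params: Record<string, string>): string {
	return generatePath(route.route, params);
}

// Example:
// resolveTabPath({ key: 'edit', route: '/settings/channels/edit/:channelId' }, { channelId: '42' })
//   -> '/settings/channels/edit/42'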

View File

@@ -30,5 +30,5 @@ export enum LOCALSTORAGE {
SHOW_EXCEPTIONS_QUICK_FILTERS = 'SHOW_EXCEPTIONS_QUICK_FILTERS',
BANNER_DISMISSED = 'BANNER_DISMISSED',
QUICK_FILTERS_SETTINGS_ANNOUNCEMENT = 'QUICK_FILTERS_SETTINGS_ANNOUNCEMENT',
FUNNEL_STEPS = 'FUNNEL_STEPS',
UNEXECUTED_FUNNELS = 'UNEXECUTED_FUNNELS',
}

View File

@@ -1,18 +0,0 @@
export const ORG_PREFERENCES = {
ORG_ONBOARDING: 'org_onboarding',
WELCOME_CHECKLIST_DO_LATER: 'welcome_checklist_do_later',
WELCOME_CHECKLIST_SEND_LOGS_SKIPPED: 'welcome_checklist_send_logs_skipped',
WELCOME_CHECKLIST_SEND_TRACES_SKIPPED: 'welcome_checklist_send_traces_skipped',
WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED:
'welcome_checklist_setup_alerts_skipped',
WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED:
'welcome_checklist_setup_saved_view_skipped',
WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED:
'welcome_checklist_send_infra_metrics_skipped',
WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED:
'welcome_checklist_setup_dashboards_skipped',
WELCOME_CHECKLIST_SETUP_WORKSPACE_SKIPPED:
'welcome_checklist_setup_workspace_skipped',
WELCOME_CHECKLIST_ADD_DATA_SOURCE_SKIPPED:
'welcome_checklist_add_data_source_skipped',
};

View File

@@ -46,5 +46,4 @@ export enum QueryParams {
msgSystem = 'msgSystem',
destination = 'destination',
kindString = 'kindString',
tab = 'tab',
}

View File

@@ -29,12 +29,12 @@ const ROUTES = {
ALERT_OVERVIEW: '/alerts/overview',
ALL_CHANNELS: '/settings/channels',
CHANNELS_NEW: '/settings/channels/new',
CHANNELS_EDIT: '/settings/channels/edit/:channelId',
CHANNELS_EDIT: '/settings/channels/:id',
ALL_ERROR: '/exceptions',
ERROR_DETAIL: '/error-detail',
VERSION: '/status',
MY_SETTINGS: '/my-settings',
SETTINGS: '/settings',
MY_SETTINGS: '/settings/my-settings',
ORG_SETTINGS: '/settings/org-settings',
CUSTOM_DOMAIN_SETTINGS: '/settings/custom-domain-settings',
API_KEYS: '/settings/api-keys',
@@ -52,7 +52,7 @@ const ROUTES = {
LIST_LICENSES: '/licenses',
LOGS_INDEX_FIELDS: '/logs-explorer/index-fields',
TRACE_EXPLORER: '/trace-explorer',
BILLING: '/settings/billing',
BILLING: '/billing',
SUPPORT: '/support',
LOGS_SAVE_VIEWS: '/logs/saved-views',
TRACES_SAVE_VIEWS: '/traces/saved-views',
@@ -60,12 +60,10 @@ const ROUTES = {
TRACES_FUNNELS_DETAIL: '/traces/funnels/:funnelId',
WORKSPACE_LOCKED: '/workspace-locked',
WORKSPACE_SUSPENDED: '/workspace-suspended',
SHORTCUTS: '/settings/shortcuts',
SHORTCUTS: '/shortcuts',
INTEGRATIONS: '/integrations',
MESSAGING_QUEUES_BASE: '/messaging-queues',
MESSAGING_QUEUES_KAFKA: '/messaging-queues/kafka',
MESSAGING_QUEUES_KAFKA_DETAIL: '/messaging-queues/kafka/detail',
INFRASTRUCTURE_MONITORING_BASE: '/infrastructure-monitoring',
INFRASTRUCTURE_MONITORING_HOSTS: '/infrastructure-monitoring/hosts',
INFRASTRUCTURE_MONITORING_KUBERNETES: '/infrastructure-monitoring/kubernetes',
MESSAGING_QUEUES_CELERY_TASK: '/messaging-queues/celery-task',
@@ -73,7 +71,6 @@ const ROUTES = {
METRICS_EXPLORER: '/metrics-explorer/summary',
METRICS_EXPLORER_EXPLORER: '/metrics-explorer/explorer',
METRICS_EXPLORER_VIEWS: '/metrics-explorer/views',
API_MONITORING_BASE: '/api-monitoring',
API_MONITORING: '/api-monitoring/explorer',
METRICS_EXPLORER_BASE: '/metrics-explorer',
WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',

View File

@@ -1,4 +0,0 @@
export const USER_PREFERENCES = {
SIDENAV_PINNED: 'sidenav_pinned',
NAV_SHORTCUTS: 'nav_shortcuts',
};

View File

@@ -21,9 +21,9 @@ function AlertChannels({ allChannels }: AlertChannelsProps): JSX.Element {
const [action] = useComponentPermission(['new_alert_action'], user.role);
const onClickEditHandler = useCallback((id: string) => {
history.push(
history.replace(
generatePath(ROUTES.CHANNELS_EDIT, {
channelId: id,
id,
}),
);
}, []);

View File

@@ -1,4 +0,0 @@
.alert-channels-container {
width: 90%;
margin: 12px auto;
}

View File

@@ -120,19 +120,14 @@ describe('Create Alert Channel', () => {
expect(screen.getByText('button_test_channel')).toBeInTheDocument();
expect(screen.getByText('button_return')).toBeInTheDocument();
});
it('Should check if saving the form without filling the name displays error notification', async () => {
it('Should check if saving the form without filling the name displays "Something went wrong"', async () => {
const saveButton = screen.getByRole('button', {
name: 'button_save_channel',
});
fireEvent.click(saveButton);
await waitFor(() =>
expect(errorNotification).toHaveBeenCalledWith({
message: 'Error',
description: 'channel_name_required',
}),
);
await waitFor(() => expect(showErrorModal).toHaveBeenCalled());
});
it('Should check if clicking on Test button shows "An alert has been sent to this channel" success message if testing passes', async () => {
server.use(

View File

@@ -1,5 +1,3 @@
import './AllAlertChannels.styles.scss';
import { PlusOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import getAll from 'api/channels/getAll';
@@ -58,7 +56,7 @@ function AlertChannels(): JSX.Element {
}
return (
<div className="alert-channels-container">
<>
<ButtonContainer>
<Paragraph ellipsis type="secondary">
{t('sending_channels_note')}
@@ -89,7 +87,7 @@ function AlertChannels(): JSX.Element {
</ButtonContainer>
<AlertChannelsComponent allChannels={data?.data || []} />
</div>
</>
);
}

View File

@@ -1,37 +1,8 @@
// Earlier we were having app-banner-container class
// we change it to app-banner-wrapper as the adblocker was blocking the app-banner-container class
// Keep an eye on What classnames are used in the codebase
.app-banner-wrapper {
position: relative;
width: 100%;
}
.app-layout {
position: relative;
height: 100%;
width: 100%;
&.isWorkspaceRestricted {
height: calc(100% - 32px);
// same styles as its either trial expired or payment failed
&.isTrialExpired {
height: calc(100% - 64px);
}
&.isPaymentFailed {
height: calc(100% - 64px);
}
}
&.isTrialExpired {
height: calc(100% - 32px);
}
&.isPaymentFailed {
height: calc(100% - 32px);
}
.app-content {
width: calc(100% - 64px); // width of the sidebar
z-index: 0;
@@ -51,12 +22,6 @@
width: 100%;
}
}
&.side-nav-pinned {
.app-content {
width: calc(100% - 240px);
}
}
}
.chat-support-gateway {
@@ -192,9 +157,3 @@
text-underline-offset: 2px;
}
}
.workspace-restricted-banner,
.trial-expiry-banner,
.payment-failed-banner {
height: 32px;
}

View File

@@ -7,7 +7,6 @@ import * as Sentry from '@sentry/react';
import { Flex } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getChangelogByVersion from 'api/changelog/getChangelogByVersion';
import logEvent from 'api/common/logEvent';
import manageCreditCardApi from 'api/v1/portal/create';
import getUserLatestVersion from 'api/v1/version/getLatestVersion';
@@ -19,7 +18,6 @@ import { Events } from 'constants/events';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import { USER_PREFERENCES } from 'constants/userPreferences';
import SideNav from 'container/SideNav';
import TopNav from 'container/TopNav';
import dayjs from 'dayjs';
@@ -29,6 +27,7 @@ import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { isNull } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { INTEGRATION_TYPES } from 'pages/Integrations/utils';
import { useAppContext } from 'providers/App/App';
import {
ReactNode,
@@ -41,10 +40,9 @@ import {
import { Helmet } from 'react-helmet-async';
import { useTranslation } from 'react-i18next';
import { useMutation, useQueries } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { useDispatch } from 'react-redux';
import { matchPath, useLocation } from 'react-router-dom';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import {
UPDATE_CURRENT_ERROR,
@@ -52,16 +50,14 @@ import {
UPDATE_LATEST_VERSION,
UPDATE_LATEST_VERSION_ERROR,
} from 'types/actions/app';
import { ErrorResponse, SuccessResponse, SuccessResponseV2 } from 'types/api';
import { SuccessResponseV2 } from 'types/api';
import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';
import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
import APIError from 'types/api/error';
import {
LicenseEvent,
LicensePlatform,
LicenseState,
} from 'types/api/licensesV3/getActive';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { eventEmitter } from 'utils/getEventEmitter';
import {
@@ -84,8 +80,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
featureFlags,
isFetchingFeatureFlags,
featureFlagsFetchError,
userPreferences,
updateChangelog,
} = useAppContext();
const { notifications } = useNotifications();
@@ -98,10 +92,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
const [showSlowApiWarning, setShowSlowApiWarning] = useState(false);
const [slowApiWarningShown, setSlowApiWarningShown] = useState(false);
const { latestVersion } = useSelector<AppState, AppReducer>(
(state) => state.app,
);
const handleBillingOnSuccess = (
data: SuccessResponseV2<CheckoutSuccessPayloadProps>,
): void => {
@@ -138,11 +128,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
const { isCloudUser: isCloudUserVal } = useGetTenantLicense();
const [
getUserVersionResponse,
getUserLatestVersionResponse,
getChangelogByVersionResponse,
] = useQueries([
const [getUserVersionResponse, getUserLatestVersionResponse] = useQueries([
{
queryFn: getUserVersion,
queryKey: ['getUserVersion', user?.accessJwt],
@@ -153,12 +139,6 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
queryKey: ['getUserLatestVersion', user?.accessJwt],
enabled: isLoggedIn,
},
{
queryFn: (): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> =>
getChangelogByVersion(latestVersion),
queryKey: ['getChangelogByVersion', latestVersion],
enabled: isLoggedIn && !isCloudUserVal && Boolean(latestVersion),
},
]);
useEffect(() => {
@@ -217,7 +197,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
if (
getUserVersionResponse.isFetched &&
getUserVersionResponse.isSuccess &&
getUserLatestVersionResponse.isSuccess &&
getUserVersionResponse.data &&
getUserVersionResponse.data.payload
) {
@@ -255,31 +235,12 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
getUserVersionResponse.isLoading,
getUserVersionResponse.isError,
getUserVersionResponse.data,
getUserVersionResponse.isSuccess,
getUserLatestVersionResponse.isFetched,
getUserVersionResponse.isFetched,
getUserLatestVersionResponse.isSuccess,
notifications,
]);
useEffect(() => {
if (
getChangelogByVersionResponse.isFetched &&
getChangelogByVersionResponse.isSuccess &&
getChangelogByVersionResponse.data &&
getChangelogByVersionResponse.data.payload
) {
updateChangelog(getChangelogByVersionResponse.data.payload);
}
}, [
updateChangelog,
getChangelogByVersionResponse.isFetched,
getChangelogByVersionResponse.isLoading,
getChangelogByVersionResponse.isError,
getChangelogByVersionResponse.data,
getChangelogByVersionResponse.isSuccess,
]);
const isToDisplayLayout = isLoggedIn;
const routeKey = useMemo(() => getRouteKey(pathname), [pathname]);
@@ -369,6 +330,53 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
});
}, [manageCreditCard]);
const isHome = (): boolean => routeKey === 'HOME';
const isLogsView = (): boolean =>
routeKey === 'LOGS' ||
routeKey === 'LOGS_EXPLORER' ||
routeKey === 'LOGS_PIPELINES' ||
routeKey === 'LOGS_SAVE_VIEWS';
const isApiMonitoringView = (): boolean => routeKey === 'API_MONITORING';
const isExceptionsView = (): boolean => routeKey === 'ALL_ERROR';
const isTracesView = (): boolean =>
routeKey === 'TRACES_EXPLORER' || routeKey === 'TRACES_SAVE_VIEWS';
const isMessagingQueues = (): boolean =>
routeKey === 'MESSAGING_QUEUES_KAFKA' ||
routeKey === 'MESSAGING_QUEUES_KAFKA_DETAIL' ||
routeKey === 'MESSAGING_QUEUES_CELERY_TASK' ||
routeKey === 'MESSAGING_QUEUES_OVERVIEW';
const isCloudIntegrationPage = (): boolean =>
routeKey === 'INTEGRATIONS' &&
new URLSearchParams(window.location.search).get('integration') ===
INTEGRATION_TYPES.AWS_INTEGRATION;
const isDashboardListView = (): boolean => routeKey === 'ALL_DASHBOARD';
const isAlertHistory = (): boolean => routeKey === 'ALERT_HISTORY';
const isAlertOverview = (): boolean => routeKey === 'ALERT_OVERVIEW';
const isInfraMonitoring = (): boolean =>
routeKey === 'INFRASTRUCTURE_MONITORING_HOSTS' ||
routeKey === 'INFRASTRUCTURE_MONITORING_KUBERNETES';
const isTracesFunnels = (): boolean => routeKey === 'TRACES_FUNNELS';
const isTracesFunnelDetails = (): boolean =>
!!matchPath(pathname, ROUTES.TRACES_FUNNELS_DETAIL);
const isPathMatch = (regex: RegExp): boolean => regex.test(pathname);
const isDashboardView = (): boolean =>
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+$/);
const isDashboardWidgetView = (): boolean =>
isPathMatch(/^\/dashboard\/[a-zA-Z0-9_-]+\/new$/);
const isTraceDetailsView = (): boolean =>
isPathMatch(/^\/trace\/[a-zA-Z0-9]+(\?.*)?$/);
useEffect(() => {
if (isDarkMode) {
document.body.classList.remove('lightMode');
@@ -585,84 +593,61 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
</div>
);
const sideNavPinned = userPreferences?.find(
(preference) => preference.name === USER_PREFERENCES.SIDENAV_PINNED,
)?.value as boolean;
const SHOW_TRIAL_EXPIRY_BANNER =
showTrialExpiryBanner && !showPaymentFailedWarning;
const SHOW_WORKSPACE_RESTRICTED_BANNER = showWorkspaceRestricted;
const SHOW_PAYMENT_FAILED_BANNER =
!showTrialExpiryBanner && showPaymentFailedWarning;
return (
<Layout className={cx(isDarkMode ? 'darkMode dark' : 'lightMode')}>
<Helmet>
<title>{pageTitle}</title>
</Helmet>
{isLoggedIn && (
<div className={cx('app-banner-wrapper')}>
{SHOW_TRIAL_EXPIRY_BANNER && (
<div className="trial-expiry-banner">
You are in free trial period. Your free trial will end on{' '}
<span>{getFormattedDate(trialInfo?.trialEnd || Date.now())}.</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleUpgrade}>
upgrade
</a>
to continue using SigNoz features.
</span>
) : (
'Please contact your administrator for upgrading to a paid plan.'
)}
</div>
{showTrialExpiryBanner && !showPaymentFailedWarning && (
<div className="trial-expiry-banner">
You are in free trial period. Your free trial will end on{' '}
<span>{getFormattedDate(trialInfo?.trialEnd || Date.now())}.</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleUpgrade}>
upgrade
</a>
to continue using SigNoz features.
</span>
) : (
'Please contact your administrator for upgrading to a paid plan.'
)}
</div>
)}
{SHOW_WORKSPACE_RESTRICTED_BANNER && renderWorkspaceRestrictedBanner()}
{showWorkspaceRestricted && renderWorkspaceRestrictedBanner()}
{SHOW_PAYMENT_FAILED_BANNER && (
<div className="payment-failed-banner">
Your bill payment has failed. Your workspace will get suspended on{' '}
<span>
{getFormattedDateWithMinutes(
dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(),
)}
.
</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleFailedPayment}>
pay the bill
</a>
to continue using SigNoz features.
</span>
) : (
' Please contact your administrator to pay the bill.'
)}
</div>
{!showTrialExpiryBanner && showPaymentFailedWarning && (
<div className="payment-failed-banner">
Your bill payment has failed. Your workspace will get suspended on{' '}
<span>
{getFormattedDateWithMinutes(
dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(),
)}
.
</span>
{user.role === USER_ROLES.ADMIN ? (
<span>
{' '}
Please{' '}
<a className="upgrade-link" onClick={handleFailedPayment}>
pay the bill
</a>
to continue using SigNoz features.
</span>
) : (
' Please contact your administrator to pay the bill.'
)}
</div>
)}
<Flex
className={cx(
'app-layout',
isDarkMode ? 'darkMode dark' : 'lightMode',
sideNavPinned ? 'side-nav-pinned' : '',
SHOW_WORKSPACE_RESTRICTED_BANNER ? 'isWorkspaceRestricted' : '',
SHOW_TRIAL_EXPIRY_BANNER ? 'isTrialExpired' : '',
SHOW_PAYMENT_FAILED_BANNER ? 'isPaymentFailed' : '',
)}
className={cx('app-layout', isDarkMode ? 'darkMode dark' : 'lightMode')}
>
{isToDisplayLayout && !renderFullScreen && (
<SideNav isPinned={sideNavPinned} />
)}
{isToDisplayLayout && !renderFullScreen && <SideNav />}
<div
className={cx('app-content', {
'full-screen-content': renderFullScreen,
@@ -672,7 +657,32 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<LayoutContent data-overlayscrollbars-initialize>
<OverlayScrollbar>
<ChildrenContainer>
<ChildrenContainer
style={{
margin:
isHome() ||
isLogsView() ||
isTracesView() ||
isDashboardView() ||
isDashboardWidgetView() ||
isDashboardListView() ||
isAlertHistory() ||
isAlertOverview() ||
isMessagingQueues() ||
isCloudIntegrationPage() ||
isInfraMonitoring() ||
isApiMonitoringView() ||
isExceptionsView()
? 0
: '0 1rem',
...(isTraceDetailsView() ||
isTracesFunnels() ||
isTracesFunnelDetails()
? { margin: 0 }
: {}),
}}
>
{isToDisplayLayout && !renderFullScreen && <TopNav />}
{children}
</ChildrenContainer>

View File

@@ -1,8 +1,7 @@
.billing-container {
margin-bottom: 40px;
padding-top: 36px;
width: 90%;
margin: 0 auto;
width: 65%;
.billing-summary {
margin: 24px 8px;

View File

@@ -14,7 +14,7 @@ function CloudIntegrationPage(): JSX.Element {
<HeroSection />
<RequestIntegrationBtn
type={IntegrationType.AWS_SERVICES}
message="Can't find the AWS service you're looking for? Request more integrations"
message="Cannot find the AWS service you're looking for? Request more integrations"
/>
<ServicesTabs />
</div>

View File

@@ -60,30 +60,26 @@ function CloudServiceDataCollected({
return (
<div className="cloud-service-data-collected">
{logsData && logsData.length > 0 && (
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Logs</div>
<Table
columns={logsColumns}
dataSource={logsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-logs"
/>
</div>
)}
{metricsData && metricsData.length > 0 && (
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Metrics</div>
<Table
columns={metricsColumns}
dataSource={metricsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-metrics"
/>
</div>
)}
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Logs</div>
<Table
columns={logsColumns}
dataSource={logsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-logs"
/>
</div>
<div className="cloud-service-data-collected__table">
<div className="cloud-service-data-collected__table-heading">Metrics</div>
<Table
columns={metricsColumns}
dataSource={metricsData}
// eslint-disable-next-line react/jsx-props-no-spreading
{...tableProps}
className="cloud-service-data-collected__table-metrics"
/>
</div>
</div>
);
}

View File

@@ -51,33 +51,6 @@ function ServiceStatus({
return <div className={`service-status ${className}`}>{text}</div>;
}
function getTabItems(serviceDetailsData: any): TabsProps['items'] {
const dashboards = serviceDetailsData?.assets.dashboards || [];
const dataCollected = serviceDetailsData?.data_collected || {};
const items: TabsProps['items'] = [];
if (dashboards.length) {
items.push({
key: 'dashboards',
label: `Dashboards (${dashboards.length})`,
children: <CloudServiceDashboards service={serviceDetailsData} />,
});
}
items.push({
key: 'data-collected',
label: 'Data Collected',
children: (
<CloudServiceDataCollected
logsData={dataCollected.logs || []}
metricsData={dataCollected.metrics || []}
/>
),
});
return items;
}
function ServiceDetails(): JSX.Element | null {
const urlQuery = useUrlQuery();
const cloudAccountId = urlQuery.get('cloudAccountId');
@@ -133,7 +106,23 @@ function ServiceDetails(): JSX.Element | null {
return null;
}
const tabItems = getTabItems(serviceDetailsData);
const tabItems: TabsProps['items'] = [
{
key: 'dashboards',
label: `Dashboards (${serviceDetailsData?.assets.dashboards.length})`,
children: <CloudServiceDashboards service={serviceDetailsData} />,
},
{
key: 'data-collected',
label: 'Data Collected',
children: (
<CloudServiceDataCollected
logsData={serviceDetailsData?.data_collected.logs || []}
metricsData={serviceDetailsData?.data_collected.metrics || []}
/>
),
},
];
return (
<div className="service-details">

View File

@@ -34,7 +34,7 @@ describe('Request AWS integration', () => {
expect(
screen.getByText(
/can't find what youre looking for\? request more integrations/i,
/cannot find what youre looking for\? request more integrations/i,
),
).toBeInTheDocument();

View File

@@ -1,15 +0,0 @@
.create-alert-channels-container {
width: 90%;
margin: 12px auto;
border: 1px solid var(--Slate-500, #161922);
background: var(--Ink-400, #121317);
border-radius: 3px;
padding: 16px;
.form-alert-channels-title {
margin-top: 0px;
margin-bottom: 16px;
}
}

View File

@@ -1,5 +1,3 @@
import './CreateAlertChannels.styles.scss';
import { Form } from 'antd';
import createEmail from 'api/channels/createEmail';
import createMsTeamsApi from 'api/channels/createMsTeams';
@@ -138,14 +136,6 @@ function CreateAlertChannels({
);
const onSlackHandler = useCallback(async () => {
if (!selectedConfig.api_url) {
notifications.error({
message: 'Error',
description: t('webhook_url_required'),
});
return;
}
setSavingState(true);
try {
@@ -162,7 +152,7 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [selectedConfig, notifications, t, prepareSlackRequest, showErrorModal]);
}, [prepareSlackRequest, notifications, t, showErrorModal]);
const prepareWebhookRequest = useCallback(() => {
// initial api request without auth params
@@ -200,14 +190,6 @@ function CreateAlertChannels({
}, [notifications, t, selectedConfig]);
const onWebhookHandler = useCallback(async () => {
if (!selectedConfig.api_url) {
notifications.error({
message: 'Error',
description: t('webhook_url_required'),
});
return;
}
setSavingState(true);
try {
const request = prepareWebhookRequest();
@@ -224,13 +206,7 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [
selectedConfig.api_url,
notifications,
t,
prepareWebhookRequest,
showErrorModal,
]);
}, [prepareWebhookRequest, notifications, t, showErrorModal]);
const preparePagerRequest = useCallback(() => {
const validationError = ValidatePagerChannel(selectedConfig as PagerChannel);
@@ -294,14 +270,6 @@ function CreateAlertChannels({
);
const onOpsgenieHandler = useCallback(async () => {
if (!selectedConfig.api_key) {
notifications.error({
message: 'Error',
description: t('api_key_required'),
});
return;
}
setSavingState(true);
try {
await createOpsgenie(prepareOpsgenieRequest());
@@ -317,13 +285,7 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [
selectedConfig.api_key,
notifications,
t,
prepareOpsgenieRequest,
showErrorModal,
]);
}, [prepareOpsgenieRequest, notifications, t, showErrorModal]);
const prepareEmailRequest = useCallback(
() => ({
@@ -337,14 +299,6 @@ function CreateAlertChannels({
);
const onEmailHandler = useCallback(async () => {
if (!selectedConfig.to) {
notifications.error({
message: 'Error',
description: t('to_required'),
});
return;
}
setSavingState(true);
try {
const request = prepareEmailRequest();
@@ -361,7 +315,7 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [prepareEmailRequest, notifications, t, showErrorModal, selectedConfig.to]);
}, [prepareEmailRequest, notifications, t, showErrorModal]);
const prepareMsTeamsRequest = useCallback(
() => ({
@@ -375,14 +329,6 @@ function CreateAlertChannels({
);
const onMsTeamsHandler = useCallback(async () => {
if (!selectedConfig.webhook_url) {
notifications.error({
message: 'Error',
description: t('webhook_url_required'),
});
return;
}
setSavingState(true);
try {
@@ -399,24 +345,10 @@ function CreateAlertChannels({
} finally {
setSavingState(false);
}
}, [
selectedConfig.webhook_url,
notifications,
t,
prepareMsTeamsRequest,
showErrorModal,
]);
}, [prepareMsTeamsRequest, notifications, t, showErrorModal]);
const onSaveHandler = useCallback(
async (value: ChannelType) => {
if (!selectedConfig.name) {
notifications.error({
message: 'Error',
description: t('channel_name_required'),
});
return;
}
const functionMapper = {
[ChannelType.Slack]: onSlackHandler,
[ChannelType.Webhook]: onWebhookHandler,
@@ -545,28 +477,26 @@ function CreateAlertChannels({
);
return (
<div className="create-alert-channels-container">
<FormAlertChannels
{...{
formInstance,
onTypeChangeHandler,
setSelectedConfig,
<FormAlertChannels
{...{
formInstance,
onTypeChangeHandler,
setSelectedConfig,
type,
onTestHandler,
onSaveHandler,
savingState,
testingState,
title: t('page_title_create'),
initialValue: {
type,
onTestHandler,
onSaveHandler,
savingState,
testingState,
title: t('page_title_create'),
initialValue: {
type,
...selectedConfig,
...PagerInitialConfig,
...OpsgenieInitialConfig,
...EmailInitialConfig,
},
}}
/>
</div>
...selectedConfig,
...PagerInitialConfig,
...OpsgenieInitialConfig,
...EmailInitialConfig,
},
}}
/>
);
}
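
The hunks above revolve around a validate-before-save pattern: each handler bails out with a notification when its required field is missing, and the useCallback dependency list mirrors whatever the handler reads. A minimal TypeScript sketch of that pattern, assuming a notifications object with an error method, an i18n t function and a createSlack API call; the names and config shape are illustrative stand-ins, not the component's exact code:

type ChannelConfig = { api_url?: string; name?: string };

// Hypothetical stand-ins for the component's notification and i18n helpers.
const notifications = {
  error: (msg: { message: string; description: string }): void => console.error(msg),
};
const t = (key: string): string => key;

async function saveSlackChannel(
  selectedConfig: ChannelConfig,
  prepareSlackRequest: () => Record<string, unknown>,
  createSlack: (req: Record<string, unknown>) => Promise<void>,
): Promise<void> {
  // Guard clause: refuse to call the API when the required field is missing.
  if (!selectedConfig.api_url) {
    notifications.error({ message: 'Error', description: t('webhook_url_required') });
    return;
  }
  await createSlack(prepareSlackRequest());
}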

View File

@@ -28,6 +28,7 @@ import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useParams } from 'react-router-dom';
import APIError from 'types/api/error';
function EditAlertChannels({
@@ -52,11 +53,7 @@ function EditAlertChannels({
const [savingState, setSavingState] = useState<boolean>(false);
const [testingState, setTestingState] = useState<boolean>(false);
const { notifications } = useNotifications();
// Extract channelId from URL pathname since useParams doesn't work in nested routing
const { pathname } = window.location;
const channelIdMatch = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
const id = channelIdMatch ? channelIdMatch[1] : '';
const { id } = useParams<{ id: string }>();
const [type, setType] = useState<ChannelType>(
initialValue?.type ? (initialValue.type as ChannelType) : ChannelType.Slack,
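
The swap between useParams and manual pathname parsing above is easy to sketch in isolation. A minimal helper, assuming the route shape /settings/channels/edit/:id taken from the regex in the hunk; the function name is hypothetical:

// Extracts the channel id from the current pathname when useParams is unavailable
// (for example in nested routing), returning '' when the path does not match.
function extractChannelId(pathname: string): string {
  const match = pathname.match(/\/settings\/channels\/edit\/([^/]+)/);
  return match ? match[1] : '';
}

extractChannelId('/settings/channels/edit/42'); // '42'
extractChannelId('/settings/general'); // ''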

View File

@@ -1,173 +1,30 @@
.empty-logs-search {
&__container {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 240px;
}
&__content {
.empty-logs-search-container {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 240px;
.empty-logs-search-container-content {
display: flex;
flex-direction: column;
gap: 4px;
color: var(--text-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
align-items: flex-start;
.empty-state-svg {
height: 50px;
width: 50px;
}
}
&__sub-text {
font-weight: 600;
}
&__container {
&--custom-message {
height: 445px;
.empty-state-svg {
height: 32px;
width: 32px;
}
.empty-logs-search {
&__header {
display: flex;
align-items: center;
gap: 4px;
}
&__title {
color: var(--bg-vanilla-100);
font-size: 14px;
font-weight: 500;
line-height: 20px;
letter-spacing: -0.07px;
}
&__subtitle {
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
}
&__description {
font-size: 14px;
color: var(--text-vanilla-400);
line-height: 20px;
}
&__description-list {
margin: 0;
margin-top: 8px;
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
display: flex;
flex-direction: column;
gap: 6px;
list-style: none;
padding: 0;
font-family: Inter;
}
&__description-list li {
position: relative;
padding-left: 20px;
}
&__description-list li::before {
content: '';
font-family: Inter;
position: absolute;
left: 0;
color: var(--bg-robin-400);
font-weight: bold;
font-size: 16px;
line-height: 20px;
}
&__clear-filters-btn {
display: flex;
width: 468px;
font-family: Inter;
padding: 12px;
justify-content: space-between;
align-items: flex-start;
border-radius: 3px;
border: 1px dashed var(--bg-slate-500);
background: transparent;
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.07px;
cursor: pointer;
margin-top: 12px;
}
&__clear-filters-btn-icon {
display: flex;
align-items: center;
gap: 6px;
}
&__row {
display: flex;
flex-direction: row;
align-items: flex-end;
max-width: 825px;
gap: 25px;
justify-content: center;
margin-left: 21px;
}
&__content {
display: flex;
flex-direction: column;
gap: 4px;
min-width: 260px;
}
&__resources-card {
background: var(--bg-ink-400);
border: 1px solid var(--bg-slate-500);
border-radius: 4px;
width: 332px;
}
&__resources-title {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 11px;
font-weight: 600;
line-height: 18px;
letter-spacing: 0.88px;
text-transform: uppercase;
padding: 16px 16px 12px;
border-bottom: 1px solid var(--bg-slate-500);
height: 46px;
}
&__resources-links {
padding: 16px;
display: flex;
flex-direction: column;
gap: 16px;
.learn-more {
height: 18px;
}
}
}
.sub-text {
font-weight: 600;
}
}
}

View File

@@ -2,24 +2,16 @@ import './EmptyLogsSearch.styles.scss';
import { Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import LearnMore from 'components/LearnMore/LearnMore';
import { EmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import { Delete } from 'lucide-react';
import { useEffect, useRef } from 'react';
import { DataSource, PanelTypeKeys } from 'types/common/queryBuilder';
interface EmptyLogsSearchProps {
dataSource: DataSource;
panelType: PanelTypeKeys;
customMessage?: EmptyLogsListConfig;
}
export default function EmptyLogsSearch({
dataSource,
panelType,
customMessage,
}: EmptyLogsSearchProps): JSX.Element {
}: {
dataSource: DataSource;
panelType: PanelTypeKeys;
}): JSX.Element {
const logEventCalledRef = useRef(false);
useEffect(() => {
if (!logEventCalledRef.current) {
@@ -38,80 +30,18 @@ export default function EmptyLogsSearch({
}, []);
return (
<div
className={cx('empty-logs-search__container', {
'empty-logs-search__container--custom-message': !!customMessage,
})}
>
<div className="empty-logs-search__row">
<div className="empty-logs-search__content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
{customMessage ? (
<>
<div className="empty-logs-search__header">
<Typography.Text className="empty-logs-search__title">
{customMessage.title}
</Typography.Text>
{customMessage.subTitle && (
<Typography.Text className="empty-logs-search__subtitle">
{customMessage.subTitle}
</Typography.Text>
)}
</div>
{Array.isArray(customMessage.description) ? (
<ul className="empty-logs-search__description-list">
{customMessage.description.map((desc) => (
<li key={desc}>{desc}</li>
))}
</ul>
) : (
<Typography.Text className="empty-logs-search__description">
{customMessage.description}
</Typography.Text>
)}
{/* Clear filters button */}
{customMessage.showClearFiltersButton && (
<button
type="button"
className="empty-logs-search__clear-filters-btn"
onClick={customMessage.onClearFilters}
>
{customMessage.clearFiltersButtonText}
<span className="empty-logs-search__clear-filters-btn-icon">
<Delete size={14} />
Clear filters
</span>
</button>
)}
</>
) : (
<Typography.Text>
<span className="empty-logs-search__sub-text">
This query had no results.{' '}
</span>
Edit your query and try again!
</Typography.Text>
)}
</div>
{customMessage?.documentationLinks && (
<div className="empty-logs-search__resources-card">
<div className="empty-logs-search__resources-title">RESOURCES</div>
<div className="empty-logs-search__resources-links">
{customMessage.documentationLinks.map((link) => (
<LearnMore key={link.text} text={link.text} url={link.url} />
))}
</div>
</div>
)}
<div className="empty-logs-search-container">
<div className="empty-logs-search-container-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
<Typography.Text>
<span className="sub-text">This query had no results. </span>
Edit your query and try again!
</Typography.Text>
</div>
</div>
);
}
EmptyLogsSearch.defaultProps = {
customMessage: null,
};
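
The change above turns EmptyLogsSearch into a component with an optional customMessage prop backed by defaultProps. A minimal sketch of that branch, with EmptyLogsListConfig reduced to the fields used here and the helper name invented for illustration:

interface EmptyLogsListConfig {
  title: string;
  subTitle?: string;
  description: string | string[];
}

interface EmptyLogsSearchProps {
  dataSource: string;
  panelType: string;
  customMessage?: EmptyLogsListConfig;
}

// Render either the custom message (list descriptions joined for the sketch) or the
// generic "no results" text, mirroring the branch in the component above.
function emptyStateText({ customMessage }: EmptyLogsSearchProps): string {
  if (!customMessage) {
    return 'This query had no results. Edit your query and try again!';
  }
  const description = Array.isArray(customMessage.description)
    ? customMessage.description.join('; ')
    : customMessage.description;
  return `${customMessage.title}: ${description}`;
}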

View File

@@ -57,9 +57,7 @@ function FormAlertChannels({
return (
<>
<Typography.Title level={4} className="form-alert-channels-title">
{title}
</Typography.Title>
<Typography.Title level={3}>{title}</Typography.Title>
<Form initialValues={initialValue} layout="vertical" form={formInstance}>
<Form.Item label={t('field_channel_name')} labelAlign="left" name="name">
@@ -149,7 +147,7 @@ function FormAlertChannels({
</Button>
<Button
onClick={(): void => {
history.replace(ROUTES.ALL_CHANNELS);
history.replace(ROUTES.SETTINGS);
}}
>
{t('button_return')}

View File

@@ -212,12 +212,9 @@ function QuerySection({
return null;
}
};
const step2Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '2' : '1';
return (
<>
<StepHeading> {t('alert_form_step2', { step: step2Label })}</StepHeading>
<StepHeading> {t('alert_form_step2')}</StepHeading>
<FormContainer>
<div>{renderTabs(alertType)}</div>
{renderQuerySection(currentTab)}

View File

@@ -371,11 +371,9 @@ function RuleOptions({
selectedCategory?.name,
);
const step3Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '3' : '2';
return (
<>
<StepHeading>{t('alert_form_step3', { step: step3Label })}</StepHeading>
<StepHeading>{t('alert_form_step3')}</StepHeading>
<FormContainer>
{queryCategory === EQueryType.PROM && renderPromRuleOptions()}
{queryCategory !== EQueryType.PROM &&

View File

@@ -85,13 +85,7 @@ function LabelSelect({
}, [handleBlur]);
const handleLabelChange = (event: ChangeEvent<HTMLInputElement>): void => {
// Remove the colon if it's the last character.
// As the colon is used to separate the key and value in the query.
setCurrentVal(
event.target?.value.endsWith(':')
? event.target?.value.slice(0, -1)
: event.target?.value,
);
setCurrentVal(event.target?.value.replace(':', ''));
};
const handleClose = (key: string): void => {
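
The two variants in the hunk above handle colons differently: one strips only a trailing colon (since the colon separates key and value in the query), the other removes the first colon wherever it appears. A small sketch contrasting them; the function names are illustrative:

// Drops a colon only when it is the last character typed.
function stripTrailingColon(value: string): string {
  return value.endsWith(':') ? value.slice(0, -1) : value;
}

// Removes the first colon anywhere in the string.
function replaceFirstColon(value: string): string {
  return value.replace(':', '');
}

stripTrailingColon('env:'); // 'env'
stripTrailingColon('env:prod'); // 'env:prod'
replaceFirstColon('env:prod'); // 'envprod'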

View File

@@ -2,7 +2,6 @@
overflow: auto;
margin: 8px -8px;
margin-right: 0;
margin-bottom: 64px;
.react-grid-layout {
border: none !important;

View File

@@ -109,7 +109,7 @@ describe('GridCardLayout Utils', () => {
builder: {
queryData: [
{
stepInterval: 60,
stepInterval: 30,
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
queryName: 'A',
@@ -181,7 +181,7 @@ describe('GridCardLayout Utils', () => {
expect(result.builder.queryData).toHaveLength(2);
expect(result.builder.queryData[0].stepInterval).toBe(180);
expect(result.builder.queryData[1].stepInterval).toBe(45); // 45 is the stepInterval of the second query - custom value
expect(result.builder.queryData[1].stepInterval).toBe(180);
});
it('should use calculated stepInterval when original is undefined', () => {

View File

@@ -2,7 +2,7 @@ import { FORMULA_REGEXP } from 'constants/regExp';
import { isEmpty, isEqual } from 'lodash-es';
import { Layout } from 'react-grid-layout';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] =>
layout.map((obj) =>
@@ -99,12 +99,6 @@ export function updateStepInterval(
): Query {
const stepIntervalPoints = getStepIntervalPoints(minTime, maxTime);
// if the user hasn't entered anything manually (i.e. we still have the default value of 60), do the interval adjustment for bar panels; otherwise apply the user's value
const getSteps = (queryData: IBuilderQuery): number =>
queryData.stepInterval === 60
? stepIntervalPoints || 60
: queryData?.stepInterval || 60;
return {
...query,
builder: {
@@ -112,7 +106,7 @@ export function updateStepInterval(
queryData: [
...(query?.builder?.queryData ?? []).map((queryData) => ({
...queryData,
stepInterval: getSteps(queryData),
stepInterval: stepIntervalPoints || queryData?.stepInterval || 60,
})),
],
},
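
The getSteps helper shown in the hunk above keeps a manually entered step interval and only replaces the default of 60 with the computed value. A minimal sketch, with IBuilderQuery reduced to the one field used; the expected values match the test changes earlier in this diff (180 for the default, 45 preserved):

interface BuilderQueryLike {
  stepInterval?: number;
}

// A query still on the default of 60 gets the computed interval; a manual value is kept.
function getSteps(
  queryData: BuilderQueryLike,
  stepIntervalPoints: number | undefined,
): number {
  return queryData.stepInterval === 60
    ? stepIntervalPoints || 60
    : queryData.stepInterval || 60;
}

getSteps({ stepInterval: 60 }, 180); // 180 (default replaced by computed value)
getSteps({ stepInterval: 45 }, 180); // 45  (manual value preserved)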

View File

@@ -12,7 +12,6 @@ import Header from 'components/Header/Header';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
@@ -185,25 +184,18 @@ export default function Home(): JSX.Element {
);
const processUserPreferences = (userPreferences: UserPreference[]): void => {
const checklistSkipped = Boolean(
userPreferences?.find(
(preference) =>
preference.name === ORG_PREFERENCES.WELCOME_CHECKLIST_DO_LATER,
)?.value,
);
const checklistSkipped = userPreferences?.find(
(preference) => preference.name === 'welcome_checklist_do_later',
)?.value;
const updatedChecklistItems = cloneDeep(checklistItems);
const newChecklistItems = updatedChecklistItems.map((item) => {
const newItem = { ...item };
const isSkipped = Boolean(
newItem.isSkipped =
userPreferences?.find(
(preference) => preference.name === item.skippedPreferenceKey,
)?.value,
);
newItem.isSkipped = isSkipped || false;
)?.value || false;
return newItem;
});
@@ -247,7 +239,7 @@ export default function Home(): JSX.Element {
setUpdatingUserPreferences(true);
updateUserPreference({
name: ORG_PREFERENCES.WELCOME_CHECKLIST_DO_LATER,
name: 'welcome_checklist_do_later',
value: true,
});
};
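
The preference handling above wraps lookups in Boolean() so a missing entry or a non-boolean value still yields a strict boolean, and replaces the string literal with an ORG_PREFERENCES constant. A minimal sketch of that coercion, using the preference name visible in the hunk; UserPreference is reduced to the two fields used:

interface UserPreference {
  name: string;
  value: unknown;
}

const WELCOME_CHECKLIST_DO_LATER = 'welcome_checklist_do_later';

// Coerce the looked-up value (or undefined) into a strict boolean.
function isChecklistSkipped(userPreferences: UserPreference[] | undefined): boolean {
  return Boolean(
    userPreferences?.find(
      (preference) => preference.name === WELCOME_CHECKLIST_DO_LATER,
    )?.value,
  );
}

isChecklistSkipped(undefined); // false
isChecklistSkipped([{ name: WELCOME_CHECKLIST_DO_LATER, value: true }]); // true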

View File

@@ -1,19 +1,17 @@
import { ORG_PREFERENCES } from 'constants/orgPreferences';
import ROUTES from 'constants/routes';
import { ChecklistItem } from './HomeChecklist/HomeChecklist';
export const checkListStepToPreferenceKeyMap = {
WILL_DO_LATER: ORG_PREFERENCES.WELCOME_CHECKLIST_DO_LATER,
SEND_LOGS: ORG_PREFERENCES.WELCOME_CHECKLIST_SEND_LOGS_SKIPPED,
SEND_TRACES: ORG_PREFERENCES.WELCOME_CHECKLIST_SEND_TRACES_SKIPPED,
SEND_INFRA_METRICS:
ORG_PREFERENCES.WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED,
SETUP_DASHBOARDS: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED,
SETUP_ALERTS: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED,
SETUP_SAVED_VIEWS: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED,
SETUP_WORKSPACE: ORG_PREFERENCES.WELCOME_CHECKLIST_SETUP_WORKSPACE_SKIPPED,
ADD_DATA_SOURCE: ORG_PREFERENCES.WELCOME_CHECKLIST_ADD_DATA_SOURCE_SKIPPED,
WILL_DO_LATER: 'welcome_checklist_do_later',
SEND_LOGS: 'welcome_checklist_send_logs_skipped',
SEND_TRACES: 'welcome_checklist_send_traces_skipped',
SEND_INFRA_METRICS: 'welcome_checklist_send_infra_metrics_skipped',
SETUP_DASHBOARDS: 'welcome_checklist_setup_dashboards_skipped',
SETUP_ALERTS: 'welcome_checklist_setup_alerts_skipped',
SETUP_SAVED_VIEWS: 'welcome_checklist_setup_saved_view_skipped',
SETUP_WORKSPACE: 'welcome_checklist_setup_workspace_skipped',
ADD_DATA_SOURCE: 'welcome_checklist_add_data_source_skipped',
};
export const DOCS_LINKS = {

View File

@@ -96,41 +96,11 @@ function HostsList(): JSX.Element {
};
}, [pageSize, currentPage, filters, minTime, maxTime, orderBy]);
const queryKey = useMemo(() => {
if (selectedHostName) {
return [
'hostList',
String(pageSize),
String(currentPage),
JSON.stringify(filters),
JSON.stringify(orderBy),
];
}
return [
'hostList',
String(pageSize),
String(currentPage),
JSON.stringify(filters),
JSON.stringify(orderBy),
String(minTime),
String(maxTime),
];
}, [
pageSize,
currentPage,
filters,
orderBy,
selectedHostName,
minTime,
maxTime,
]);
const { data, isFetching, isLoading, isError } = useGetHostList(
query as HostListPayload,
{
queryKey,
queryKey: ['hostList', query],
enabled: !!query,
keepPreviousData: true,
},
);
@@ -242,7 +212,6 @@ function HostsList(): JSX.Element {
<HostsListControls
filters={filters}
handleFiltersChange={handleFiltersChange}
showAutoRefresh={!selectedHostData}
/>
</div>
<HostsListTable
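
The queryKey logic in the hunk above includes the time bounds only when no host is selected, so the cached list key stays stable while a host row is open. A minimal sketch of that conditional key construction, with simplified parameter shapes and an invented function name:

// Build the react-query key: minTime/maxTime are part of the key only when no host is
// selected, so selecting a host does not by itself invalidate the cached list.
function buildHostListQueryKey(params: {
  selectedHostName: string | null;
  pageSize: number;
  currentPage: number;
  filters: unknown;
  orderBy: unknown;
  minTime: number;
  maxTime: number;
}): string[] {
  const base = [
    'hostList',
    String(params.pageSize),
    String(params.currentPage),
    JSON.stringify(params.filters),
    JSON.stringify(params.orderBy),
  ];
  return params.selectedHostName
    ? base
    : [...base, String(params.minTime), String(params.maxTime)];
}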

View File

@@ -11,11 +11,9 @@ import { DataSource } from 'types/common/queryBuilder';
function HostsListControls({
handleFiltersChange,
filters,
showAutoRefresh,
}: {
handleFiltersChange: (value: IBuilderQuery['filters']) => void;
filters: IBuilderQuery['filters'];
showAutoRefresh: boolean;
}): JSX.Element {
const currentQuery = initialQueriesMap[DataSource.METRICS];
const updatedCurrentQuery = useMemo(
@@ -60,7 +58,7 @@ function HostsListControls({
<div className="time-selector">
<DateTimeSelectionV2
showAutoRefresh={showAutoRefresh}
showAutoRefresh
showRefreshText={false}
hideShareModal
/>

View File

@@ -93,13 +93,9 @@ export default function HostsListTable({
const showHostsEmptyState =
!isFetching &&
!isLoading &&
formattedHostMetricsData.length === 0 &&
(!sentAnyHostMetricsData || isSendingIncorrectK8SAgentMetrics) &&
!filters.items.length;
const showTableLoadingState =
(isLoading || isFetching) && formattedHostMetricsData.length === 0;
if (isError) {
return <Typography>{data?.error || 'Something went wrong'}</Typography>;
}
@@ -131,7 +127,7 @@ export default function HostsListTable({
);
}
if (showTableLoadingState) {
if (isLoading || isFetching) {
return (
<div className="hosts-list-loading-state">
<Skeleton.Input
@@ -159,7 +155,7 @@ export default function HostsListTable({
return (
<Table
className="hosts-list-table"
dataSource={showTableLoadingState ? [] : formattedHostMetricsData}
dataSource={isLoading || isFetching ? [] : formattedHostMetricsData}
columns={columns}
pagination={{
current: currentPage,
@@ -174,7 +170,7 @@ export default function HostsListTable({
}}
scroll={{ x: true }}
loading={{
spinning: showTableLoadingState,
spinning: isFetching || isLoading,
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
}}
tableLayout="fixed"
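
The table above derives two flags, showHostsEmptyState and showTableLoadingState, instead of checking isLoading/isFetching inline. A minimal sketch of those derived booleans, with the inputs reduced to primitives and the function name invented:

// Derive the empty-state and loading-state flags from the fetch status and row count.
function getHostsTableState(args: {
  isLoading: boolean;
  isFetching: boolean;
  rowCount: number;
  sentAnyHostMetricsData: boolean;
  isSendingIncorrectK8SAgentMetrics: boolean;
  hasFilters: boolean;
}): { showEmptyState: boolean; showLoadingState: boolean } {
  const showEmptyState =
    !args.isFetching &&
    !args.isLoading &&
    args.rowCount === 0 &&
    (!args.sentAnyHostMetricsData || args.isSendingIncorrectK8SAgentMetrics) &&
    !args.hasFilters;
  // Only show the skeleton when there are no rows yet; otherwise keep the previous
  // rows visible while a refetch is in flight.
  const showLoadingState = (args.isLoading || args.isFetching) && args.rowCount === 0;
  return { showEmptyState, showLoadingState };
}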

View File

@@ -172,13 +172,6 @@
.ant-table-cell:nth-child(n + 3) {
padding-right: 24px;
}
.memory-usage-header {
display: flex;
align-items: center;
justify-content: flex-end;
gap: 4px;
margin-right: 4px;
}
.column-header-right {
text-align: right;
}

View File

@@ -1,43 +0,0 @@
import { render, screen } from '@testing-library/react';
import HostsEmptyOrIncorrectMetrics from '../HostsEmptyOrIncorrectMetrics';
describe('HostsEmptyOrIncorrectMetrics', () => {
it('shows no data message when noData is true', () => {
render(<HostsEmptyOrIncorrectMetrics noData incorrectData={false} />);
expect(
screen.getByText('No host metrics data received yet.'),
).toBeInTheDocument();
expect(
screen.getByText(/Infrastructure monitoring requires the/),
).toBeInTheDocument();
});
it('shows incorrect data message when incorrectData is true', () => {
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData />);
expect(
screen.getByText(
'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
),
).toBeInTheDocument();
});
it('does not show no data message when noData is false', () => {
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
expect(
screen.queryByText('No host metrics data received yet.'),
).not.toBeInTheDocument();
expect(
screen.queryByText(/Infrastructure monitoring requires the/),
).not.toBeInTheDocument();
});
it('does not show incorrect data message when incorrectData is false', () => {
render(<HostsEmptyOrIncorrectMetrics noData={false} incorrectData={false} />);
expect(
screen.queryByText(
'To see host metrics, upgrade to the latest version of SigNoz k8s-infra chart. Please contact support if you need help.',
),
).not.toBeInTheDocument();
});
});

View File

@@ -1,166 +0,0 @@
/* eslint-disable react/button-has-type */
import { render } from '@testing-library/react';
import ROUTES from 'constants/routes';
import * as useGetHostListHooks from 'hooks/infraMonitoring/useGetHostList';
import * as appContextHooks from 'providers/App/App';
import * as timezoneHooks from 'providers/Timezone';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import store from 'store';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import HostsList from '../HostsList';
jest.mock('lib/getMinMax', () => ({
__esModule: true,
default: jest.fn().mockImplementation(() => ({
minTime: 1713734400000,
maxTime: 1713738000000,
isValidTimeFormat: jest.fn().mockReturnValue(true),
})),
}));
jest.mock('components/CustomTimePicker/CustomTimePicker', () => ({
__esModule: true,
default: ({ onSelect, selectedTime, selectedValue }: any): JSX.Element => (
<div data-testid="custom-time-picker">
<button onClick={(): void => onSelect('custom')}>
{selectedTime} - {selectedValue}
</button>
</div>
),
}));
const queryClient = new QueryClient();
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: (): any => ({
globalTime: {
selectedTime: {
startTime: 1713734400000,
endTime: 1713738000000,
},
maxTime: 1713738000000,
minTime: 1713734400000,
},
}),
}));
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: jest.fn().mockReturnValue({
pathname: ROUTES.INFRASTRUCTURE_MONITORING_HOSTS,
}),
}));
jest.mock('react-router-dom-v5-compat', () => {
const actual = jest.requireActual('react-router-dom-v5-compat');
return {
...actual,
useSearchParams: jest
.fn()
.mockReturnValue([
{ get: jest.fn(), entries: jest.fn().mockReturnValue([]) },
jest.fn(),
]),
useNavigationType: (): any => 'PUSH',
};
});
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: jest.fn(),
}),
}));
jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
timezone: {
offset: 0,
},
browserTimezone: {
offset: 0,
},
} as any);
jest.spyOn(useGetHostListHooks, 'useGetHostList').mockReturnValue({
data: {
payload: {
data: {
records: [
{
hostName: 'test-host',
active: true,
cpu: 0.75,
memory: 0.65,
wait: 0.03,
},
],
isSendingK8SAgentMetrics: false,
sentAnyHostMetricsData: true,
},
},
},
isLoading: false,
isError: false,
} as any);
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
user: {
role: 'admin',
},
activeLicenseV3: {
event_queue: {
created_at: '0',
event: LicenseEvent.NO_EVENT,
scheduled_at: '0',
status: '',
updated_at: '0',
},
license: {
license_key: 'test-license-key',
license_type: 'trial',
org_id: 'test-org-id',
plan_id: 'test-plan-id',
plan_name: 'test-plan-name',
plan_type: 'trial',
plan_version: 'test-plan-version',
},
},
} as any);
describe('HostsList', () => {
it('renders hosts list table', () => {
const { container } = render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(container.querySelector('.hosts-list-table')).toBeInTheDocument();
});
it('renders filters', () => {
const { container } = render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<HostsList />
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(container.querySelector('.filters')).toBeInTheDocument();
});
});

View File

@@ -1,38 +0,0 @@
import { render, screen } from '@testing-library/react';
import HostsListControls from '../HostsListControls';
jest.mock('container/QueryBuilder/filters/QueryBuilderSearch', () => ({
__esModule: true,
default: (): JSX.Element => (
<div data-testid="query-builder-search">Search</div>
),
}));
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
__esModule: true,
default: (): JSX.Element => (
<div data-testid="date-time-selection">Date Time</div>
),
}));
describe('HostsListControls', () => {
const mockHandleFiltersChange = jest.fn();
const mockFilters = {
items: [],
op: 'AND',
};
it('renders search and date time filters', () => {
render(
<HostsListControls
handleFiltersChange={mockHandleFiltersChange}
filters={mockFilters}
showAutoRefresh={false}
/>,
);
expect(screen.getByTestId('query-builder-search')).toBeInTheDocument();
expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
});
});

View File

@@ -1,167 +0,0 @@
/* eslint-disable react/jsx-props-no-spreading */
import { render, screen } from '@testing-library/react';
import HostsListTable from '../HostsListTable';
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
const EMPTY_STATE_CONTAINER_CLASS = '.hosts-empty-state-container';
describe('HostsListTable', () => {
const mockHost = {
hostName: 'test-host-1',
active: true,
cpu: 0.75,
memory: 0.65,
wait: 0.03,
load15: 1.5,
os: 'linux',
};
const mockTableData = {
payload: {
data: {
hosts: [mockHost],
},
},
};
const mockOnHostClick = jest.fn();
const mockSetCurrentPage = jest.fn();
const mockSetOrderBy = jest.fn();
const mockSetPageSize = jest.fn();
const mockProps = {
isLoading: false,
isError: false,
isFetching: false,
tableData: mockTableData,
hostMetricsData: [mockHost],
filters: {
items: [],
op: 'AND',
},
onHostClick: mockOnHostClick,
currentPage: 1,
setCurrentPage: mockSetCurrentPage,
pageSize: 10,
setOrderBy: mockSetOrderBy,
setPageSize: mockSetPageSize,
} as any;
it('renders loading state if isLoading is true and tableData is empty', () => {
const { container } = render(
<HostsListTable
{...mockProps}
isLoading
hostMetricsData={[]}
tableData={{ payload: { data: { hosts: [] } } }}
/>,
);
expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
});
it('renders loading state if isFetching is true and tableData is empty', () => {
const { container } = render(
<HostsListTable
{...mockProps}
isFetching
hostMetricsData={[]}
tableData={{ payload: { data: { hosts: [] } } }}
/>,
);
expect(container.querySelector('.hosts-list-loading-state')).toBeTruthy();
});
it('renders error state if isError is true', () => {
render(<HostsListTable {...mockProps} isError />);
expect(screen.getByText('Something went wrong')).toBeTruthy();
});
it('renders empty state if no hosts are found', () => {
const { container } = render(
<HostsListTable
{...mockProps}
hostMetricsData={[]}
tableData={{
payload: {
data: { hosts: [] },
},
}}
/>,
);
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
});
it('renders empty state if sentAnyHostMetricsData is false', () => {
const { container } = render(
<HostsListTable
{...mockProps}
hostMetricsData={[]}
tableData={{
...mockTableData,
payload: {
...mockTableData.payload,
data: {
...mockTableData.payload.data,
sentAnyHostMetricsData: false,
hosts: [],
},
},
}}
/>,
);
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
});
it('renders empty state if isSendingIncorrectK8SAgentMetrics is true', () => {
const { container } = render(
<HostsListTable
{...mockProps}
hostMetricsData={[]}
tableData={{
...mockTableData,
payload: {
...mockTableData.payload,
data: {
...mockTableData.payload.data,
isSendingIncorrectK8SAgentMetrics: true,
hosts: [],
},
},
}}
/>,
);
expect(container.querySelector(EMPTY_STATE_CONTAINER_CLASS)).toBeTruthy();
});
it('renders table data', () => {
const { container } = render(
<HostsListTable
{...mockProps}
tableData={{
...mockTableData,
payload: {
...mockTableData.payload,
data: {
...mockTableData.payload.data,
isSendingIncorrectK8SAgentMetrics: false,
sentAnyHostMetricsData: true,
},
},
}}
/>,
);
expect(container.querySelector('.hosts-list-table')).toBeTruthy();
});
});

View File

@@ -1,104 +0,0 @@
import { render } from '@testing-library/react';
import { formatDataForTable, GetHostsQuickFiltersConfig } from '../utils';
const PROGRESS_BAR_CLASS = '.progress-bar';
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
describe('InfraMonitoringHosts utils', () => {
describe('formatDataForTable', () => {
it('should format host data correctly', () => {
const mockData = [
{
hostName: 'test-host',
active: true,
cpu: 0.95,
memory: 0.85,
wait: 0.05,
load15: 2.5,
os: 'linux',
},
] as any;
const result = formatDataForTable(mockData);
expect(result[0].hostName).toBe('test-host');
expect(result[0].wait).toBe('5%');
expect(result[0].load15).toBe(2.5);
// Test active tag rendering
const activeTag = render(result[0].active as JSX.Element);
expect(activeTag.container.textContent).toBe('ACTIVE');
expect(activeTag.container.querySelector('.active')).toBeTruthy();
// Test CPU progress bar
const cpuProgress = render(result[0].cpu as JSX.Element);
const cpuProgressBar = cpuProgress.container.querySelector(
PROGRESS_BAR_CLASS,
);
expect(cpuProgressBar).toBeTruthy();
// Test memory progress bar
const memoryProgress = render(result[0].memory as JSX.Element);
const memoryProgressBar = memoryProgress.container.querySelector(
PROGRESS_BAR_CLASS,
);
expect(memoryProgressBar).toBeTruthy();
});
it('should handle inactive hosts', () => {
const mockData = [
{
hostName: 'test-host',
active: false,
cpu: 0.3,
memory: 0.4,
wait: 0.02,
load15: 1.2,
os: 'linux',
cpuTimeSeries: [],
memoryTimeSeries: [],
waitTimeSeries: [],
load15TimeSeries: [],
},
] as any;
const result = formatDataForTable(mockData);
const inactiveTag = render(result[0].active as JSX.Element);
expect(inactiveTag.container.textContent).toBe('INACTIVE');
expect(inactiveTag.container.querySelector('.inactive')).toBeTruthy();
});
});
describe('GetHostsQuickFiltersConfig', () => {
it('should return correct config when dotMetricsEnabled is true', () => {
const result = GetHostsQuickFiltersConfig(true);
expect(result[0].attributeKey.key).toBe('host.name');
expect(result[1].attributeKey.key).toBe('os.type');
expect(result[0].aggregateAttribute).toBe('system.cpu.load_average.15m');
});
it('should return correct config when dotMetricsEnabled is false', () => {
const result = GetHostsQuickFiltersConfig(false);
expect(result[0].attributeKey.key).toBe('host_name');
expect(result[1].attributeKey.key).toBe('os_type');
expect(result[0].aggregateAttribute).toBe('system_cpu_load_average_15m');
});
});
});

View File

@@ -1,8 +1,7 @@
import './InfraMonitoring.styles.scss';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Color } from '@signozhq/design-tokens';
import { Progress, TabsProps, Tag, Tooltip } from 'antd';
import { Progress, TabsProps, Tag } from 'antd';
import { ColumnType } from 'antd/es/table';
import {
HostData,
@@ -94,14 +93,7 @@ export const getHostsListColumns = (): ColumnType<HostRowData>[] => [
align: 'right',
},
{
title: (
<div className="column-header-right memory-usage-header">
Memory Usage
<Tooltip title="Excluding cache memory">
<InfoCircleOutlined />
</Tooltip>
</div>
),
title: <div className="column-header-right">Memory Usage</div>,
dataIndex: 'memory',
key: 'memory',
width: 100,
@@ -218,10 +210,6 @@ export function GetHostsQuickFiltersConfig(
? 'system.cpu.load_average.15m'
: 'system_cpu_load_average_15m';
const environmentKey = dotMetricsEnabled
? 'deployment.environment'
: 'deployment_environment';
return [
{
type: FiltersType.CHECKBOX,
@@ -253,17 +241,5 @@ export function GetHostsQuickFiltersConfig(
dataSource: DataSource.METRICS,
defaultOpen: true,
},
{
type: FiltersType.CHECKBOX,
title: 'Environment',
attributeKey: {
key: environmentKey,
dataType: DataTypes.String,
type: 'resource',
isColumn: false,
isJSON: false,
},
defaultOpen: true,
},
];
}

View File

@@ -38,7 +38,7 @@ import {
ScrollText,
X,
} from 'lucide-react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { AppState } from 'store/reducers';
@@ -85,12 +85,8 @@ function ClusterDetails({
endTime: endMs,
}));
const lastSelectedInterval = useRef<Time | null>(null);
const [selectedInterval, setSelectedInterval] = useState<Time>(
lastSelectedInterval.current
? lastSelectedInterval.current
: (selectedTime as Time),
selectedTime as Time,
);
const [searchParams, setSearchParams] = useSearchParams();
@@ -199,11 +195,10 @@ function ClusterDetails({
}, [initialFilters, initialEventsFilters]);
useEffect(() => {
const currentSelectedInterval = lastSelectedInterval.current || selectedTime;
setSelectedInterval(currentSelectedInterval as Time);
setSelectedInterval(selectedTime as Time);
if (currentSelectedInterval !== 'custom') {
const { maxTime, minTime } = GetMinMax(currentSelectedInterval);
if (selectedTime !== 'custom') {
const { maxTime, minTime } = GetMinMax(selectedTime);
setModalTimeRange({
startTime: Math.floor(minTime / 1000000000),
@@ -231,7 +226,6 @@ function ClusterDetails({
const handleTimeChange = useCallback(
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
lastSelectedInterval.current = interval as Time;
setSelectedInterval(interval as Time);
if (interval === 'custom' && dateTimeRange) {
@@ -468,7 +462,6 @@ function ClusterDetails({
};
const handleClose = (): void => {
lastSelectedInterval.current = null;
setSelectedInterval(selectedTime as Time);
if (selectedTime !== 'custom') {
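
ClusterDetails above keeps the drawer's interval in a ref (lastSelectedInterval) so that re-renders driven by the global selectedTime do not clobber a locally chosen interval, and clears the ref on close. A minimal custom-hook sketch of that pattern, assuming React and narrowing Time to string; the hook name is invented:

import { useEffect, useRef, useState } from 'react';

function useDrawerInterval(selectedTime: string): {
  selectedInterval: string;
  onIntervalChange: (interval: string) => void;
  onClose: () => void;
} {
  const lastSelectedInterval = useRef<string | null>(null);
  const [selectedInterval, setSelectedInterval] = useState<string>(
    lastSelectedInterval.current ?? selectedTime,
  );

  useEffect(() => {
    // Prefer the interval chosen inside the drawer; fall back to the global one.
    setSelectedInterval(lastSelectedInterval.current || selectedTime);
  }, [selectedTime]);

  const onIntervalChange = (interval: string): void => {
    lastSelectedInterval.current = interval;
    setSelectedInterval(interval);
  };

  const onClose = (): void => {
    // Reset so the next open starts from the global selection again.
    lastSelectedInterval.current = null;
    setSelectedInterval(selectedTime);
  };

  return { selectedInterval, onIntervalChange, onClose };
}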

View File

@@ -51,8 +51,8 @@ export const getClusterMetricsQueryPayload = (
const getKey = (dotKey: string, underscoreKey: string): string =>
dotMetricsEnabled ? dotKey : underscoreKey;
const k8sPodCpuUtilizationKey = getKey(
'k8s.pod.cpu.usage',
'k8s_pod_cpu_usage',
'k8s.pod.cpu.utilization',
'k8s_pod_cpu_utilization',
);
const k8sNodeAllocatableCpuKey = getKey(
'k8s.node.allocatable_cpu',
@@ -146,7 +146,7 @@ export const getClusterMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,
@@ -189,7 +189,7 @@ export const getClusterMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,
@@ -232,7 +232,7 @@ export const getClusterMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,
@@ -731,7 +731,7 @@ export const getClusterMetricsQueryPayload = (
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TABLE,
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
@@ -751,7 +751,7 @@ export const getClusterMetricsQueryPayload = (
filters: {
items: [
{
id: 'a7da59c7',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -786,12 +786,12 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: 'available',
legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'last',
spaceAggregation: 'sum',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
@@ -804,14 +804,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDeploymentDesiredKey,
type: 'Gauge',
},
aggregateOperator: 'avg',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: '55110885',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -846,14 +846,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: 'desired',
legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'last',
spaceAggregation: 'sum',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'avg',
timeAggregation: 'latest',
},
],
queryFormulas: [],
@@ -890,13 +890,13 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: true,
formatForWeb: false,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TABLE,
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
@@ -909,14 +909,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetCurrentPodsKey,
type: 'Gauge',
},
aggregateOperator: 'max',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: '3c57b4d1',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -951,14 +951,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: 'current',
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'last',
spaceAggregation: 'sum',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'max',
timeAggregation: 'latest',
},
{
aggregateAttribute: {
@@ -969,14 +969,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetDesiredPodsKey,
type: 'Gauge',
},
aggregateOperator: 'max',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: '0f49fe64',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1011,14 +1011,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: 'desired',
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'last',
spaceAggregation: 'sum',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'max',
timeAggregation: 'latest',
},
{
aggregateAttribute: {
@@ -1029,14 +1029,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetReadyPodsKey,
type: 'Gauge',
},
aggregateOperator: 'max',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: '0bebf625',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1071,14 +1071,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: 'ready',
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'last',
spaceAggregation: 'sum',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'max',
timeAggregation: 'latest',
},
{
aggregateAttribute: {
@@ -1089,14 +1089,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetUpdatedPodsKey,
type: 'Gauge',
},
aggregateOperator: 'max',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'D',
filters: {
items: [
{
id: '1ddacbbe',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1131,14 +1131,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: 'updated',
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'D',
reduceTo: 'last',
spaceAggregation: 'sum',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'max',
timeAggregation: 'latest',
},
],
queryFormulas: [],
@@ -1199,13 +1199,13 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: true,
formatForWeb: false,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TABLE,
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
@@ -1218,14 +1218,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetCurrentScheduledNodesKey,
type: 'Gauge',
},
aggregateOperator: 'avg',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'e0bea554',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1250,16 +1250,24 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: 'current_nodes',
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'last',
spaceAggregation: 'avg',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'avg',
timeAggregation: 'latest',
},
{
aggregateAttribute: {
@@ -1270,14 +1278,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetDesiredScheduledNodesKey,
type: 'Gauge',
},
aggregateOperator: 'avg',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: '741052f7',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1302,16 +1310,24 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: 'desired_nodes',
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'last',
spaceAggregation: 'avg',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'avg',
timeAggregation: 'latest',
},
{
aggregateAttribute: {
@@ -1322,14 +1338,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetReadyNodesKey,
type: 'Gauge',
},
aggregateOperator: 'avg',
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'f23759f2',
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1354,16 +1370,24 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: 'ready_nodes',
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'last',
spaceAggregation: 'avg',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'avg',
timeAggregation: 'latest',
},
],
queryFormulas: [],
@@ -1412,7 +1436,316 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: true,
formatForWeb: false,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_active_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobActivePodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_successful_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobSuccessfulPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_failed_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobFailedPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_desired_successful_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobDesiredSuccessfulPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'D',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'D',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
],
queryFormulas: [],
},
clickhouse_sql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
id: v4(),
promql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
start,
end,
},
@@ -1444,7 +1777,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
key: 'k8s_cluster_name',
type: 'tag',
},
op: '=',
@@ -1504,7 +1837,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
key: 'k8s_cluster_name',
type: 'tag',
},
op: '=',
@@ -1564,7 +1897,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
key: 'k8s_cluster_name',
type: 'tag',
},
op: '=',
@@ -1624,7 +1957,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
key: 'k8s_cluster_name',
type: 'tag',
},
op: '=',
@@ -1672,24 +2005,6 @@ export const getClusterMetricsQueryPayload = (
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
id: v4(),
promql: [
@@ -1699,24 +2014,6 @@ export const getClusterMetricsQueryPayload = (
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},

View File

@@ -189,32 +189,6 @@ function K8sClustersList({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [minTime, maxTime, orderBy, selectedRowData, groupBy]);
const groupedByRowDataQueryKey = useMemo(() => {
if (selectedClusterName) {
return [
'clusterList',
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(selectedRowData),
];
}
return [
'clusterList',
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(selectedRowData),
String(minTime),
String(maxTime),
];
}, [
queryFilters,
orderBy,
selectedClusterName,
minTime,
maxTime,
selectedRowData,
]);
const {
data: groupedByRowData,
isFetching: isFetchingGroupedByRowData,
@@ -224,7 +198,7 @@ function K8sClustersList({
} = useGetK8sClustersList(
fetchGroupedByRowDataQuery as K8sClustersListPayload,
{
queryKey: groupedByRowDataQueryKey,
queryKey: ['clusterList', fetchGroupedByRowDataQuery],
enabled: !!fetchGroupedByRowDataQuery && !!selectedRowData,
},
undefined,
@@ -280,44 +254,11 @@ function K8sClustersList({
return groupedByRowData?.payload?.data?.records || [];
}, [groupedByRowData, selectedRowData]);
const queryKey = useMemo(() => {
if (selectedClusterName) {
return [
'clusterList',
String(pageSize),
String(currentPage),
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(groupBy),
];
}
return [
'clusterList',
String(pageSize),
String(currentPage),
JSON.stringify(queryFilters),
JSON.stringify(orderBy),
JSON.stringify(groupBy),
String(minTime),
String(maxTime),
];
}, [
selectedClusterName,
pageSize,
currentPage,
queryFilters,
orderBy,
groupBy,
minTime,
maxTime,
]);
const { data, isFetching, isLoading, isError } = useGetK8sClustersList(
query as K8sClustersListPayload,
{
queryKey,
queryKey: ['clusterList', query],
enabled: !!query,
keepPreviousData: true,
},
undefined,
dotMetricsEnabled,
@@ -642,9 +583,6 @@ function K8sClustersList({
});
};
const showTableLoadingState =
(isFetching || isLoading) && formattedClustersData.length === 0;
return (
<div className="k8s-list">
<K8sHeader
@@ -657,13 +595,12 @@ function K8sClustersList({
handleGroupByChange={handleGroupByChange}
selectedGroupBy={groupBy}
entity={K8sCategory.NODES}
showAutoRefresh={!selectedClusterData}
/>
{isError && <Typography>{data?.error || 'Something went wrong'}</Typography>}
<Table
className="k8s-list-table clusters-list-table"
dataSource={showTableLoadingState ? [] : formattedClustersData}
dataSource={isFetching || isLoading ? [] : formattedClustersData}
columns={columns}
pagination={{
current: currentPage,
@@ -675,25 +612,26 @@ function K8sClustersList({
}}
scroll={{ x: true }}
loading={{
spinning: showTableLoadingState,
spinning: isFetching || isLoading,
indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
}}
locale={{
emptyText: showTableLoadingState ? null : (
<div className="no-filtered-hosts-message-container">
<div className="no-filtered-hosts-message-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
emptyText:
isFetching || isLoading ? null : (
<div className="no-filtered-hosts-message-container">
<div className="no-filtered-hosts-message-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
<Typography.Text className="no-filtered-hosts-message">
This query had no results. Edit your query and try again!
</Typography.Text>
<Typography.Text className="no-filtered-hosts-message">
This query had no results. Edit your query and try again!
</Typography.Text>
</div>
</div>
</div>
),
),
}}
tableLayout="fixed"
onChange={handleTableChange}

View File

@@ -33,7 +33,7 @@ import {
ScrollText,
X,
} from 'lucide-react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { AppState } from 'store/reducers';
@@ -84,12 +84,8 @@ function DaemonSetDetails({
endTime: endMs,
}));
const lastSelectedInterval = useRef<Time | null>(null);
const [selectedInterval, setSelectedInterval] = useState<Time>(
lastSelectedInterval.current
? lastSelectedInterval.current
: (selectedTime as Time),
selectedTime as Time,
);
const [searchParams, setSearchParams] = useSearchParams();
@@ -215,11 +211,10 @@ function DaemonSetDetails({
}, [initialFilters, initialEventsFilters]);
useEffect(() => {
const currentSelectedInterval = lastSelectedInterval.current || selectedTime;
setSelectedInterval(currentSelectedInterval as Time);
setSelectedInterval(selectedTime as Time);
if (currentSelectedInterval !== 'custom') {
const { maxTime, minTime } = GetMinMax(currentSelectedInterval);
if (selectedTime !== 'custom') {
const { maxTime, minTime } = GetMinMax(selectedTime);
setModalTimeRange({
startTime: Math.floor(minTime / 1000000000),
@@ -247,7 +242,6 @@ function DaemonSetDetails({
const handleTimeChange = useCallback(
(interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
lastSelectedInterval.current = interval as Time;
setSelectedInterval(interval as Time);
if (interval === 'custom' && dateTimeRange) {
@@ -482,7 +476,6 @@ function DaemonSetDetails({
};
const handleClose = (): void => {
lastSelectedInterval.current = null;
setSelectedInterval(selectedTime as Time);
if (selectedTime !== 'custom') {

View File

@@ -33,8 +33,8 @@ export const getDaemonSetMetricsQueryPayload = (
dotMetricsEnabled: boolean,
): GetQueryResultsProps[] => {
const k8sPodCpuUtilizationKey = dotMetricsEnabled
? 'k8s.pod.cpu.usage'
: 'k8s_pod_cpu_usage';
? 'k8s.pod.cpu.utilization'
: 'k8s_pod_cpu_utilization';
const k8sContainerCpuRequestKey = dotMetricsEnabled
? 'k8s.container.cpu_request'
@@ -84,7 +84,7 @@ export const getDaemonSetMetricsQueryPayload = (
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_pod_cpu_usage--float64--Gauge--true',
id: 'k8s_pod_cpu_utilization--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sPodCpuUtilizationKey,
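
Both metric payload builders in this diff pick between an OTel dot-separated metric name and its underscore-normalized form via a dotMetricsEnabled flag. A minimal sketch of that toggle; the metric names are the ones appearing in the hunks, and getMetricKey is an invented wrapper:

// Returns a key picker bound to the dotMetricsEnabled flag.
const getMetricKey = (dotMetricsEnabled: boolean) => (
  dotKey: string,
  underscoreKey: string,
): string => (dotMetricsEnabled ? dotKey : underscoreKey);

const getKey = getMetricKey(true);
getKey('k8s.pod.cpu.usage', 'k8s_pod_cpu_usage'); // 'k8s.pod.cpu.usage'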

Some files were not shown because too many files have changed in this diff.