Compare commits

92 commits, comparing fix/multi-... with v0.90.1-e9

| SHA1 |
|---|
| e97c7d64d0 |
| d6383a722e |
| 21f2bf28a8 |
| d07348caae |
| 3ef1e884dc |
| ff3bb04655 |
| 31c4f800fc |
| 51c2bbcd4b |
| 5610cb1f81 |
| 478d28eda1 |
| ebb2f1fd63 |
| 629e502703 |
| 7bafe2d724 |
| cf4e44d341 |
| 7ce1a1cbca |
| e2253ec7c0 |
| 86be2869a9 |
| 9ec503e302 |
| 77ee201bb7 |
| 168a7baf6c |
| c36c492877 |
| 4da7d62b0c |
| 15332b90c1 |
| e78ca467fc |
| 53b71d7062 |
| c08e781250 |
| 5e0bf930d6 |
| d6eed8e79d |
| 3d26aa5c8c |
| 6137740907 |
| a90a6725a9 |
| a24de21043 |
| 1aa6c98822 |
| e36391995c |
| 5349384097 |
| ee1cafd87b |
| 8cf511cdb9 |
| a7ce6da7d1 |
| ddb08b3883 |
| 893b11c4a0 |
| 89efcc0804 |
| b7025af703 |
| 552d44d208 |
| 497315579f |
| bfaac15ccb |
| 5e18be6a23 |
| 1793706f87 |
| da2a3c738a |
| d17dab9a1d |
| 88b75d4e72 |
| 6327ab5ec6 |
| 5b09490ad7 |
| 0936e1cdd7 |
| b50127b567 |
| 3c139e40ed |
| ef16ad0fc7 |
| 437c0c9479 |
| ba2ed3ad22 |
| eb3dfbf63b |
| c3e048470d |
| 4563ff0e62 |
| c9e48b6de9 |
| 06ef9ff384 |
| 26d55875f5 |
| b1864ee328 |
| 8b62c8dced |
| 273452352d |
| 8274ebfe37 |
| 7d5e14abb6 |
| 7c17ac42b1 |
| 74ee7bb2c7 |
| 2f5640b2e6 |
| 121debcecc |
| ff13504a74 |
| d4e373443b |
| 3ccf822d67 |
| 0e270e6f51 |
| 749df2a979 |
| 9ee5d5d599 |
| 4940dfd46f |
| 79a31cc205 |
| 5102cf2b7b |
| 9ec5594648 |
| b6c2ebd6d7 |
| 9a3a8c8305 |
| 2ac45b0174 |
| 2a53918ebd |
| 9daefeb881 |
| 526cf01cb7 |
| cd4766ec2b |
| 2196b58d36 |
| 53c58b9983 |

@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.2
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.2
container_name: schema-migrator-async
command:
- async

.github/CODEOWNERS (vendored, 30 changes)
@@ -7,14 +7,38 @@
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @SigNoz/devops
.github @SigNoz/devops

# Scaffold Owners
/pkg/config/ @grandwizard28
/pkg/errors/ @grandwizard28
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
/pkg/valuer/ @grandwizard28
/cmd/ @grandwizard28
.golangci.yml @grandwizard28

# Zeus Owners
/pkg/zeus/ @vikrantgupta25
/pkg/licensing/ @vikrantgupta25
/pkg/sqlmigration/ @vikrantgupta25
/ee/zeus/ @vikrantgupta25
/pkg/licensing/ @vikrantgupta25
/ee/licensing/ @vikrantgupta25
/ee/sqlmigration/ @vikrantgupta25

# SQL Owners
/pkg/sqlmigration/ @vikrantgupta25
/ee/sqlmigration/ @vikrantgupta25
/pkg/sqlschema/ @vikrantgupta25
/ee/sqlschema/ @vikrantgupta25

# Analytics Owners
/pkg/analytics/ @vikrantgupta25
/pkg/statsreporter/ @vikrantgupta25

# Querier Owners
/pkg/querier/ @srikanthccv
/pkg/variables/ @srikanthccv
/pkg/types/querybuildertypes/ @srikanthccv
/pkg/querybuilder/ @srikanthccv
/pkg/telemetrylogs/ @srikanthccv
/pkg/telemetrymetadata/ @srikanthccv
/pkg/telemetrymetrics/ @srikanthccv
/pkg/telemetrytraces/ @srikanthccv

.github/workflows/build-community.yaml (vendored, 4 changes)
@@ -66,7 +66,7 @@ jobs:
GO_NAME: signoz-community
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./pkg/query-service
GO_BUILD_CONTEXT: ./cmd/community
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
@@ -78,6 +78,6 @@ jobs:
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch
DOCKER_DOCKERFILE_PATH: ./cmd/community/Dockerfile.multi-arch
DOCKER_MANIFEST: true
DOCKER_PROVIDERS: dockerhub

.github/workflows/build-enterprise.yaml (vendored, 4 changes)
@@ -96,7 +96,7 @@ jobs:
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_CONTEXT: ./cmd/enterprise
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
@@ -112,6 +112,6 @@ jobs:
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
DOCKER_MANIFEST: true
DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }}

.github/workflows/build-staging.yaml (vendored, 4 changes)
@@ -95,7 +95,7 @@ jobs:
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./ee/query-service
GO_BUILD_CONTEXT: ./cmd/enterprise
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
@@ -111,7 +111,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
DOCKER_MANIFEST: true
DOCKER_PROVIDERS: gcp
staging:

.github/workflows/gor-signoz-community.yaml (vendored, 4 changes)
@@ -36,7 +36,7 @@ jobs:
- ubuntu-latest
- macos-latest
env:
CONFIG_PATH: pkg/query-service/.goreleaser.yaml
CONFIG_PATH: cmd/community/.goreleaser.yaml
runs-on: ${{ matrix.os }}
steps:
- name: checkout
@@ -100,7 +100,7 @@ jobs:
needs: build
env:
DOCKER_CLI_EXPERIMENTAL: "enabled"
WORKDIR: pkg/query-service
WORKDIR: cmd/community
steps:
- name: checkout
uses: actions/checkout@v4

.github/workflows/gor-signoz.yaml (vendored, 2 changes)
@@ -50,7 +50,7 @@ jobs:
- ubuntu-latest
- macos-latest
env:
CONFIG_PATH: ee/query-service/.goreleaser.yaml
CONFIG_PATH: cmd/enterprise/.goreleaser.yaml
runs-on: ${{ matrix.os }}
steps:
- name: checkout

.github/workflows/integrationci.yaml (vendored, 4 changes)
@@ -20,9 +20,9 @@ jobs:
- sqlite
clickhouse-version:
- 24.1.2-alpine
- 24.12-alpine
- 25.5.6
schema-migrator-version:
- v0.111.38
- v0.128.1
postgres-version:
- 15
if: |

LICENSE (2 changes)
@@ -2,7 +2,7 @@ Copyright (c) 2020-present SigNoz Inc.

Portions of this software are licensed as follows:

* All content that resides under the "ee/" directory of this repository, if that directory exists, is licensed under the license defined in "ee/LICENSE".
* All content that resides under the "ee/" and the "cmd/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "ee/LICENSE".
* All third party components incorporated into the SigNoz Software are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "MIT Expat" license as defined below.

Makefile (12 changes)
@@ -20,18 +20,18 @@ GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/zeus.depreca

GO_BUILD_VERSION_LDFLAGS = -X github.com/SigNoz/signoz/pkg/version.version=$(VERSION) -X github.com/SigNoz/signoz/pkg/version.hash=$(COMMIT_SHORT_SHA) -X github.com/SigNoz/signoz/pkg/version.time=$(TIMESTAMP) -X github.com/SigNoz/signoz/pkg/version.branch=$(BRANCH_NAME)
GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))
GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service
GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/cmd/community
GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community
GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS))
GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS))
GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service
GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/cmd/enterprise
GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO)

DOCKER_BUILD_ARCHS_COMMUNITY = $(addprefix docker-build-community-,$(ARCHS))
DOCKERFILE_COMMUNITY = $(SRC)/pkg/query-service/Dockerfile
DOCKERFILE_COMMUNITY = $(SRC)/cmd/community/Dockerfile
DOCKER_REGISTRY_COMMUNITY ?= docker.io/signoz/signoz-community
DOCKER_BUILD_ARCHS_ENTERPRISE = $(addprefix docker-build-enterprise-,$(ARCHS))
DOCKERFILE_ENTERPRISE = $(SRC)/ee/query-service/Dockerfile
DOCKERFILE_ENTERPRISE = $(SRC)/cmd/enterprise/Dockerfile
DOCKER_REGISTRY_ENTERPRISE ?= docker.io/signoz/signoz
JS_BUILD_CONTEXT = $(SRC)/frontend

@@ -74,7 +74,7 @@ go-run-enterprise: ## Runs the enterprise go backend server
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
go run -race \
$(GO_BUILD_CONTEXT_ENTERPRISE)/main.go \
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
--config ./conf/prometheus.yml \
--cluster cluster

@@ -92,7 +92,7 @@ go-run-community: ## Runs the community go backend server
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
go run -race \
$(GO_BUILD_CONTEXT_COMMUNITY)/main.go \
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go \
--config ./conf/prometheus.yml \
--cluster cluster

@@ -231,6 +231,8 @@ Not sure how to get started? Just ping us on `#contributing` in our [slack commu
- [Shaheer Kochai](https://github.com/ahmadshaheer)
- [Amlan Kumar Nandy](https://github.com/amlannandy)
- [Sahil Khan](https://github.com/sawhil)
- [Aditya Singh](https://github.com/aks07)
- [Abhi Kumar](https://github.com/ahrefabhi)

#### DevOps

@@ -11,7 +11,7 @@ before:
builds:
- id: signoz
binary: bin/signoz
main: pkg/query-service/main.go
main: cmd/community
env:
- CGO_ENABLED=1
- >-
@@ -16,4 +16,4 @@ COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
ENTRYPOINT ["./signoz", "server"]
@@ -17,4 +17,4 @@ COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz-community

ENTRYPOINT ["./signoz-community"]
ENTRYPOINT ["./signoz-community", "server"]

cmd/community/main.go (new file, 18 lines)

package main

import (
    "log/slog"

    "github.com/SigNoz/signoz/cmd"
    "github.com/SigNoz/signoz/pkg/instrumentation"
)

func main() {
    // initialize logger for logging in the cmd/ package. This logger is different from the logger used in the application.
    logger := instrumentation.NewLogger(instrumentation.Config{Logs: instrumentation.LogsConfig{Level: slog.LevelInfo}})

    // register a list of commands to the root command
    registerServer(cmd.RootCmd, logger)

    cmd.Execute(logger)
}

cmd/community/server.go (new file, 116 lines)

package main

import (
    "context"
    "log/slog"
    "time"

    "github.com/SigNoz/signoz/cmd"
    "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
    "github.com/SigNoz/signoz/pkg/analytics"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/query-service/app"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/version"
    "github.com/SigNoz/signoz/pkg/zeus"
    "github.com/SigNoz/signoz/pkg/zeus/noopzeus"
    "github.com/spf13/cobra"
)

func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
    var flags signoz.DeprecatedFlags

    serverCmd := &cobra.Command{
        Use:                "server",
        Short:              "Run the SigNoz server",
        FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
        RunE: func(currCmd *cobra.Command, args []string) error {
            config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
            if err != nil {
                return err
            }

            return runServer(currCmd.Context(), config, logger)
        },
    }

    flags.RegisterFlags(serverCmd)
    parentCmd.AddCommand(serverCmd)
}

func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) error {
    // print the version
    version.Info.PrettyPrint(config.Version)

    // add enterprise sqlstore factories to the community sqlstore factories
    sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
    if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
        logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
        return err
    }

    jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)

    signoz, err := signoz.New(
        ctx,
        config,
        jwt,
        zeus.Config{},
        noopzeus.NewProviderFactory(),
        licensing.Config{},
        func(_ sqlstore.SQLStore, _ zeus.Zeus, _ organization.Getter, _ analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
            return nooplicensing.NewFactory()
        },
        signoz.NewEmailingProviderFactories(),
        signoz.NewCacheProviderFactories(),
        signoz.NewWebProviderFactories(),
        func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] {
            return signoz.NewSQLSchemaProviderFactories(sqlstore)
        },
        signoz.NewSQLStoreProviderFactories(),
        signoz.NewTelemetryStoreProviderFactories(),
    )
    if err != nil {
        logger.ErrorContext(ctx, "failed to create signoz", "error", err)
        return err
    }

    server, err := app.NewServer(config, signoz, jwt)
    if err != nil {
        logger.ErrorContext(ctx, "failed to create server", "error", err)
        return err
    }

    if err := server.Start(ctx); err != nil {
        logger.ErrorContext(ctx, "failed to start server", "error", err)
        return err
    }

    signoz.Start(ctx)

    if err := signoz.Wait(ctx); err != nil {
        logger.ErrorContext(ctx, "failed to start signoz", "error", err)
        return err
    }

    err = server.Stop(ctx)
    if err != nil {
        logger.ErrorContext(ctx, "failed to stop server", "error", err)
        return err
    }

    err = signoz.Stop(ctx)
    if err != nil {
        logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
        return err
    }

    return nil
}

cmd/config.go (new file, 45 lines)

package cmd

import (
    "context"
    "fmt"
    "log/slog"
    "os"

    "github.com/SigNoz/signoz/pkg/config"
    "github.com/SigNoz/signoz/pkg/config/envprovider"
    "github.com/SigNoz/signoz/pkg/config/fileprovider"
    "github.com/SigNoz/signoz/pkg/signoz"
)

func NewSigNozConfig(ctx context.Context, flags signoz.DeprecatedFlags) (signoz.Config, error) {
    config, err := signoz.NewConfig(
        ctx,
        config.ResolverConfig{
            Uris: []string{"env:"},
            ProviderFactories: []config.ProviderFactory{
                envprovider.NewFactory(),
                fileprovider.NewFactory(),
            },
        },
        flags,
    )
    if err != nil {
        return signoz.Config{}, err
    }

    return config, nil
}

func NewJWTSecret(_ context.Context, _ *slog.Logger) string {
    jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
    if len(jwtSecret) == 0 {
        fmt.Println("🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!")
        fmt.Println("SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application.")
        fmt.Println("Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access.")
        fmt.Println("Please set the SIGNOZ_JWT_SECRET environment variable immediately.")
        fmt.Println("For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
    }

    return jwtSecret
}

@@ -11,7 +11,7 @@ before:
builds:
- id: signoz
binary: bin/signoz
main: ee/query-service/main.go
main: cmd/enterprise
env:
- CGO_ENABLED=1
- >-
@@ -16,4 +16,4 @@ COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
ENTRYPOINT ["./signoz", "server"]
@@ -23,6 +23,7 @@ COPY go.mod go.sum ./

RUN go mod download

COPY ./cmd/ ./cmd/
COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates
@@ -33,4 +34,4 @@ RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz"]
ENTRYPOINT ["/root/signoz", "server"]
@@ -17,4 +17,4 @@ COPY frontend/build/ /etc/signoz/web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["./signoz"]
ENTRYPOINT ["./signoz", "server"]

cmd/enterprise/main.go (new file, 18 lines)

package main

import (
    "log/slog"

    "github.com/SigNoz/signoz/cmd"
    "github.com/SigNoz/signoz/pkg/instrumentation"
)

func main() {
    // initialize logger for logging in the cmd/ package. This logger is different from the logger used in the application.
    logger := instrumentation.NewLogger(instrumentation.Config{Logs: instrumentation.LogsConfig{Level: slog.LevelInfo}})

    // register a list of commands to the root command
    registerServer(cmd.RootCmd, logger)

    cmd.Execute(logger)
}

cmd/enterprise/server.go (new file, 124 lines)

package main

import (
    "context"
    "log/slog"
    "time"

    "github.com/SigNoz/signoz/cmd"
    enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
    "github.com/SigNoz/signoz/ee/licensing/httplicensing"
    enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
    "github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
    "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
    enterprisezeus "github.com/SigNoz/signoz/ee/zeus"
    "github.com/SigNoz/signoz/ee/zeus/httpzeus"
    "github.com/SigNoz/signoz/pkg/analytics"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/version"
    "github.com/SigNoz/signoz/pkg/zeus"
    "github.com/spf13/cobra"
)

func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
    var flags signoz.DeprecatedFlags

    serverCmd := &cobra.Command{
        Use:                "server",
        Short:              "Run the SigNoz server",
        FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
        RunE: func(currCmd *cobra.Command, args []string) error {
            config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
            if err != nil {
                return err
            }

            return runServer(currCmd.Context(), config, logger)
        },
    }

    flags.RegisterFlags(serverCmd)
    parentCmd.AddCommand(serverCmd)
}

func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) error {
    // print the version
    version.Info.PrettyPrint(config.Version)

    // add enterprise sqlstore factories to the community sqlstore factories
    sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
    if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
        logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
        return err
    }

    jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)

    signoz, err := signoz.New(
        ctx,
        config,
        jwt,
        enterprisezeus.Config(),
        httpzeus.NewProviderFactory(),
        enterpriselicensing.Config(24*time.Hour, 3),
        func(sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
            return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
        },
        signoz.NewEmailingProviderFactories(),
        signoz.NewCacheProviderFactories(),
        signoz.NewWebProviderFactories(),
        func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] {
            existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore)
            if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil {
                panic(err)
            }

            return existingFactories
        },
        sqlstoreFactories,
        signoz.NewTelemetryStoreProviderFactories(),
    )
    if err != nil {
        logger.ErrorContext(ctx, "failed to create signoz", "error", err)
        return err
    }

    server, err := enterpriseapp.NewServer(config, signoz, jwt)
    if err != nil {
        logger.ErrorContext(ctx, "failed to create server", "error", err)
        return err
    }

    if err := server.Start(ctx); err != nil {
        logger.ErrorContext(ctx, "failed to start server", "error", err)
        return err
    }

    signoz.Start(ctx)

    if err := signoz.Wait(ctx); err != nil {
        logger.ErrorContext(ctx, "failed to start signoz", "error", err)
        return err
    }

    err = server.Stop(ctx)
    if err != nil {
        logger.ErrorContext(ctx, "failed to stop server", "error", err)
        return err
    }

    err = signoz.Stop(ctx)
    if err != nil {
        logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
        return err
    }

    return nil
}

cmd/root.go (new file, 33 lines)

package cmd

import (
    "log/slog"
    "os"

    "github.com/SigNoz/signoz/pkg/version"
    "github.com/spf13/cobra"
    "go.uber.org/zap" //nolint:depguard
)

var RootCmd = &cobra.Command{
    Use:               "signoz",
    Short:             "OpenTelemetry-Native Logs, Metrics and Traces in a single pane",
    Version:           version.Info.Version(),
    SilenceUsage:      true,
    SilenceErrors:     true,
    CompletionOptions: cobra.CompletionOptions{DisableDefaultCmd: true},
}

func Execute(logger *slog.Logger) {
    zapLogger := newZapLogger()
    zap.ReplaceGlobals(zapLogger)
    defer func() {
        _ = zapLogger.Sync()
    }()

    err := RootCmd.Execute()
    if err != nil {
        logger.ErrorContext(RootCmd.Context(), "error running command", "error", err)
        os.Exit(1)
    }
}

cmd/zap.go (new file, 15 lines)

package cmd

import (
    "go.uber.org/zap"         //nolint:depguard
    "go.uber.org/zap/zapcore" //nolint:depguard
)

// Deprecated: Use `NewLogger` from `pkg/instrumentation` instead.
func newZapLogger() *zap.Logger {
    config := zap.NewProductionConfig()
    config.EncoderConfig.TimeKey = "timestamp"
    config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
    logger, _ := config.Build()
    return logger
}

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.87.0
image: signoz/signoz:v0.90.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -194,6 +194,7 @@ services:
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- SIGNOZ_JWT_SECRET=secret
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -206,7 +207,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.42
image: signoz/signoz-otel-collector:v0.128.2
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -230,7 +231,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.2
deploy:
restart_policy:
condition: on-failure

@@ -100,7 +100,7 @@ services:
# - "9000:9000"
# - "8123:8123"
# - "9181:9181"


configs:
- source: clickhouse-config
target: /etc/clickhouse-server/config.xml
@@ -110,13 +110,12 @@ services:
target: /etc/clickhouse-server/custom-function.xml
- source: clickhouse-cluster
target: /etc/clickhouse-server/config.d/cluster.xml

volumes:
- clickhouse:/var/lib/clickhouse/
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.87.0
image: signoz/signoz:v0.90.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -136,6 +135,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -148,7 +148,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.42
image: signoz/signoz-otel-collector:v0.128.2
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -174,7 +174,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.2
deploy:
restart_policy:
condition: on-failure
@@ -195,7 +195,6 @@ volumes:
name: signoz-sqlite
zookeeper-1:
name: signoz-zookeeper-1

configs:
clickhouse-config:
file: ../common/clickhouse/config.xml
@@ -205,7 +204,6 @@ configs:
file: ../common/clickhouse/custom-function.xml
clickhouse-cluster:
file: ../common/clickhouse/cluster.xml

signoz-prometheus-config:
file: ../common/signoz/prometheus.yml
# If you have multiple dashboard files, you can list them individually:

@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_exporter: signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,27 +60,16 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
resource_to_telemetry_conversion:
enabled: true
disable_v2: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -92,11 +81,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.87.0}
image: signoz/signoz:${VERSION:-v0.90.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -197,6 +197,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -210,7 +211,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -236,7 +237,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-sync
command:
- sync
@@ -247,7 +248,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-async
command:
- async

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.87.0}
image: signoz/signoz:${VERSION:-v0.90.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -130,6 +130,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
- DOT_METRICS_ENABLED=true
healthcheck:
test:
- CMD
@@ -142,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -164,7 +165,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-sync
command:
- sync
@@ -176,7 +177,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-async
command:
- async

@@ -26,7 +26,7 @@ processors:
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: clickhousemetricswrite, signozclickhousemetrics
metrics_exporter: signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
@@ -60,27 +60,16 @@ exporters:
datasource: tcp://clickhouse:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
disable_v2: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
@@ -92,11 +81,11 @@ service:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [clickhousemetricswrite, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]

@@ -16,7 +16,7 @@ __Table of Contents__
- [Prerequisites](#prerequisites-1)
- [Install Helm Repo and Charts](#install-helm-repo-and-charts)
- [Start the OpenTelemetry Demo App](#start-the-opentelemetry-demo-app-1)
- [Moniitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [Monitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes)
- [What's next](#whats-next)

@@ -1,4 +0,0 @@ (file removed)
.vscode
README.md
signoz.db
bin

@@ -203,17 +203,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
&opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation,
)

orgs, err := apiHandler.Signoz.Modules.OrgGetter.ListByOwnedKeyRange(context.Background())
if err != nil {
return nil, err
}
for _, org := range orgs {
errorList := reader.PreloadMetricsMetadata(context.Background(), org.ID)
for _, er := range errorList {
zap.L().Error("failed to preload metrics metadata", zap.Error(er))
}
}

return s, nil
}

@@ -1,179 +0,0 @@ (file removed)

package main

import (
    "context"
    "flag"
    "os"
    "time"

    "github.com/SigNoz/signoz/ee/licensing"
    "github.com/SigNoz/signoz/ee/licensing/httplicensing"
    "github.com/SigNoz/signoz/ee/query-service/app"
    "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
    "github.com/SigNoz/signoz/ee/zeus"
    "github.com/SigNoz/signoz/ee/zeus/httpzeus"
    "github.com/SigNoz/signoz/pkg/analytics"
    "github.com/SigNoz/signoz/pkg/config"
    "github.com/SigNoz/signoz/pkg/config/envprovider"
    "github.com/SigNoz/signoz/pkg/config/fileprovider"
    "github.com/SigNoz/signoz/pkg/factory"
    pkglicensing "github.com/SigNoz/signoz/pkg/licensing"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/version"
    pkgzeus "github.com/SigNoz/signoz/pkg/zeus"

    "go.uber.org/zap"
    "go.uber.org/zap/zapcore"
)

// Deprecated: Please use the logger from pkg/instrumentation.
func initZapLog() *zap.Logger {
    config := zap.NewProductionConfig()
    config.EncoderConfig.TimeKey = "timestamp"
    config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
    logger, _ := config.Build()
    return logger
}

func main() {
    var promConfigPath, skipTopLvlOpsPath string

    // disables rule execution but allows change to the rule definition
    var disableRules bool

    // the url used to build link in the alert messages in slack and other systems
    var ruleRepoURL string
    var cluster string

    var useLogsNewSchema bool
    var useTraceNewSchema bool
    var cacheConfigPath, fluxInterval, fluxIntervalForTraceDetail string
    var preferSpanMetrics bool

    var maxIdleConns int
    var maxOpenConns int
    var dialTimeout time.Duration
    var gatewayUrl string
    var useLicensesV3 bool

    // Deprecated
    flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
    // Deprecated
    flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
    // Deprecated
    flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
    // Deprecated
    flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
    // Deprecated
    flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
    flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
    // Deprecated
    flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
    // Deprecated
    flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
    // Deprecated
    flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
    // Deprecated
    flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
    // Deprecated
    flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
    flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
    flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)")
    flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
    flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
    // Deprecated
    flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
    flag.Parse()

    loggerMgr := initZapLog()
    zap.ReplaceGlobals(loggerMgr)
    defer loggerMgr.Sync() // flushes buffer, if any
    ctx := context.Background()

    config, err := signoz.NewConfig(ctx, config.ResolverConfig{
        Uris: []string{"env:"},
        ProviderFactories: []config.ProviderFactory{
            envprovider.NewFactory(),
            fileprovider.NewFactory(),
        },
    }, signoz.DeprecatedFlags{
        MaxIdleConns: maxIdleConns,
        MaxOpenConns: maxOpenConns,
        DialTimeout: dialTimeout,
        Config: promConfigPath,
        FluxInterval: fluxInterval,
        FluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
        Cluster: cluster,
        GatewayUrl: gatewayUrl,
    })
    if err != nil {
        zap.L().Fatal("Failed to create config", zap.Error(err))
    }

    version.Info.PrettyPrint(config.Version)

    sqlStoreFactories := signoz.NewSQLStoreProviderFactories()
    if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
        zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
    }

    jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")

    if len(jwtSecret) == 0 {
        zap.L().Warn("No JWT secret key is specified.")
    } else {
        zap.L().Info("JWT secret key set successfully.")
    }

    jwt := authtypes.NewJWT(jwtSecret, 30*time.Minute, 30*24*time.Hour)

    signoz, err := signoz.New(
        context.Background(),
        config,
        jwt,
        zeus.Config(),
        httpzeus.NewProviderFactory(),
        licensing.Config(24*time.Hour, 3),
        func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
            return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
        },
        signoz.NewEmailingProviderFactories(),
        signoz.NewCacheProviderFactories(),
        signoz.NewWebProviderFactories(),
        sqlStoreFactories,
        signoz.NewTelemetryStoreProviderFactories(),
    )
    if err != nil {
        zap.L().Fatal("Failed to create signoz", zap.Error(err))
    }

    server, err := app.NewServer(config, signoz, jwt)
    if err != nil {
        zap.L().Fatal("Failed to create server", zap.Error(err))
    }

    if err := server.Start(ctx); err != nil {
        zap.L().Fatal("Could not start server", zap.Error(err))
    }

    signoz.Start(ctx)

    if err := signoz.Wait(ctx); err != nil {
        zap.L().Fatal("Failed to start signoz", zap.Error(err))
    }

    err = server.Stop(ctx)
    if err != nil {
        zap.L().Fatal("Failed to stop server", zap.Error(err))
    }

    err = signoz.Stop(ctx)
    if err != nil {
        zap.L().Fatal("Failed to stop signoz", zap.Error(err))
    }
}

ee/sqlschema/postgressqlschema/formatter.go (new file, 36 lines)

package postgressqlschema

import (
    "strings"

    "github.com/SigNoz/signoz/pkg/sqlschema"
)

type Formatter struct {
    sqlschema.Formatter
}

func (formatter Formatter) SQLDataTypeOf(dataType sqlschema.DataType) string {
    if dataType == sqlschema.DataTypeTimestamp {
        return "TIMESTAMPTZ"
    }

    return strings.ToUpper(dataType.String())
}

func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
    switch strings.ToUpper(dataType) {
    case "TIMESTAMPTZ", "TIMESTAMP", "TIMESTAMP WITHOUT TIME ZONE", "TIMESTAMP WITH TIME ZONE":
        return sqlschema.DataTypeTimestamp
    case "INT8":
        return sqlschema.DataTypeBigInt
    case "INT2", "INT4", "SMALLINT", "INTEGER":
        return sqlschema.DataTypeInteger
    case "BOOL", "BOOLEAN":
        return sqlschema.DataTypeBoolean
    case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
        return sqlschema.DataTypeText
    }

    return formatter.Formatter.DataTypeOf(dataType)
}

ee/sqlschema/postgressqlschema/provider.go (new file, 285 lines)

package postgressqlschema

import (
    "context"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun"
)

type provider struct {
    settings factory.ScopedProviderSettings
    fmter    sqlschema.SQLFormatter
    sqlstore sqlstore.SQLStore
    operator sqlschema.SQLOperator
}

func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config] {
    return factory.NewProviderFactory(factory.MustNewName("postgres"), func(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config) (sqlschema.SQLSchema, error) {
        return New(ctx, providerSettings, config, sqlstore)
    })
}

func New(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config, sqlstore sqlstore.SQLStore) (sqlschema.SQLSchema, error) {
    settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/sqlschema/postgressqlschema")
    fmter := Formatter{Formatter: sqlschema.NewFormatter(sqlstore.BunDB().Dialect())}

    return &provider{
        sqlstore: sqlstore,
        fmter:    fmter,
        settings: settings,
        operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
            DropConstraint:          true,
            ColumnIfNotExistsExists: true,
            AlterColumnSetNotNull:   true,
        }),
    }, nil
}

func (provider *provider) Formatter() sqlschema.SQLFormatter {
    return provider.fmter
}

func (provider *provider) Operator() sqlschema.SQLOperator {
    return provider.operator
}

func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
    rows, err := provider.
        sqlstore.
        BunDB().
        QueryContext(ctx, `
            SELECT
                c.column_name,
                c.is_nullable = 'YES',
                c.udt_name,
                c.column_default
            FROM
                information_schema.columns AS c
            WHERE
                c.table_name = ?`, string(tableName))
    if err != nil {
        return nil, nil, err
    }

    defer func() {
        if err := rows.Close(); err != nil {
            provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
        }
    }()

    columns := make([]*sqlschema.Column, 0)
    for rows.Next() {
        var (
            name        string
            sqlDataType string
            nullable    bool
            defaultVal  *string
        )
        if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
            return nil, nil, err
        }

        columnDefault := ""
        if defaultVal != nil {
            columnDefault = *defaultVal
        }

        columns = append(columns, &sqlschema.Column{
            Name:     sqlschema.ColumnName(name),
            Nullable: nullable,
            DataType: provider.fmter.DataTypeOf(sqlDataType),
            Default:  columnDefault,
        })
    }

    constraintsRows, err := provider.
        sqlstore.
        BunDB().
        QueryContext(ctx, `
            SELECT
                c.column_name,
                constraint_name,
                constraint_type
            FROM
                information_schema.table_constraints tc
                JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_catalog, table_name, constraint_name)
                JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name
            WHERE
                c.table_name = ?`, string(tableName))
    if err != nil {
        return nil, nil, err
    }

    defer func() {
        if err := constraintsRows.Close(); err != nil {
            provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
        }
    }()

    var primaryKeyConstraint *sqlschema.PrimaryKeyConstraint
    uniqueConstraintsMap := make(map[string]*sqlschema.UniqueConstraint)
    for constraintsRows.Next() {
        var (
            name           string
            constraintName string
            constraintType string
        )

        if err := constraintsRows.Scan(&name, &constraintName, &constraintType); err != nil {
            return nil, nil, err
        }

        if constraintType == "PRIMARY KEY" {
            if primaryKeyConstraint == nil {
                primaryKeyConstraint = (&sqlschema.PrimaryKeyConstraint{
                    ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
                }).Named(constraintName).(*sqlschema.PrimaryKeyConstraint)
            } else {
                primaryKeyConstraint.ColumnNames = append(primaryKeyConstraint.ColumnNames, sqlschema.ColumnName(name))
            }
        }

        if constraintType == "UNIQUE" {
            if _, ok := uniqueConstraintsMap[constraintName]; !ok {
                uniqueConstraintsMap[constraintName] = (&sqlschema.UniqueConstraint{
                    ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)},
                }).Named(constraintName).(*sqlschema.UniqueConstraint)
            } else {
                uniqueConstraintsMap[constraintName].ColumnNames = append(uniqueConstraintsMap[constraintName].ColumnNames, sqlschema.ColumnName(name))
            }
        }
    }

    foreignKeyConstraintsRows, err := provider.
        sqlstore.
        BunDB().
        QueryContext(ctx, `
            SELECT
                tc.constraint_name,
                kcu.table_name AS referencing_table,
                kcu.column_name AS referencing_column,
                ccu.table_name AS referenced_table,
                ccu.column_name AS referenced_column
            FROM
                information_schema.key_column_usage kcu
                JOIN information_schema.table_constraints tc ON kcu.constraint_name = tc.constraint_name AND kcu.table_schema = tc.table_schema
                JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name AND ccu.table_schema = tc.table_schema
            WHERE
                tc.constraint_type = ?
                AND kcu.table_name = ?`, "FOREIGN KEY", string(tableName))
    if err != nil {
        return nil, nil, err
    }

    defer func() {
        if err := foreignKeyConstraintsRows.Close(); err != nil {
            provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
        }
    }()

    foreignKeyConstraints := make([]*sqlschema.ForeignKeyConstraint, 0)
    for foreignKeyConstraintsRows.Next() {
        var (
            constraintName    string
            referencingTable  string
            referencingColumn string
            referencedTable   string
            referencedColumn  string
        )

        if err := foreignKeyConstraintsRows.Scan(&constraintName, &referencingTable, &referencingColumn, &referencedTable, &referencedColumn); err != nil {
            return nil, nil, err
        }

        foreignKeyConstraints = append(foreignKeyConstraints, (&sqlschema.ForeignKeyConstraint{
            ReferencingColumnName: sqlschema.ColumnName(referencingColumn),
            ReferencedTableName:   sqlschema.TableName(referencedTable),
            ReferencedColumnName:  sqlschema.ColumnName(referencedColumn),
        }).Named(constraintName).(*sqlschema.ForeignKeyConstraint))
    }

    uniqueConstraints := make([]*sqlschema.UniqueConstraint, 0)
    for _, uniqueConstraint := range uniqueConstraintsMap {
        uniqueConstraints = append(uniqueConstraints, uniqueConstraint)
    }

    return &sqlschema.Table{
        Name:                  tableName,
        Columns:               columns,
        PrimaryKeyConstraint:  primaryKeyConstraint,
        ForeignKeyConstraints: foreignKeyConstraints,
    }, uniqueConstraints, nil
}

func (provider *provider) GetIndices(ctx context.Context, name sqlschema.TableName) ([]sqlschema.Index, error) {
    rows, err := provider.
        sqlstore.
        BunDB().
        QueryContext(ctx, `
            SELECT
                ct.relname AS table_name,
                ci.relname AS index_name,
                i.indisunique AS unique,
                i.indisprimary AS primary,
                a.attname AS column_name
            FROM
                pg_index i
                LEFT JOIN pg_class ct ON ct.oid = i.indrelid
                LEFT JOIN pg_class ci ON ci.oid = i.indexrelid
                LEFT JOIN pg_attribute a ON a.attrelid = ct.oid
                LEFT JOIN pg_constraint con ON con.conindid = i.indexrelid
            WHERE
                a.attnum = ANY(i.indkey)
                AND con.oid IS NULL
                AND ct.relkind = 'r'
                AND ct.relname = ?`, string(name))
    if err != nil {
        return nil, err
    }

    defer func() {
        if err := rows.Close(); err != nil {
            provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
        }
    }()

    uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex)
    for rows.Next() {
        var (
            tableName  string
            indexName  string
            unique     bool
            primary    bool
            columnName string
        )

        if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
            return nil, err
        }

        if unique {
            if _, ok := uniqueIndicesMap[indexName]; !ok {
                uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{
                    TableName:   name,
                    ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
                }
            } else {
                uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName))
            }
        }
    }

    indices := make([]sqlschema.Index, 0)
    for _, index := range uniqueIndicesMap {
        indices = append(indices, index)
    }

    return indices, nil
}

func (provider *provider) ToggleFKEnforcement(_ context.Context, _ bun.IDB, _ bool) error {
    return nil
}

@@ -1,4 +1,5 @@
module.exports = {
ignorePatterns: ['src/parser/*.ts'],
env: {
browser: true,
es2021: true,

@@ -8,3 +8,6 @@ public/

# Ignore all JSON files:
**/*.json

# Ignore all files in parser folder:
src/parser/**

@@ -28,6 +28,8 @@
"dependencies": {
"@ant-design/colors": "6.0.0",
"@ant-design/icons": "4.8.0",
"@codemirror/autocomplete": "6.18.6",
"@codemirror/lang-javascript": "6.2.3",
"@dnd-kit/core": "6.1.0",
"@dnd-kit/modifiers": "7.0.0",
"@dnd-kit/sortable": "8.0.0",
@@ -43,6 +45,8 @@
"@signozhq/design-tokens": "1.1.4",
"@tanstack/react-table": "8.20.6",
"@tanstack/react-virtual": "3.11.2",
"@uiw/codemirror-theme-copilot": "4.23.11",
"@uiw/react-codemirror": "4.23.10",
"@uiw/react-md-editor": "3.23.5",
"@visx/group": "3.3.0",
"@visx/hierarchy": "3.12.0",
@@ -53,6 +57,7 @@
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"axios": "1.8.2",
"antlr4": "4.13.2",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",
@@ -213,7 +218,9 @@
"eslint-plugin-simple-import-sort": "^7.0.0",
"eslint-plugin-sonarjs": "^0.12.0",
"husky": "^7.0.4",
"image-webpack-loader": "8.1.0",
"image-minimizer-webpack-plugin": "^4.0.0",
"imagemin": "^8.0.1",
"imagemin-svgo": "^10.0.1",
"is-ci": "^3.0.1",
"jest-styled-components": "^7.0.8",
"lint-staged": "^12.5.0",
@@ -230,6 +237,7 @@
"redux-mock-store": "1.5.4",
"sass": "1.66.1",
"sass-loader": "13.3.2",
"sharp": "^0.33.4",
"ts-jest": "^27.1.5",
"ts-node": "^10.2.1",
"typescript-plugin-css-modules": "5.0.1",
@@ -254,6 +262,7 @@
"cross-spawn": "7.0.5",
"cookie": "^0.7.1",
"serialize-javascript": "6.0.2",
"prismjs": "1.30.0"
"prismjs": "1.30.0",
"got": "11.8.5"
}
}

1
frontend/public/Logos/argocd.svg
Normal file
|
After Width: | Height: | Size: 6.1 KiB |
1
frontend/public/Logos/azure-mysql.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 18"><defs><linearGradient id="a" x1="2.59" y1="10.16" x2="15.41" y2="10.16" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#005ba1"/><stop offset=".07" stop-color="#0060a9"/><stop offset=".36" stop-color="#0071c8"/><stop offset=".52" stop-color="#0078d4"/><stop offset=".64" stop-color="#0074cd"/><stop offset=".82" stop-color="#006abb"/><stop offset="1" stop-color="#005ba1"/></linearGradient></defs><path d="M9 5.14c-3.54 0-6.41-1-6.41-2.32v12.36c0 1.27 2.82 2.3 6.32 2.32H9c3.54 0 6.41-1 6.41-2.32V2.82c0 1.29-2.87 2.32-6.41 2.32z" fill="url(#a)"/><path d="M15.41 2.82c0 1.29-2.87 2.32-6.41 2.32s-6.41-1-6.41-2.32S5.46.5 9 .5s6.41 1 6.41 2.32" fill="#e8e8e8"/><path d="M13.92 2.63c0 .82-2.21 1.48-4.92 1.48s-4.92-.66-4.92-1.48S6.29 1.16 9 1.16s4.92.66 4.92 1.47" fill="#50e6ff"/><path d="M9 3a11.55 11.55 0 00-3.89.57A11.42 11.42 0 009 4.11a11.15 11.15 0 003.89-.58A11.84 11.84 0 009 3z" fill="#198ab3"/><path d="M12.64 9v1.63h-1a.39.39 0 01-.29-.14V9H10v1.78a.92.92 0 001 .89h1.49l.26-.13s-.11.41-.26.43h-2.38v1h2.66A1.21 1.21 0 0014 11.7V9zM9.53 9v-.49a.7.7 0 00-.48-.77 1.74 1.74 0 00-.5-.08.94.94 0 00-.91.58l-.78 1.9-1-1.9A.93.93 0 005 7.66a1.44 1.44 0 00-.51.09c-.35.11-.43.34-.43.73v3.31h1.17V9.56l.63 1.57a1.08 1.08 0 001 .66c.44 0 .62-.26.8-.66l.67-1.51v2.15h1.18V9z" fill="#f2f2f2"/></svg>
|
||||
|
After Width: | Height: | Size: 1.3 KiB |
1
frontend/public/Logos/cloudflare.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32" width="64" height="64"><path d="M8.16 23h21.177v-5.86l-4.023-2.307-.694-.3-16.46.113z" fill="#fff"/><path d="M22.012 22.222c.197-.675.122-1.294-.206-1.754-.3-.422-.807-.666-1.416-.694l-11.545-.15c-.075 0-.14-.038-.178-.094s-.047-.13-.028-.206c.038-.113.15-.197.272-.206l11.648-.15c1.38-.066 2.88-1.182 3.404-2.55l.666-1.735a.38.38 0 0 0 .02-.225c-.75-3.395-3.78-5.927-7.4-5.927-3.34 0-6.17 2.157-7.184 5.15-.657-.488-1.5-.75-2.392-.666-1.604.16-2.9 1.444-3.048 3.048a3.58 3.58 0 0 0 .084 1.191A4.84 4.84 0 0 0 0 22.1c0 .234.02.47.047.703.02.113.113.197.225.197H21.58a.29.29 0 0 0 .272-.206l.16-.572z" fill="#f38020"/><path d="M25.688 14.803l-.32.01c-.075 0-.14.056-.17.13l-.45 1.566c-.197.675-.122 1.294.206 1.754.3.422.807.666 1.416.694l2.457.15c.075 0 .14.038.178.094s.047.14.028.206c-.038.113-.15.197-.272.206l-2.56.15c-1.388.066-2.88 1.182-3.404 2.55l-.188.478c-.038.094.028.188.13.188h8.797a.23.23 0 0 0 .225-.169A6.41 6.41 0 0 0 32 21.106a6.32 6.32 0 0 0-6.312-6.302" fill="#faae40"/></svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
18
frontend/public/Logos/dynamodb.svg
Normal file
|
After Width: | Height: | Size: 6.2 KiB |
2
frontend/public/Logos/elk.svg
Normal file
@@ -0,0 +1,2 @@
|
||||
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
|
||||
<svg width="800px" height="800px" viewBox="0 0 256 256" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMinYMin meet"><path d="M255.96 134.393c0-21.521-13.373-40.117-33.223-47.43a75.239 75.239 0 0 0 1.253-13.791c0-39.909-32.386-72.295-72.295-72.295-23.193 0-44.923 11.074-58.505 30.088-6.686-5.224-14.835-7.94-23.402-7.94-21.104 0-38.446 17.133-38.446 38.446 0 4.597.836 9.194 2.298 13.373C13.582 81.739 0 100.962 0 122.274c0 21.522 13.373 40.327 33.431 47.64-.835 4.388-1.253 8.985-1.253 13.79 0 39.7 32.386 72.087 72.086 72.087 23.402 0 44.924-11.283 58.505-30.088 6.686 5.223 15.044 8.149 23.611 8.149 21.104 0 38.446-17.134 38.446-38.446 0-4.597-.836-9.194-2.298-13.373 19.64-7.104 33.431-26.327 33.431-47.64z" fill="#FFF"/><path d="M100.085 110.364l57.043 26.119 57.669-50.565a64.312 64.312 0 0 0 1.253-12.746c0-35.52-28.834-64.355-64.355-64.355-21.313 0-41.162 10.447-53.072 27.998l-9.612 49.73 11.074 23.82z" fill="#F4BD19"/><path d="M40.953 170.75c-.835 4.179-1.253 8.567-1.253 12.955 0 35.52 29.043 64.564 64.564 64.564 21.522 0 41.372-10.656 53.49-28.208l9.403-49.729-12.746-24.238-57.251-26.118-56.207 50.774z" fill="#3CBEB1"/><path d="M40.536 71.918l39.073 9.194 8.775-44.506c-5.432-4.179-11.91-6.268-18.805-6.268-16.925 0-30.924 13.79-30.924 30.924 0 3.552.627 7.313 1.88 10.656z" fill="#E9478C"/><path d="M37.192 81.32c-17.551 5.642-29.67 22.567-29.67 40.954 0 17.97 11.074 34.059 27.79 40.327l54.953-49.73-10.03-21.52-43.043-10.03z" fill="#2C458F"/><path d="M167.784 219.852c5.432 4.18 11.91 6.478 18.596 6.478 16.925 0 30.924-13.79 30.924-30.924 0-3.761-.627-7.314-1.88-10.657l-39.073-9.193-8.567 44.296z" fill="#95C63D"/><path d="M175.724 165.317l43.043 10.03c17.551-5.85 29.67-22.566 29.67-40.954 0-17.97-11.074-33.849-27.79-40.326l-56.415 49.311 11.492 21.94z" fill="#176655"/></svg>
|
||||
|
After Width: | Height: | Size: 1.9 KiB |
19
frontend/public/Logos/external-api-monitoring.svg
Normal file
@@ -0,0 +1,19 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
|
||||
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
|
||||
<svg version="1.1" baseProfile="tiny" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
width="800px" height="800px" viewBox="0 0 24 24" overflow="visible" xml:space="preserve">
|
||||
<g >
|
||||
<rect y="0" fill="none" width="24" height="24"/>
|
||||
<g transform="translate(1.000000, 8.000000)">
|
||||
<path fill-rule="evenodd" fill="#5C85DE" d="M2-1.9c-1.1,0-2.3,1.1-2.3,2.2V10H2V5.5h2.2V10h2.2V0.3c0-1.1-1.1-2.2-2.3-2.2H2
|
||||
L2-1.9z M2,3.2v-3h2.2v3H2L2,3.2z"/>
|
||||
<path fill-rule="evenodd" fill="#5C85DE" d="M10.3-2C9.1-2,8-0.9,8,0.2V10l2.2,0V5.5h2.2c1.1,0,2.3-1.1,2.3-2.2l0-3
|
||||
c0-1.1-1.1-2.2-2.3-2.2H10.3L10.3-2z M10.2,3.2v-3h2.2v3H10.2L10.2,3.2z"/>
|
||||
<polygon fill-rule="evenodd" fill="#5C85DE" points="18.5,0.3 18.5,7.8 16.2,7.8 16.2,10 23,10 23,7.8 20.8,7.8 20.8,0.3 23,0.3
|
||||
23,-1.9 16.2,-1.9 16.2,0.3 "/>
|
||||
<polygon fill-rule="evenodd" fill="#3367D6" points="2,5.5 2,3.2 3.5,3.2 "/>
|
||||
<polygon fill-rule="evenodd" fill="#3367D6" points="10.2,5.5 10.2,3.2 11.5,3.2 "/>
|
||||
<polygon fill-rule="evenodd" fill="#3367D6" points="18.5,1.8 18.5,1.8 18.5,0.3 20.8,0.3 "/>
|
||||
</g>
|
||||
</g>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
1
frontend/public/Logos/github-actions.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#2088ff" d="M26.666 0C11.97 0 0 11.97 0 26.666c0 12.87 9.181 23.651 21.334 26.13v37.87c0 11.77 9.68 21.334 21.332 21.334h.195c1.302 9.023 9.1 16 18.473 16C71.612 128 80 119.612 80 109.334s-8.388-18.668-18.666-18.668c-9.372 0-17.17 6.977-18.473 16h-.195c-8.737 0-16-7.152-16-16V63.779a18.514 18.514 0 0 0 13.24 5.555h2.955c1.303 9.023 9.1 16 18.473 16 9.372 0 17.169-6.977 18.47-16h11.057c1.303 9.023 9.1 16 18.473 16 10.278 0 18.666-8.39 18.666-18.668C128 56.388 119.612 48 109.334 48c-9.373 0-17.171 6.977-18.473 16H79.805c-1.301-9.023-9.098-16-18.471-16s-17.171 6.977-18.473 16h-2.955c-6.433 0-11.793-4.589-12.988-10.672 14.58-.136 26.416-12.05 26.416-26.662C53.334 11.97 41.362 0 26.666 0zm0 5.334A21.292 21.292 0 0 1 48 26.666 21.294 21.294 0 0 1 26.666 48 21.292 21.292 0 0 1 5.334 26.666 21.29 21.29 0 0 1 26.666 5.334zm-5.215 7.541C18.67 12.889 16 15.123 16 18.166v17.043c0 4.043 4.709 6.663 8.145 4.533l13.634-8.455c3.257-2.02 3.274-7.002.032-9.045l-13.635-8.59a5.024 5.024 0 0 0-2.725-.777zm-.117 5.291 13.635 8.588-13.635 8.455V18.166zm40 35.168a13.29 13.29 0 0 1 13.332 13.332A13.293 13.293 0 0 1 61.334 80 13.294 13.294 0 0 1 48 66.666a13.293 13.293 0 0 1 13.334-13.332zm48 0a13.29 13.29 0 0 1 13.332 13.332A13.293 13.293 0 0 1 109.334 80 13.294 13.294 0 0 1 96 66.666a13.293 13.293 0 0 1 13.334-13.332zm-42.568 6.951a2.667 2.667 0 0 0-1.887.78l-6.3 6.294-2.093-2.084a2.667 2.667 0 0 0-3.771.006 2.667 2.667 0 0 0 .008 3.772l3.974 3.96a2.667 2.667 0 0 0 3.766-.001l8.185-8.174a2.667 2.667 0 0 0 .002-3.772 2.667 2.667 0 0 0-1.884-.78zm48 0a2.667 2.667 0 0 0-1.887.78l-6.3 6.294-2.093-2.084a2.667 2.667 0 0 0-3.771.006 2.667 2.667 0 0 0 .008 3.772l3.974 3.96a2.667 2.667 0 0 0 3.766-.001l8.185-8.174a2.667 2.667 0 0 0 .002-3.772 2.667 2.667 0 0 0-1.884-.78zM61.334 96a13.293 13.293 0 0 1 13.332 13.334 13.29 13.29 0 0 1-13.332 13.332A13.293 13.293 0 0 1 48 109.334 13.294 13.294 0 0 1 61.334 96zM56 105.334c-2.193 0-4 1.807-4 4 0 2.195 1.808 4 4 4s4-1.805 4-4c0-2.193-1.807-4-4-4zm10.666 0c-2.193 0-4 1.807-4 4 0 2.195 1.808 4 4 4s4-1.805 4-4c0-2.193-1.807-4-4-4zM56 108c.75 0 1.334.585 1.334 1.334 0 .753-.583 1.332-1.334 1.332-.75 0-1.334-.58-1.334-1.332 0-.75.585-1.334 1.334-1.334zm10.666 0c.75 0 1.334.585 1.334 1.334 0 .753-.583 1.332-1.334 1.332-.75 0-1.332-.58-1.332-1.332 0-.75.583-1.334 1.332-1.334z"/><path fill="#79b8ff" d="M109.334 90.666c-9.383 0-17.188 6.993-18.477 16.031a2.667 2.667 0 0 0-.265-.011l-2.7.09a2.667 2.667 0 0 0-2.578 2.751 2.667 2.667 0 0 0 2.752 2.578l2.7-.087a2.667 2.667 0 0 0 .097-.006C92.17 121.029 99.965 128 109.334 128c10.278 0 18.666-8.388 18.666-18.666s-8.388-18.668-18.666-18.668zm0 5.334a13.293 13.293 0 0 1 13.332 13.334 13.29 13.29 0 0 1-13.332 13.332A13.293 13.293 0 0 1 96 109.334 13.294 13.294 0 0 1 109.334 96z"/></svg>
|
||||
|
After Width: | Height: | Size: 2.8 KiB |
5
frontend/public/Logos/github.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g id="lucide/github">
|
||||
<path id="Vector" d="M15 22V18C15.1391 16.7473 14.7799 15.4901 14 14.5C17 14.5 20 12.5 20 9C20.08 7.75 19.73 6.52 19 5.5C19.28 4.35 19.28 3.15 19 2C19 2 18 2 16 3.5C13.36 3 10.64 3 8 3.5C6 2 5 2 5 2C4.7 3.15 4.7 4.35 5 5.5C4.27187 6.51588 3.91847 7.75279 4 9C4 12.5 7 14.5 10 14.5C9.61 14.99 9.32 15.55 9.15 16.15C8.98 16.75 8.93 17.38 9 18M9 18V22M9 18C4.49 20 4 16 2 16" stroke="#C0C1C3" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 587 B |
1
frontend/public/Logos/http-monitoring.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 373.71 200"><defs><style type="text/css">.cls-1{fill:#008ec7;}.cls-2{fill:#005b9b;}.cls-3{fill:#fff;}</style></defs><title>IETF-Badge-HTTP</title><g id="Layer_2"><path class="cls-1" d="M326,0H47.73L0,100,47.73,200H326l47.73-100ZM310.05,183.36H58.22L18.43,100,58.22,16.64H310.05L349.84,100Z"/><polygon class="cls-2" points="349.84 100.01 310.05 183.37 58.22 183.37 18.43 100.01 58.22 16.64 310.05 16.64 349.84 100.01"/><path class="cls-3" d="M128.05,71.89v59.53H114.27V107h-27v24.41H73.46V71.89H87.23V95.36h27V71.89Z"/><path class="cls-3" d="M154.5,83.12H135.45V71.89h51.87V83.12h-19v48.3H154.5Z"/><path class="cls-3" d="M207.9,83.12H188.85V71.89h51.87V83.12H221.67v48.3H207.9Z"/><path class="cls-3" d="M287.62,74.53a20.45,20.45,0,0,1,9,7.48,20.67,20.67,0,0,1,3.14,11.48,20.73,20.73,0,0,1-3.14,11.44,20.06,20.06,0,0,1-9,7.48A33.55,33.55,0,0,1,273.88,115h-12v16.42H248.12V71.89h25.76A33.05,33.05,0,0,1,287.62,74.53Zm-5.06,26.57a9.33,9.33,0,0,0,3.23-7.61c0-3.34-1.08-5.91-3.23-7.69s-5.3-2.68-9.44-2.68H261.89v20.66h11.23Q279.33,103.78,282.56,101.1Z"/></g></svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
1
frontend/public/Logos/jenkins.svg
Normal file
|
After Width: | Height: | Size: 13 KiB |
1
frontend/public/Logos/newrelic.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg viewBox="0 0 832.8 959.8" xmlns="http://www.w3.org/2000/svg" width="2169" height="2500"><path d="M672.6 332.3l160.2-92.4v480L416.4 959.8V775.2l256.2-147.6z" fill="#00ac69"/><path d="M416.4 184.6L160.2 332.3 0 239.9 416.4 0l416.4 239.9-160.2 92.4z" fill="#1ce783"/><path d="M256.2 572.3L0 424.6V239.9l416.4 240v479.9l-160.2-92.2z" fill="#1d252c"/></svg>
|
||||
|
After Width: | Height: | Size: 357 B |
1
frontend/public/Logos/openai.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 48 48"><path fill="#fff" d="M44.559 19.646a11.957 11.957 0 0 0-1.028-9.822 12.094 12.094 0 0 0-13.026-5.802A11.962 11.962 0 0 0 21.485 0 12.097 12.097 0 0 0 9.95 8.373a11.964 11.964 0 0 0-7.997 5.8A12.097 12.097 0 0 0 3.44 28.356a11.957 11.957 0 0 0 1.028 9.822 12.094 12.094 0 0 0 13.026 5.802 11.953 11.953 0 0 0 9.02 4.02 12.096 12.096 0 0 0 11.54-8.379 11.964 11.964 0 0 0 7.997-5.8 12.099 12.099 0 0 0-1.491-14.177zM26.517 44.863a8.966 8.966 0 0 1-5.759-2.082 6.85 6.85 0 0 0 .284-.16L30.6 37.1c.49-.278.79-.799.786-1.361V22.265l4.04 2.332a.141.141 0 0 1 .078.111v11.16a9.006 9.006 0 0 1-8.987 8.995zM7.191 36.608a8.957 8.957 0 0 1-1.073-6.027c.071.042.195.119.284.17l9.558 5.52a1.556 1.556 0 0 0 1.57 0l11.67-6.738v4.665a.15.15 0 0 1-.057.124l-9.662 5.579a9.006 9.006 0 0 1-12.288-3.293zM4.675 15.744a8.966 8.966 0 0 1 4.682-3.943c0 .082-.005.228-.005.33v11.042a1.555 1.555 0 0 0 .785 1.359l11.669 6.736-4.04 2.333a.143.143 0 0 1-.136.012L7.967 28.03a9.006 9.006 0 0 1-3.293-12.284zm33.19 7.724L26.196 16.73l4.04-2.331a.143.143 0 0 1 .136-.012l9.664 5.579c4.302 2.485 5.776 7.989 3.29 12.29a8.991 8.991 0 0 1-4.68 3.943V24.827a1.553 1.553 0 0 0-.78-1.36zm4.02-6.051c-.07-.044-.195-.119-.283-.17l-9.558-5.52a1.556 1.556 0 0 0-1.57 0l-11.67 6.738V13.8a.15.15 0 0 1 .057-.124l9.662-5.574a8.995 8.995 0 0 1 13.36 9.315zm-25.277 8.315-4.04-2.333a.141.141 0 0 1-.079-.11v-11.16a8.997 8.997 0 0 1 14.753-6.91c-.073.04-.2.11-.283.161L17.4 10.9a1.552 1.552 0 0 0-.786 1.36l-.006 13.469zM18.803 21l5.198-3.002 5.197 3V27l-5.197 3-5.198-3z"/></svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
1
frontend/public/Logos/s3.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg height="2500" width="2500" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 80 80"><linearGradient id="a" x1="0%" y1="100%" y2="0%"><stop offset="0" stop-color="#1b660f"/><stop offset="1" stop-color="#6cae3e"/></linearGradient><g fill="none" fill-rule="evenodd"><path d="M0 0h80v80H0z" fill="url(#a)"/><path d="M60.836 42.893l.384-2.704c3.54 2.12 3.587 2.997 3.586 3.02-.006.006-.61.51-3.97-.316zm-1.943-.54C52.773 40.5 44.25 36.59 40.8 34.96c0-.014.004-.027.004-.041a2.406 2.406 0 0 0-2.404-2.403c-1.324 0-2.402 1.078-2.402 2.403s1.078 2.403 2.402 2.403c.582 0 1.11-.217 1.527-.562 4.058 1.92 12.515 5.774 18.68 7.594L56.17 61.56a.955.955 0 0 0-.01.14c0 1.516-6.707 4.299-17.666 4.299-11.075 0-17.853-2.783-17.853-4.298 0-.046-.003-.091-.01-.136l-5.093-37.207c4.409 3.035 13.892 4.64 22.962 4.64 9.056 0 18.523-1.6 22.94-4.625zM15 20.478C15.072 19.162 22.634 14 38.5 14c15.864 0 23.427 5.16 23.5 6.478v.449C61.13 23.877 51.33 27 38.5 27c-12.852 0-22.657-3.132-23.5-6.087zm49 .022c0-3.465-9.934-8.5-25.5-8.5S13 17.035 13 20.5l.094.754 5.548 40.524C18.775 66.31 30.86 68 38.494 68c9.472 0 19.535-2.178 19.665-6.22l2.396-16.896c1.333.319 2.43.482 3.31.482 1.184 0 1.984-.29 2.469-.867a1.95 1.95 0 0 0 .436-1.66c-.26-1.383-1.902-2.875-5.248-4.784l2.376-16.762z" fill="#fff"/></g></svg>
|
||||
|
After Width: | Height: | Size: 1.3 KiB |
1
frontend/public/Logos/snowflake.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" fill="#29b5e8"><path d="M9.86 15.298l13.008 7.8a3.72 3.72 0 0 0 4.589-.601 4.01 4.01 0 0 0 1.227-2.908V3.956a3.81 3.81 0 0 0-1.861-3.42 3.81 3.81 0 0 0-3.893 0 3.81 3.81 0 0 0-1.861 3.42v8.896l-7.387-4.43a3.79 3.79 0 0 0-2.922-.4c-.986.265-1.818.94-2.3 1.844-1.057 1.9-.44 4.28 1.4 5.422m31.27 7.8l13.008-7.8c1.84-1.143 2.458-3.533 1.4-5.424a3.75 3.75 0 0 0-5.22-1.452l-7.3 4.37v-8.84a3.81 3.81 0 1 0-7.615 0v15.323a4.08 4.08 0 0 0 .494 2.367c.482.903 1.314 1.57 2.3 1.844a3.71 3.71 0 0 0 2.922-.4M29.552 31.97c.013-.25.108-.5.272-.68l1.52-1.58a1.06 1.06 0 0 1 .658-.282h.057a1.05 1.05 0 0 1 .656.282l1.52 1.58a1.12 1.12 0 0 1 .272.681v.06a1.13 1.13 0 0 1-.272.683l-1.52 1.58a1.04 1.04 0 0 1-.656.284h-.057c-.246-.014-.48-.115-.658-.284l-1.52-1.58a1.13 1.13 0 0 1-.272-.683zm-4.604-.65v1.364a1.54 1.54 0 0 0 .372.93l5.16 5.357a1.42 1.42 0 0 0 .895.386h1.312a1.42 1.42 0 0 0 .895-.386l5.16-5.357a1.54 1.54 0 0 0 .372-.93V31.32a1.54 1.54 0 0 0-.372-.93l-5.16-5.357a1.42 1.42 0 0 0-.895-.386h-1.312a1.42 1.42 0 0 0-.895.386L25.32 30.4a1.55 1.55 0 0 0-.372.93M3.13 27.62l7.365 4.417L3.13 36.45a4.06 4.06 0 0 0-1.399 5.424 3.75 3.75 0 0 0 2.3 1.844c.986.274 2.042.133 2.922-.392l13.008-7.8c1.2-.762 1.9-2.078 1.9-3.492a4.16 4.16 0 0 0-1.9-3.492l-13.008-7.8a3.79 3.79 0 0 0-2.922-.4c-.986.265-1.818.94-2.3 1.844-1.057 1.9-.44 4.278 1.4 5.422m38.995 4.442a4 4 0 0 0 1.91 3.477l13 7.8c.88.524 1.934.666 2.92.392s1.817-.94 2.3-1.843a4.05 4.05 0 0 0-1.4-5.424L53.5 32.038l7.365-4.417c1.84-1.143 2.457-3.53 1.4-5.422a3.74 3.74 0 0 0-2.3-1.844c-.987-.274-2.042-.134-2.92.4l-13 7.8a4 4 0 0 0-1.91 3.507M25.48 40.508a3.7 3.7 0 0 0-2.611.464l-13.008 7.8c-1.84 1.143-2.456 3.53-1.4 5.422.483.903 1.314 1.57 2.3 1.843a3.75 3.75 0 0 0 2.922-.392l7.387-4.43v8.83a3.81 3.81 0 1 0 7.614 0V44.4a3.91 3.91 0 0 0-3.205-3.903m28.66 8.276l-13.008-7.8a3.75 3.75 0 0 0-2.922-.392 3.74 3.74 0 0 0-2.3 1.843 4.09 4.09 0 0 0-.494 2.37v15.25a3.81 3.81 0 1 0 7.614 0V51.28l7.287 4.37a3.79 3.79 0 0 0 2.922.4c.986-.265 1.818-.94 2.3-1.844 1.057-1.9.44-4.28-1.4-5.422"/></svg>
|
||||
|
After Width: | Height: | Size: 2.1 KiB |
18
frontend/public/Logos/sns.svg
Normal file
@@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="80px" height="80px" viewBox="0 0 80 80" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<!-- Generator: Sketch 64 (93537) - https://sketch.com -->
|
||||
<title>Icon-Architecture/64/Arch_AWS-Simple-Notification-Service_64</title>
|
||||
<desc>Created with Sketch.</desc>
|
||||
<defs>
|
||||
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
|
||||
<stop stop-color="#B0084D" offset="0%"></stop>
|
||||
<stop stop-color="#FF4F8B" offset="100%"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g id="Icon-Architecture/64/Arch_AWS-Simple-Notification-Service_64" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="Icon-Architecture-BG/64/Application-Integration" fill="url(#linearGradient-1)">
|
||||
<rect id="Rectangle" x="0" y="0" width="80" height="80"></rect>
|
||||
</g>
|
||||
<path d="M17,38 C18.103,38 19,38.897 19,40 C19,41.103 18.103,42 17,42 C15.897,42 15,41.103 15,40 C15,38.897 15.897,38 17,38 L17,38 Z M41,64 C29.314,64 19.289,55.466 17.194,43.98 C18.965,43.894 20.427,42.659 20.857,41 L27,41 L27,39 L20.857,39 C20.427,37.342 18.966,36.107 17.195,36.02 C19.285,24.71 29.511,16 41,16 C45.313,16 49.832,17.622 54.429,20.821 L55.571,19.179 C50.633,15.743 45.73,14 41,14 C28.27,14 16.949,23.865 15.063,36.521 C13.839,37.207 13,38.5 13,40 C13,41.5 13.839,42.793 15.063,43.478 C16.97,56.341 28.056,66 41,66 C46.407,66 51.942,64.157 56.585,60.811 L55.415,59.189 C51.11,62.292 45.991,64 41,64 L41,64 Z M30.101,36.442 C31.955,36.895 34.275,37 36,37 C37.642,37 39.823,36.905 41.629,36.506 L37.105,45.553 C37.036,45.691 37,45.845 37,46 L37,50.453 C36.199,50.964 34.833,51.812 34,51.986 L34,46 C34,45.868 33.974,45.737 33.923,45.615 L30.101,36.442 Z M36,33 C40.025,33 42.174,33.604 42.841,34 C42.174,34.396 40.025,35 36,35 C31.975,35 29.826,34.396 29.159,34 C29.826,33.604 31.975,33 36,33 L36,33 Z M33,54 L34,54 C34.043,54 34.086,53.997 34.128,53.992 C35.352,53.833 36.909,52.887 38.272,52.013 L38.535,51.845 C38.824,51.661 39,51.342 39,51 L39,46.236 L44.559,35.12 C44.833,34.801 45,34.434 45,34 C45,31.39 39.361,31 36,31 C32.639,31 27,31.39 27,34 C27,34.366 27.12,34.684 27.32,34.967 L32,46.2 L32,53 C32,53.552 32.447,54 33,54 L33,54 Z M62,53 C63.103,53 64,53.897 64,55 C64,56.103 63.103,57 62,57 C60.897,57 60,56.103 60,55 C60,53.897 60.897,53 62,53 L62,53 Z M62,23 C63.103,23 64,23.897 64,25 C64,26.103 63.103,27 62,27 C60.897,27 60,26.103 60,25 C60,23.897 60.897,23 62,23 L62,23 Z M64,38 C65.103,38 66,38.897 66,40 C66,41.103 65.103,42 64,42 C62.897,42 62,41.103 62,40 C62,38.897 62.897,38 64,38 L64,38 Z M54,41 L60.143,41 C60.589,42.72 62.142,44 64,44 C66.206,44 68,42.206 68,40 C68,37.794 66.206,36 64,36 C62.142,36 60.589,37.28 60.143,39 L54,39 L54,26 L58.143,26 C58.589,27.72 60.142,29 62,29 C64.206,29 66,27.206 66,25 C66,22.794 64.206,21 62,21 C60.142,21 58.589,22.28 58.143,24 L53,24 C52.447,24 52,24.448 52,25 L52,39 L45,39 L45,41 L52,41 L52,55 C52,55.552 52.447,56 53,56 L58.143,56 C58.589,57.72 60.142,59 62,59 C64.206,59 66,57.206 66,55 C66,52.794 64.206,51 62,51 C60.142,51 58.589,52.28 58.143,54 L54,54 L54,41 Z" id="AWS-Simple-Notification-Service_Icon_64_Squid" fill="#FFFFFF"></path>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.2 KiB |
1
frontend/public/Logos/sqs.svg
Normal file
|
After Width: | Height: | Size: 5.7 KiB |
8
frontend/public/Logos/systemd.svg
Normal file
@@ -0,0 +1,8 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="300" height="200">
|
||||
<path fill="#fff" d="M0 0h300v200H0z"/>
|
||||
<g transform="translate(30.667 -1141.475) scale(1.33333)">
|
||||
<path d="M25 911.61v39h15v-6h-9v-27h9v-6zm114 0v6h9v27h-9v6h15v-39z" style="line-height:normal;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000;text-transform:none;text-orientation:mixed;white-space:normal;shape-padding:0;isolation:auto;mix-blend-mode:normal;solid-color:#000;solid-opacity:1" color="#000" font-weight="400" font-family="sans-serif" overflow="visible" fill="#201a26"/>
|
||||
<path d="M92.5 931.11l27-15v30z" fill="#30d475"/>
|
||||
<circle cx="70.002" cy="931.111" r="13.5" fill="#30d475"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 942 B |
1
frontend/public/Logos/wordpress.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32" width="64" height="64" fill="#00749a"><path d="M2.26 16c0 5.45 3.13 10.145 7.7 12.348L3.478 10.435C2.725 12.174 2.26 14.03 2.26 16zm23.015-.696c0-1.68-.638-2.9-1.16-3.768-.696-1.16-1.333-2.087-1.333-3.246 0-1.275.986-2.435 2.32-2.435h.174C22.84 3.594 19.594 2.26 16 2.26A13.95 13.95 0 0 0 4.522 8.463h.87c1.45 0 3.652-.174 3.652-.174.754-.058.812 1.043.116 1.16 0 0-.754.116-1.565.116l4.986 14.84 3.014-8.986-2.145-5.855L12 9.45c-.754-.058-.638-1.16.058-1.16 0 0 2.26.174 3.594.174 1.45 0 3.652-.174 3.652-.174.754-.058.812 1.043.116 1.16 0 0-.754.116-1.565.116L22.84 24.35l1.4-4.58c.58-1.913 1.043-3.246 1.043-4.464zm-9.043 1.913L12.116 29.16c1.217.348 2.55.58 3.884.58 1.623 0 3.13-.3 4.58-.754-.058-.058-.058-.116-.116-.174zM28.058 9.45l.116 1.4c0 1.4-.232 2.957-1.043 4.928l-4.174 12.116c4.058-2.377 6.84-6.783 6.84-11.884-.058-2.377-.696-4.58-1.74-6.55zM16 0C7.188 0 0 7.188 0 16s7.188 16 16 16 16-7.188 16-16S24.812 0 16 0zm0 31.304C7.594 31.304.754 24.464.754 16A15.27 15.27 0 0 1 16 .754 15.27 15.27 0 0 1 31.246 16c0 8.464-6.84 15.304-15.246 15.304z"/></svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
@@ -14,8 +14,8 @@
  "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
  "remove_label_success": "Labels cleared",
  "alert_form_step1": "Step 1 - Define the metric",
  "alert_form_step2": "Step 2 - Define Alert Conditions",
  "alert_form_step3": "Step 3 - Alert Configuration",
  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
  "alert_form_step3": "Step {{step}} - Alert Configuration",
  "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
  "confirm_save_title": "Save Changes",
  "confirm_save_content_part1": "Your alert built with",

@@ -8,5 +8,6 @@
  "actNow": "Act now to avoid any disruptions and continue where you left off.",
  "contactAdmin": "Contact your admin to proceed with the upgrade.",
  "continueMyJourney": "Settle your bill to continue",
  "somethingWentWrong": "Something went wrong"
  "somethingWentWrong": "Something went wrong",
  "refreshPaymentStatus": "Refresh Status"
}

@@ -7,8 +7,8 @@
  "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
  "remove_label_success": "Labels cleared",
  "alert_form_step1": "Step 1 - Define the metric",
  "alert_form_step2": "Step 2 - Define Alert Conditions",
  "alert_form_step3": "Step 3 - Alert Configuration",
  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
  "alert_form_step3": "Step {{step}} - Alert Configuration",
  "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
  "confirm_save_title": "Save Changes",
  "confirm_save_content_part1": "Your alert built with",

@@ -8,5 +8,6 @@
  "actNow": "Act now to avoid any disruptions and continue where you left off.",
  "contactAdmin": "Contact your admin to proceed with the upgrade.",
  "continueMyJourney": "Settle your bill to continue",
  "somethingWentWrong": "Something went wrong"
  "somethingWentWrong": "Something went wrong",
  "refreshPaymentStatus": "Refresh Status"
}

@@ -7,8 +7,8 @@
  "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
  "remove_label_success": "Labels cleared",
  "alert_form_step1": "Step 1 - Define the metric",
  "alert_form_step2": "Step 2 - Define Alert Conditions",
  "alert_form_step3": "Step 3 - Alert Configuration",
  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
  "alert_form_step3": "Step {{step}} - Alert Configuration",
  "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
  "confirm_save_title": "Save Changes",
  "confirm_save_content_part1": "Your alert built with",
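
The step-label strings above switch from hard-coded step numbers to an interpolated {{step}} placeholder. A minimal sketch of how such a key resolves, assuming the frontend's i18next setup (the step value and the use of the global i18next instance are illustrative):

import i18next from 'i18next';

// Resolves to "Step 2 - Define Alert Conditions"; the step value is illustrative.
const stepLabel = i18next.t('alert_form_step2', { step: 2 });
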
@@ -191,7 +191,8 @@ function App(): JSX.Element {
		// if the user is on basic plan then remove billing
		if (isOnBasicPlan) {
			updatedRoutes = updatedRoutes.filter(
				(route) => route?.path !== ROUTES.BILLING,
				(route) =>
					route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
			);
		}

@@ -204,7 +205,8 @@
		} else {
			// if not a cloud user then remove billing and add list licenses route
			updatedRoutes = updatedRoutes.filter(
				(route) => route?.path !== ROUTES.BILLING,
				(route) =>
					route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
			);
			updatedRoutes = [...updatedRoutes, LIST_LICENSES];
		}

@@ -3,6 +3,7 @@ const apiV1 = '/api/v1/';
export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';

@@ -1,14 +1,32 @@
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import axios, { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
import {
	ChangelogSchema,
	DeploymentType,
} from 'types/api/changelog/getChangelogByVersion';

const getChangelogByVersion = async (
	versionId: string,
	deployment_type?: DeploymentType,
): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> => {
	try {
		let queryParams = `filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText`;

		if (
			deployment_type &&
			Object.values(DeploymentType).includes(deployment_type)
		) {
			const excludedDeploymentType =
				deployment_type === DeploymentType.CLOUD_ONLY
					? DeploymentType.OSS_ONLY
					: DeploymentType.CLOUD_ONLY;

			queryParams = `${queryParams}&populate[features][filters][deployment_type][$notIn]=${excludedDeploymentType}`;
		}

		const response = await axios.get(`
			https://cms.signoz.cloud/api/release-changelogs?filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText
			https://cms.signoz.cloud/api/release-changelogs?${queryParams}
		`);

		if (!Array.isArray(response.data.data) || response.data.data.length === 0) {
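
For reference, a short sketch of how the new deployment_type parameter drives the CMS query: for a cloud deployment, features tagged OSS_ONLY are excluded via the $notIn filter built above. The import path and the assumption of a default export are illustrative; the version string is a placeholder.

import getChangelogByVersion from 'api/changelog/getChangelogByVersion';
import { DeploymentType } from 'types/api/changelog/getChangelogByVersion';

async function loadCloudChangelog(): Promise<void> {
	// Adds populate[features][filters][deployment_type][$notIn]=oss_only (the exact enum value is defined in the types file).
	const changelog = await getChangelogByVersion('v0.90.1', DeploymentType.CLOUD_ONLY);
	console.log(changelog);
}
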
@@ -19,6 +19,7 @@ import apiV1, {
	apiV2,
	apiV3,
	apiV4,
	apiV5,
	gatewayApiV1,
	gatewayApiV2,
} from './apiV1';
@@ -171,6 +172,18 @@ ApiV4Instance.interceptors.response.use(
ApiV4Instance.interceptors.request.use(interceptorsRequestResponse);
//

// axios V5
export const ApiV5Instance = axios.create({
	baseURL: `${ENVIRONMENT.baseURL}${apiV5}`,
});

ApiV5Instance.interceptors.response.use(
	interceptorsResponse,
	interceptorRejected,
);
ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
//

// axios Base
export const ApiBaseInstance = axios.create({
	baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,

@@ -1,24 +1,20 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Pipeline } from 'types/api/pipeline/def';
import { Props } from 'types/api/pipeline/post';

const post = async (
	props: Props,
): Promise<SuccessResponse<Pipeline> | ErrorResponse> => {
const post = async (props: Props): Promise<SuccessResponseV2<Pipeline>> => {
	try {
		const response = await axios.post('/logs/pipelines', props.data);

		return {
			statusCode: 200,
			error: null,
			message: response.data.status,
			payload: response.data.data,
			httpStatusCode: response.status,
			data: response.data.data,
		};
	} catch (error) {
		return ErrorResponseHandler(error as AxiosError);
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

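
With the move to SuccessResponseV2, callers no longer branch on statusCode: a successful call resolves with { httpStatusCode, data }, and failures surface through ErrorResponseHandlerV2. A minimal caller sketch, assuming ErrorResponseHandlerV2 throws a normalized error and assuming the module's import path; the payload object is a placeholder:

import post from 'api/pipeline/post';

async function savePipelines(pipelineProps: any): Promise<void> {
	try {
		// pipelineProps.data carries the pipeline definitions; its shape comes from Props in types/api/pipeline/post.
		const { httpStatusCode, data } = await post(pipelineProps);
		console.log('saved pipelines', httpStatusCode, data);
	} catch (error) {
		// ErrorResponseHandlerV2 is assumed to rethrow, so failures land here.
		console.error('failed to save pipelines', error);
	}
}
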
28
frontend/src/api/querySuggestions/getKeySuggestions.ts
Normal file
@@ -0,0 +1,28 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import {
	QueryKeyRequestProps,
	QueryKeySuggestionsResponseProps,
} from 'types/api/querySuggestions/types';

export const getKeySuggestions = (
	props: QueryKeyRequestProps,
): Promise<AxiosResponse<QueryKeySuggestionsResponseProps>> => {
	const {
		signal = '',
		searchText = '',
		metricName = '',
		fieldContext = '',
		fieldDataType = '',
	} = props;

	const encodedSignal = encodeURIComponent(signal);
	const encodedSearchText = encodeURIComponent(searchText);
	const encodedMetricName = encodeURIComponent(metricName);
	const encodedFieldContext = encodeURIComponent(fieldContext);
	const encodedFieldDataType = encodeURIComponent(fieldDataType);

	return axios.get(
		`/fields/keys?signal=${encodedSignal}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&fieldContext=${encodedFieldContext}&fieldDataType=${encodedFieldDataType}`,
	);
};

20
frontend/src/api/querySuggestions/getValueSuggestion.ts
Normal file
@@ -0,0 +1,20 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import {
	QueryKeyValueRequestProps,
	QueryKeyValueSuggestionsResponseProps,
} from 'types/api/querySuggestions/types';

export const getValueSuggestions = (
	props: QueryKeyValueRequestProps,
): Promise<AxiosResponse<QueryKeyValueSuggestionsResponseProps>> => {
	const { signal, key, searchText } = props;

	const encodedSignal = encodeURIComponent(signal);
	const encodedKey = encodeURIComponent(key);
	const encodedSearchText = encodeURIComponent(searchText);

	return axios.get(
		`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}`,
	);
};
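
A short usage sketch combining the two suggestion helpers above: fetch key suggestions for a signal, then fetch value suggestions for one of the returned keys. The example key 'http.method' and the assumption that the request props are plain string fields (as the defaults suggest) are illustrative.

import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';

async function suggest(): Promise<void> {
	// Ask for trace field keys matching "http".
	const keysResponse = await getKeySuggestions({
		signal: 'traces',
		searchText: 'http',
		metricName: '',
		fieldContext: '',
		fieldDataType: '',
	});
	console.log(keysResponse.data);

	// Then ask for values of one key; 'http.method' is an illustrative choice.
	const valuesResponse = await getValueSuggestions({
		signal: 'traces',
		key: 'http.method',
		searchText: '',
	});
	console.log(valuesResponse.data);
}
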
24
frontend/src/api/v3/licenses/post.ts
Normal file
@@ -0,0 +1,24 @@
import { ApiV3Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/licenses/apply';

const apply = async (
	props: Props,
): Promise<SuccessResponseV2<PayloadProps>> => {
	try {
		const response = await axios.post<PayloadProps>('/licenses', {
			key: props.key,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default apply;

@@ -2,15 +2,11 @@ import { ApiV3Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/licenses/apply';
import { PayloadProps } from 'types/api/licenses/apply';

const apply = async (
	props: Props,
): Promise<SuccessResponseV2<PayloadProps>> => {
const apply = async (): Promise<SuccessResponseV2<PayloadProps>> => {
	try {
		const response = await axios.post<PayloadProps>('/licenses', {
			key: props.key,
		});
		const response = await axios.put<PayloadProps>('/licenses');

		return {
			httpStatusCode: response.status,
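
Taken together, the new v3/licenses/post.ts applies a license key with a POST to /licenses, while the file modified above switches to a parameterless PUT to re-validate the stored license. A hedged sketch of how the two calls might sit side by side; both import paths are assumptions, since the compare view does not show the refresh module's location.

import applyLicense from 'api/v3/licenses/post';
import refreshLicense from 'api/v3/licenses/put';

async function activateAndRefresh(licenseKey: string): Promise<void> {
	// POST /licenses with the key activates the license.
	const applied = await applyLicense({ key: licenseKey });
	console.log('applied license', applied.httpStatusCode);

	// PUT /licenses (no body) re-checks the license state on the server.
	const refreshed = await refreshLicense();
	console.log('refreshed license', refreshed.httpStatusCode);
}
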
168
frontend/src/api/v5/queryRange/constants.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
// V5 Query Range Constants
|
||||
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import {
|
||||
FunctionName,
|
||||
RequestType,
|
||||
SignalType,
|
||||
Step,
|
||||
} from 'types/api/v5/queryRange';
|
||||
|
||||
// ===================== Schema and Version Constants =====================
|
||||
|
||||
export const SCHEMA_VERSION_V5 = ENTITY_VERSION_V5;
|
||||
export const API_VERSION_V5 = 'v5';
|
||||
|
||||
// ===================== Default Values =====================
|
||||
|
||||
export const DEFAULT_STEP_INTERVAL: Step = '60s';
|
||||
export const DEFAULT_LIMIT = 100;
|
||||
export const DEFAULT_OFFSET = 0;
|
||||
|
||||
// ===================== Request Type Constants =====================
|
||||
|
||||
export const REQUEST_TYPES: Record<string, RequestType> = {
|
||||
SCALAR: 'scalar',
|
||||
TIME_SERIES: 'time_series',
|
||||
RAW: 'raw',
|
||||
DISTRIBUTION: 'distribution',
|
||||
} as const;
|
||||
|
||||
// ===================== Signal Type Constants =====================
|
||||
|
||||
export const SIGNAL_TYPES: Record<string, SignalType> = {
|
||||
TRACES: 'traces',
|
||||
LOGS: 'logs',
|
||||
METRICS: 'metrics',
|
||||
} as const;
|
||||
|
||||
// ===================== Common Aggregation Expressions =====================
|
||||
|
||||
export const TRACE_AGGREGATIONS = {
|
||||
COUNT: 'count()',
|
||||
COUNT_DISTINCT_TRACE_ID: 'count_distinct(traceID)',
|
||||
AVG_DURATION: 'avg(duration_nano)',
|
||||
P50_DURATION: 'p50(duration_nano)',
|
||||
P95_DURATION: 'p95(duration_nano)',
|
||||
P99_DURATION: 'p99(duration_nano)',
|
||||
MAX_DURATION: 'max(duration_nano)',
|
||||
MIN_DURATION: 'min(duration_nano)',
|
||||
SUM_DURATION: 'sum(duration_nano)',
|
||||
} as const;
|
||||
|
||||
export const LOG_AGGREGATIONS = {
|
||||
COUNT: 'count()',
|
||||
COUNT_DISTINCT_HOST: 'count_distinct(host.name)',
|
||||
COUNT_DISTINCT_SERVICE: 'count_distinct(service.name)',
|
||||
COUNT_DISTINCT_CONTAINER: 'count_distinct(container.name)',
|
||||
} as const;
|
||||
|
||||
// ===================== Common Filter Expressions =====================
|
||||
|
||||
export const COMMON_FILTERS = {
|
||||
// Trace filters
|
||||
SERVER_SPANS: "kind_string = 'Server'",
|
||||
CLIENT_SPANS: "kind_string = 'Client'",
|
||||
INTERNAL_SPANS: "kind_string = 'Internal'",
|
||||
ERROR_SPANS: 'http.status_code >= 400',
|
||||
SUCCESS_SPANS: 'http.status_code < 400',
|
||||
|
||||
// Common service filters
|
||||
EXCLUDE_HEALTH_CHECKS: "http.route != '/health' AND http.route != '/ping'",
|
||||
HTTP_REQUESTS: "http.method != ''",
|
||||
|
||||
// Log filters
|
||||
ERROR_LOGS: "severity_text = 'ERROR'",
|
||||
WARN_LOGS: "severity_text = 'WARN'",
|
||||
INFO_LOGS: "severity_text = 'INFO'",
|
||||
DEBUG_LOGS: "severity_text = 'DEBUG'",
|
||||
} as const;
|
||||
|
||||
// ===================== Common Group By Fields =====================
|
||||
|
||||
export const COMMON_GROUP_BY_FIELDS = {
|
||||
SERVICE_NAME: {
|
||||
name: 'service.name',
|
||||
fieldDataType: 'string' as const,
|
||||
fieldContext: 'resource' as const,
|
||||
},
|
||||
HTTP_METHOD: {
|
||||
name: 'http.method',
|
||||
fieldDataType: 'string' as const,
|
||||
fieldContext: 'attribute' as const,
|
||||
},
|
||||
HTTP_ROUTE: {
|
||||
name: 'http.route',
|
||||
fieldDataType: 'string' as const,
|
||||
fieldContext: 'attribute' as const,
|
||||
},
|
||||
HTTP_STATUS_CODE: {
|
||||
name: 'http.status_code',
|
||||
fieldDataType: 'int64' as const,
|
||||
fieldContext: 'attribute' as const,
|
||||
},
|
||||
HOST_NAME: {
|
||||
name: 'host.name',
|
||||
fieldDataType: 'string' as const,
|
||||
fieldContext: 'resource' as const,
|
||||
},
|
||||
CONTAINER_NAME: {
|
||||
name: 'container.name',
|
||||
fieldDataType: 'string' as const,
|
||||
fieldContext: 'resource' as const,
|
||||
},
|
||||
} as const;
|
||||
|
||||
// ===================== Function Names =====================
|
||||
|
||||
export const FUNCTION_NAMES: Record<string, FunctionName> = {
|
||||
CUT_OFF_MIN: 'cutOffMin',
|
||||
CUT_OFF_MAX: 'cutOffMax',
|
||||
CLAMP_MIN: 'clampMin',
|
||||
CLAMP_MAX: 'clampMax',
|
||||
ABSOLUTE: 'absolute',
|
||||
RUNNING_DIFF: 'runningDiff',
|
||||
LOG2: 'log2',
|
||||
LOG10: 'log10',
|
||||
CUM_SUM: 'cumSum',
|
||||
EWMA3: 'ewma3',
|
||||
EWMA5: 'ewma5',
|
||||
EWMA7: 'ewma7',
|
||||
MEDIAN3: 'median3',
|
||||
MEDIAN5: 'median5',
|
||||
MEDIAN7: 'median7',
|
||||
TIME_SHIFT: 'timeShift',
|
||||
ANOMALY: 'anomaly',
|
||||
} as const;
|
||||
|
||||
// ===================== Common Step Intervals =====================
|
||||
|
||||
export const STEP_INTERVALS = {
|
||||
FIFTEEN_SECONDS: '15s',
|
||||
THIRTY_SECONDS: '30s',
|
||||
ONE_MINUTE: '60s',
|
||||
FIVE_MINUTES: '300s',
|
||||
TEN_MINUTES: '600s',
|
||||
FIFTEEN_MINUTES: '900s',
|
||||
THIRTY_MINUTES: '1800s',
|
||||
ONE_HOUR: '3600s',
|
||||
TWO_HOURS: '7200s',
|
||||
SIX_HOURS: '21600s',
|
||||
TWELVE_HOURS: '43200s',
|
||||
ONE_DAY: '86400s',
|
||||
} as const;
|
||||
|
||||
// ===================== Time Range Presets =====================
|
||||
|
||||
export const TIME_RANGE_PRESETS = {
|
||||
LAST_5_MINUTES: 5 * 60 * 1000,
|
||||
LAST_15_MINUTES: 15 * 60 * 1000,
|
||||
LAST_30_MINUTES: 30 * 60 * 1000,
|
||||
LAST_HOUR: 60 * 60 * 1000,
|
||||
LAST_3_HOURS: 3 * 60 * 60 * 1000,
|
||||
LAST_6_HOURS: 6 * 60 * 60 * 1000,
|
||||
LAST_12_HOURS: 12 * 60 * 60 * 1000,
|
||||
LAST_24_HOURS: 24 * 60 * 60 * 1000,
|
||||
LAST_3_DAYS: 3 * 24 * 60 * 60 * 1000,
|
||||
LAST_7_DAYS: 7 * 24 * 60 * 60 * 1000,
|
||||
} as const;
|
||||
387
frontend/src/api/v5/queryRange/convertV5Response.ts
Normal file
@@ -0,0 +1,387 @@
|
||||
import { cloneDeep, isEmpty } from 'lodash-es';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadV3 } from 'types/api/metrics/getQueryRange';
|
||||
import {
|
||||
DistributionData,
|
||||
MetricRangePayloadV5,
|
||||
QueryRangeRequestV5,
|
||||
RawData,
|
||||
ScalarData,
|
||||
TimeSeriesData,
|
||||
} from 'types/api/v5/queryRange';
|
||||
import { QueryDataV3 } from 'types/api/widgets/getQuery';
|
||||
|
||||
function getColName(
|
||||
col: ScalarData['columns'][number],
|
||||
legendMap: Record<string, string>,
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): string {
|
||||
if (col.columnType === 'group') {
|
||||
return col.name;
|
||||
}
|
||||
|
||||
const aggregation =
|
||||
aggregationPerQuery?.[col.queryName]?.[col.aggregationIndex];
|
||||
const legend = legendMap[col.queryName];
|
||||
const alias = aggregation?.alias;
|
||||
const expression = aggregation?.expression || '';
|
||||
const aggregationsCount = aggregationPerQuery[col.queryName]?.length || 0;
|
||||
const isSingleAggregation = aggregationsCount === 1;
|
||||
|
||||
// Single aggregation: Priority is alias > legend > expression
|
||||
if (isSingleAggregation) {
|
||||
return alias || legend || expression;
|
||||
}
|
||||
|
||||
// Multiple aggregations: Each follows single rules BUT never shows legend
|
||||
// Priority: alias > expression (legend is ignored for multiple aggregations)
|
||||
return alias || expression;
|
||||
}
|
||||
|
||||
function getColId(
|
||||
col: ScalarData['columns'][number],
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): string {
|
||||
if (col.columnType === 'group') {
|
||||
return col.name;
|
||||
}
|
||||
const aggregation =
|
||||
aggregationPerQuery?.[col.queryName]?.[col.aggregationIndex];
|
||||
const expression = aggregation?.expression || '';
|
||||
return `${col.queryName}.${expression}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 TimeSeriesData to legacy format
|
||||
*/
|
||||
function convertTimeSeriesData(
|
||||
timeSeriesData: TimeSeriesData,
|
||||
legendMap: Record<string, string>,
|
||||
): QueryDataV3 {
|
||||
// Convert V5 time series format to legacy QueryDataV3 format
|
||||
|
||||
return {
|
||||
queryName: timeSeriesData.queryName,
|
||||
legend: legendMap[timeSeriesData.queryName] || timeSeriesData.queryName,
|
||||
series: timeSeriesData?.aggregations?.flatMap((aggregation) => {
|
||||
const { index, alias, series } = aggregation;
|
||||
|
||||
if (!series || !series.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return series.map((series) => ({
|
||||
labels: series.labels
|
||||
? Object.fromEntries(
|
||||
series.labels.map((label) => [label.key.name, label.value]),
|
||||
)
|
||||
: {},
|
||||
labelsArray: series.labels
|
||||
? series.labels.map((label) => ({ [label.key.name]: label.value }))
|
||||
: [],
|
||||
values: series.values.map((value) => ({
|
||||
timestamp: value.timestamp,
|
||||
value: String(value.value),
|
||||
})),
|
||||
metaData: {
|
||||
alias,
|
||||
index,
|
||||
queryName: timeSeriesData.queryName,
|
||||
},
|
||||
}));
|
||||
}),
|
||||
list: null,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 ScalarData array to legacy format with table structure
|
||||
*/
|
||||
function convertScalarDataArrayToTable(
|
||||
scalarDataArray: ScalarData[],
|
||||
legendMap: Record<string, string>,
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): QueryDataV3[] {
|
||||
// If no scalar data, return empty structure
|
||||
|
||||
if (!scalarDataArray || scalarDataArray.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Process each scalar data separately to maintain query separation
|
||||
return scalarDataArray?.map((scalarData) => {
|
||||
// Get query name from the first column
|
||||
const queryName = scalarData?.columns?.[0]?.queryName || '';
|
||||
|
||||
if ((scalarData as any)?.aggregations?.length > 0) {
|
||||
return {
|
||||
...convertTimeSeriesData(scalarData as any, legendMap),
|
||||
table: {
|
||||
columns: [],
|
||||
rows: [],
|
||||
},
|
||||
list: null,
|
||||
};
|
||||
}
|
||||
|
||||
// Collect columns for this specific query
|
||||
const columns = scalarData?.columns?.map((col) => ({
|
||||
name: getColName(col, legendMap, aggregationPerQuery),
|
||||
queryName: col.queryName,
|
||||
isValueColumn: col.columnType === 'aggregation',
|
||||
id: getColId(col, aggregationPerQuery),
|
||||
}));
|
||||
|
||||
// Process rows for this specific query
|
||||
const rows = scalarData?.data?.map((dataRow) => {
|
||||
const rowData: Record<string, any> = {};
|
||||
|
||||
scalarData?.columns?.forEach((col, colIndex) => {
|
||||
const columnName = getColName(col, legendMap, aggregationPerQuery);
|
||||
const columnId = getColId(col, aggregationPerQuery);
|
||||
rowData[columnId || columnName] = dataRow[colIndex];
|
||||
});
|
||||
|
||||
return { data: rowData };
|
||||
});
|
||||
|
||||
return {
|
||||
queryName,
|
||||
legend: legendMap[queryName] || '',
|
||||
series: null,
|
||||
list: null,
|
||||
table: {
|
||||
columns,
|
||||
rows,
|
||||
},
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function convertScalarWithFormatForWeb(
|
||||
scalarDataArray: ScalarData[],
|
||||
legendMap: Record<string, string>,
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): QueryDataV3[] {
|
||||
if (!scalarDataArray || scalarDataArray.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return scalarDataArray.map((scalarData) => {
|
||||
const columns =
|
||||
scalarData.columns?.map((col) => {
|
||||
const colName = getColName(col, legendMap, aggregationPerQuery);
|
||||
|
||||
return {
|
||||
name: colName,
|
||||
queryName: col.queryName,
|
||||
isValueColumn: col.columnType === 'aggregation',
|
||||
id: getColId(col, aggregationPerQuery),
|
||||
};
|
||||
}) || [];
|
||||
|
||||
const rows =
|
||||
scalarData.data?.map((dataRow) => {
|
||||
const rowData: Record<string, any> = {};
|
||||
columns?.forEach((col, colIndex) => {
|
||||
rowData[col.id || col.name] = dataRow[colIndex];
|
||||
});
|
||||
return { data: rowData };
|
||||
}) || [];
|
||||
|
||||
const queryName = scalarData.columns?.[0]?.queryName || '';
|
||||
|
||||
return {
|
||||
queryName,
|
||||
legend: legendMap[queryName] || queryName,
|
||||
series: null,
|
||||
list: null,
|
||||
table: {
|
||||
columns,
|
||||
rows,
|
||||
},
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 RawData to legacy format
|
||||
*/
|
||||
function convertRawData(
|
||||
rawData: RawData,
|
||||
legendMap: Record<string, string>,
|
||||
): QueryDataV3 {
|
||||
// Convert V5 raw format to legacy QueryDataV3 format
|
||||
return {
|
||||
queryName: rawData.queryName,
|
||||
legend: legendMap[rawData.queryName] || rawData.queryName,
|
||||
series: null,
|
||||
list: rawData.rows?.map((row) => ({
|
||||
timestamp: row.timestamp,
|
||||
data: {
|
||||
// Map raw data to ILog structure - spread row.data first to include all properties
|
||||
...row.data,
|
||||
date: row.timestamp,
|
||||
} as any,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 DistributionData to legacy format
|
||||
*/
|
||||
function convertDistributionData(
|
||||
distributionData: DistributionData,
|
||||
legendMap: Record<string, string>,
|
||||
): any {
|
||||
// eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
// Convert V5 distribution format to legacy histogram format
|
||||
return {
|
||||
...distributionData,
|
||||
legendMap,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to convert V5 data based on type
|
||||
*/
|
||||
function convertV5DataByType(
|
||||
v5Data: any,
|
||||
legendMap: Record<string, string>,
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): MetricRangePayloadV3['data'] {
|
||||
switch (v5Data?.type) {
|
||||
case 'time_series': {
|
||||
const timeSeriesData = v5Data.data.results as TimeSeriesData[];
|
||||
return {
|
||||
resultType: 'time_series',
|
||||
result: timeSeriesData.map((timeSeries) =>
|
||||
convertTimeSeriesData(timeSeries, legendMap),
|
||||
),
|
||||
};
|
||||
}
|
||||
case 'scalar': {
|
||||
const scalarData = v5Data.data.results as ScalarData[];
|
||||
// For scalar data, combine all results into separate table entries
|
||||
const combinedTables = convertScalarDataArrayToTable(
|
||||
scalarData,
|
||||
legendMap,
|
||||
aggregationPerQuery,
|
||||
);
|
||||
return {
|
||||
resultType: 'scalar',
|
||||
result: combinedTables,
|
||||
};
|
||||
}
|
||||
case 'raw': {
|
||||
const rawData = v5Data.data.results as RawData[];
|
||||
return {
|
||||
resultType: 'raw',
|
||||
result: rawData.map((raw) => convertRawData(raw, legendMap)),
|
||||
};
|
||||
}
|
||||
case 'distribution': {
|
||||
const distributionData = v5Data.data.results as DistributionData[];
|
||||
return {
|
||||
resultType: 'distribution',
|
||||
result: distributionData.map((distribution) =>
|
||||
convertDistributionData(distribution, legendMap),
|
||||
),
|
||||
};
|
||||
}
|
||||
default:
|
||||
return {
|
||||
resultType: '',
|
||||
result: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 API response to legacy format expected by frontend components
|
||||
*/
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export function convertV5ResponseToLegacy(
|
||||
v5Response: SuccessResponse<MetricRangePayloadV5>,
|
||||
legendMap: Record<string, string>,
|
||||
formatForWeb?: boolean,
|
||||
): SuccessResponse<MetricRangePayloadV3> {
|
||||
const { payload, params } = v5Response;
|
||||
const v5Data = payload?.data;
|
||||
|
||||
const aggregationPerQuery =
|
||||
(params as QueryRangeRequestV5)?.compositeQuery?.queries
|
||||
?.filter((query) => query.type === 'builder_query')
|
||||
.reduce((acc, query) => {
|
||||
if (
|
||||
query.type === 'builder_query' &&
|
||||
'aggregations' in query.spec &&
|
||||
query.spec.name
|
||||
) {
|
||||
acc[query.spec.name] = query.spec.aggregations;
|
||||
}
|
||||
return acc;
|
||||
}, {} as Record<string, any>) || {};
|
||||
|
||||
// If formatForWeb is true, return as-is (like existing logic)
|
||||
if (formatForWeb && v5Data?.type === 'scalar') {
|
||||
const scalarData = v5Data.data.results as ScalarData[];
|
||||
const webTables = convertScalarWithFormatForWeb(
|
||||
scalarData,
|
||||
legendMap,
|
||||
aggregationPerQuery,
|
||||
);
|
||||
return {
|
||||
...v5Response,
|
||||
payload: {
|
||||
data: {
|
||||
resultType: 'scalar',
|
||||
result: webTables,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Convert based on V5 response type
|
||||
const convertedData = convertV5DataByType(
|
||||
v5Data,
|
||||
legendMap,
|
||||
aggregationPerQuery,
|
||||
);
|
||||
|
||||
// Create legacy-compatible response structure
|
||||
const legacyResponse: SuccessResponse<MetricRangePayloadV3> = {
|
||||
...v5Response,
|
||||
payload: {
|
||||
data: convertedData,
|
||||
},
|
||||
};
|
||||
|
||||
// Apply legend mapping (similar to existing logic)
|
||||
if (legacyResponse.payload?.data?.result) {
|
||||
legacyResponse.payload.data.result = legacyResponse.payload.data.result.map(
|
||||
(queryData: any) => {
|
||||
// eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
const newQueryData = cloneDeep(queryData);
|
||||
newQueryData.legend = legendMap[queryData.queryName];
|
||||
|
||||
// If metric names is an empty object
|
||||
if (isEmpty(queryData.metric)) {
|
||||
// If metrics list is empty && the user haven't defined a legend then add the legend equal to the name of the query.
|
||||
if (newQueryData.legend === undefined || newQueryData.legend === null) {
|
||||
newQueryData.legend = queryData.queryName;
|
||||
}
|
||||
// If name of the query and the legend if inserted is same then add the same to the metrics object.
|
||||
if (queryData.queryName === newQueryData.legend) {
|
||||
newQueryData.metric = newQueryData.metric || {};
|
||||
newQueryData.metric[queryData.queryName] = queryData.queryName;
|
||||
}
|
||||
}
|
||||
|
||||
return newQueryData;
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
return legacyResponse;
|
||||
}
|
||||
45
frontend/src/api/v5/queryRange/getQueryRange.ts
Normal file
@@ -0,0 +1,45 @@
import { ApiV5Instance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
	MetricRangePayloadV5,
	QueryRangePayloadV5,
} from 'types/api/v5/queryRange';

export const getQueryRangeV5 = async (
	props: QueryRangePayloadV5,
	version: string,
	signal: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
	try {
		if (version && version === ENTITY_VERSION_V5) {
			const response = await ApiV5Instance.post('/query_range', props, {
				signal,
				headers,
			});

			return {
				httpStatusCode: response.status,
				data: response.data,
			};
		}

		// Default V5 behavior
		const response = await ApiV5Instance.post('/query_range', props, {
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data.data,
		};
	} catch (error) {
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default getQueryRangeV5;
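
A usage sketch for the V5 endpoint above. The builder_query envelope, request type, step interval, filter expression, and group-by field mirror values visible elsewhere in this changeset (constants.ts and convertV5Response.ts); the exact field names of QueryRangePayloadV5 are not shown here, so the payload is cast and should be treated as an assumption rather than the definitive shape.

import getQueryRangeV5 from 'api/v5/queryRange/getQueryRange';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { QueryRangePayloadV5 } from 'types/api/v5/queryRange';

async function runSampleQuery(): Promise<void> {
	const controller = new AbortController();

	// Minimal time-series request: count() of server spans grouped by service.name.
	const payload = ({
		schemaVersion: ENTITY_VERSION_V5, // assumed field name
		start: Date.now() - 15 * 60 * 1000,
		end: Date.now(),
		requestType: 'time_series',
		compositeQuery: {
			queries: [
				{
					type: 'builder_query',
					spec: {
						name: 'A',
						signal: 'traces',
						stepInterval: '60s',
						aggregations: [{ expression: 'count()' }],
						filter: { expression: "kind_string = 'Server'" },
						groupBy: [{ name: 'service.name', fieldContext: 'resource' }],
					},
				},
			],
		},
	} as unknown) as QueryRangePayloadV5;

	const response = await getQueryRangeV5(payload, ENTITY_VERSION_V5, controller.signal);
	console.log(response.httpStatusCode, response.data);
}
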
434  frontend/src/api/v5/queryRange/prepareQueryRangePayloadV5.ts  Normal file
@@ -0,0 +1,434 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { PANEL_TYPES } from 'constants/queryBuilder';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi';
import { isEmpty } from 'lodash-es';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
  IBuilderQuery,
  QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import {
  BaseBuilderQuery,
  FieldContext,
  FieldDataType,
  FunctionName,
  GroupByKey,
  Having,
  LogAggregation,
  MetricAggregation,
  OrderBy,
  QueryEnvelope,
  QueryFunction,
  QueryRangePayloadV5,
  QueryType,
  RequestType,
  TelemetryFieldKey,
  TraceAggregation,
  VariableItem,
} from 'types/api/v5/queryRange';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';

type PrepareQueryRangePayloadV5Result = {
  queryPayload: QueryRangePayloadV5;
  legendMap: Record<string, string>;
};

/**
 * Maps panel types to V5 request types
 */
function mapPanelTypeToRequestType(panelType: PANEL_TYPES): RequestType {
  switch (panelType) {
    case PANEL_TYPES.TIME_SERIES:
    case PANEL_TYPES.BAR:
      return 'time_series';
    case PANEL_TYPES.TABLE:
    case PANEL_TYPES.PIE:
    case PANEL_TYPES.VALUE:
    case PANEL_TYPES.TRACE:
      return 'scalar';
    case PANEL_TYPES.LIST:
      return 'raw';
    case PANEL_TYPES.HISTOGRAM:
      return 'distribution';
    default:
      return '';
  }
}

/**
 * Gets signal type from data source
 */
function getSignalType(dataSource: string): 'traces' | 'logs' | 'metrics' {
  if (dataSource === 'traces') return 'traces';
  if (dataSource === 'logs') return 'logs';
  return 'metrics';
}

/**
 * Creates base spec for builder queries
 */
function createBaseSpec(
  queryData: IBuilderQuery,
  requestType: RequestType,
  panelType?: PANEL_TYPES,
): BaseBuilderQuery {
  const nonEmptySelectColumns = (queryData.selectColumns as (
    | BaseAutocompleteData
    | TelemetryFieldKey
  )[])?.filter((c) => ('key' in c ? c?.key : c?.name));

  return {
    stepInterval: queryData?.stepInterval || undefined,
    disabled: queryData.disabled,
    filter: queryData?.filter?.expression ? queryData.filter : undefined,
    groupBy:
      queryData.groupBy?.length > 0
        ? queryData.groupBy.map(
            (item: any): GroupByKey => ({
              name: item.key,
              fieldDataType: item?.dataType,
              fieldContext: item?.type,
              description: item?.description,
              unit: item?.unit,
              signal: item?.signal,
              materialized: item?.materialized,
            }),
          )
        : undefined,
    limit:
      panelType === PANEL_TYPES.TABLE || panelType === PANEL_TYPES.LIST
        ? queryData.limit || queryData.pageSize || undefined
        : queryData.limit || undefined,
    offset: requestType === 'raw' ? queryData.offset : undefined,
    order:
      queryData.orderBy?.length > 0
        ? queryData.orderBy.map(
            (order: any): OrderBy => ({
              key: {
                name: order.columnName,
              },
              direction: order.order,
            }),
          )
        : undefined,
    // legend: isEmpty(queryData.legend) ? undefined : queryData.legend,
    having: isEmpty(queryData.having) ? undefined : (queryData?.having as Having),
    functions: isEmpty(queryData.functions)
      ? undefined
      : queryData.functions.map(
          (func: QueryFunctionProps): QueryFunction => ({
            name: func.name as FunctionName,
            args: func.args.map((arg) => ({
              // name: arg.name,
              value: arg,
            })),
          }),
        ),
    selectFields: isEmpty(nonEmptySelectColumns)
      ? undefined
      : nonEmptySelectColumns?.map(
          (column: any): TelemetryFieldKey => ({
            name: column.name ?? column.key,
            fieldDataType:
              column?.fieldDataType ?? (column?.dataType as FieldDataType),
            fieldContext: column?.fieldContext ?? (column?.type as FieldContext),
            signal: column?.signal ?? undefined,
          }),
        ),
  };
}

// Utility to parse aggregation expressions with optional alias
export function parseAggregations(
  expression: string,
): { expression: string; alias?: string }[] {
  const result: { expression: string; alias?: string }[] = [];
  // Matches function calls like "count()" or "sum(field)" with optional alias like "as 'alias'"
  // Handles quoted ('alias'), dash-separated (field-name), and unquoted values after "as" keyword
  const regex = /([a-zA-Z0-9_]+\([^)]*\))(?:\s*as\s+((?:'[^']*'|"[^"]*"|[a-zA-Z0-9_-]+)))?/g;
  let match = regex.exec(expression);
  while (match !== null) {
    const expr = match[1];
    let alias = match[2];
    if (alias) {
      // Remove quotes if present
      alias = alias.replace(/^['"]|['"]$/g, '');
      result.push({ expression: expr, alias });
    } else {
      result.push({ expression: expr });
    }
    match = regex.exec(expression);
  }
  return result;
}

export function createAggregation(
  queryData: any,
  panelType?: PANEL_TYPES,
): TraceAggregation[] | LogAggregation[] | MetricAggregation[] {
  if (!queryData) {
    return [];
  }

  const haveReduceTo =
    queryData.dataSource === DataSource.METRICS &&
    panelType &&
    (panelType === PANEL_TYPES.TABLE ||
      panelType === PANEL_TYPES.PIE ||
      panelType === PANEL_TYPES.VALUE);

  if (queryData.dataSource === DataSource.METRICS) {
    return [
      {
        metricName:
          queryData?.aggregations?.[0]?.metricName ||
          queryData?.aggregateAttribute?.key,
        temporality:
          queryData?.aggregations?.[0]?.temporality ||
          queryData?.aggregateAttribute?.temporality,
        timeAggregation:
          queryData?.aggregations?.[0]?.timeAggregation ||
          queryData?.timeAggregation,
        spaceAggregation:
          queryData?.aggregations?.[0]?.spaceAggregation ||
          queryData?.spaceAggregation,
        reduceTo: haveReduceTo
          ? queryData?.aggregations?.[0]?.reduceTo || queryData?.reduceTo
          : undefined,
      },
    ];
  }

  if (queryData.aggregations?.length > 0) {
    return isEmpty(parseAggregations(queryData.aggregations?.[0].expression))
      ? [{ expression: 'count()' }]
      : parseAggregations(queryData.aggregations?.[0].expression);
  }

  return [{ expression: 'count()' }];
}

/**
 * Converts query builder data to V5 builder queries
 */
function convertBuilderQueriesToV5(
  builderQueries: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any
  requestType: RequestType,
  panelType?: PANEL_TYPES,
): QueryEnvelope[] {
  return Object.entries(builderQueries).map(
    ([queryName, queryData]): QueryEnvelope => {
      const signal = getSignalType(queryData.dataSource);
      const baseSpec = createBaseSpec(queryData, requestType, panelType);
      let spec: QueryEnvelope['spec'];

      const aggregations = createAggregation(queryData, panelType);

      switch (signal) {
        case 'traces':
          spec = {
            name: queryName,
            signal: 'traces' as const,
            ...baseSpec,
            aggregations: aggregations as TraceAggregation[],
          };
          break;
        case 'logs':
          spec = {
            name: queryName,
            signal: 'logs' as const,
            ...baseSpec,
            aggregations: aggregations as LogAggregation[],
          };
          break;
        case 'metrics':
        default:
          spec = {
            name: queryName,
            signal: 'metrics' as const,
            ...baseSpec,
            aggregations: aggregations as MetricAggregation[],
            // reduceTo: queryData.reduceTo,
          };
          break;
      }

      return {
        type: 'builder_query' as QueryType,
        spec,
      };
    },
  );
}

/**
 * Converts PromQL queries to V5 format
 */
function convertPromQueriesToV5(
  promQueries: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any
): QueryEnvelope[] {
  return Object.entries(promQueries).map(
    ([queryName, queryData]): QueryEnvelope => ({
      type: 'promql' as QueryType,
      spec: {
        name: queryName,
        query: queryData.query,
        disabled: queryData.disabled || false,
        step: queryData?.stepInterval,
        stats: false, // PromQL specific field
      },
    }),
  );
}

/**
 * Converts ClickHouse queries to V5 format
 */
function convertClickHouseQueriesToV5(
  chQueries: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any
): QueryEnvelope[] {
  return Object.entries(chQueries).map(
    ([queryName, queryData]): QueryEnvelope => ({
      type: 'clickhouse_sql' as QueryType,
      spec: {
        name: queryName,
        query: queryData.query,
        disabled: queryData.disabled || false,
        // ClickHouse doesn't have step or stats like PromQL
      },
    }),
  );
}

/**
 * Helper function to reduce query arrays to objects
 */
function reduceQueriesToObject(
  queryArray: any[], // eslint-disable-line @typescript-eslint/no-explicit-any
): { queries: Record<string, any>; legends: Record<string, string> } {
  // eslint-disable-line @typescript-eslint/no-explicit-any
  const legends: Record<string, string> = {};
  const queries = queryArray.reduce((acc, queryItem) => {
    if (!queryItem.query) return acc;
    acc[queryItem.name] = queryItem;
    legends[queryItem.name] = queryItem.legend;
    return acc;
  }, {} as Record<string, any>); // eslint-disable-line @typescript-eslint/no-explicit-any

  return { queries, legends };
}

/**
 * Prepares V5 query range payload from GetQueryResultsProps
 */
export const prepareQueryRangePayloadV5 = ({
  query,
  globalSelectedInterval,
  graphType,
  selectedTime,
  tableParams,
  variables = {},
  start: startTime,
  end: endTime,
  formatForWeb,
  originalGraphType,
  fillGaps,
}: GetQueryResultsProps): PrepareQueryRangePayloadV5Result => {
  let legendMap: Record<string, string> = {};
  const requestType = mapPanelTypeToRequestType(graphType);
  let queries: QueryEnvelope[] = [];

  switch (query.queryType) {
    case EQueryType.QUERY_BUILDER: {
      const { queryData: data, queryFormulas } = query.builder;
      const currentQueryData = mapQueryDataToApi(data, 'queryName', tableParams);
      const currentFormulas = mapQueryDataToApi(queryFormulas, 'queryName');

      // Combine legend maps
      legendMap = {
        ...currentQueryData.newLegendMap,
        ...currentFormulas.newLegendMap,
      };

      // Convert builder queries
      const builderQueries = convertBuilderQueriesToV5(
        currentQueryData.data,
        requestType,
        graphType,
      );

      // Convert formulas as separate query type
      const formulaQueries = Object.entries(currentFormulas.data).map(
        ([queryName, formulaData]): QueryEnvelope => ({
          type: 'builder_formula' as const,
          spec: {
            name: queryName,
            expression: formulaData.expression || '',
            disabled: formulaData.disabled,
            limit: formulaData.limit ?? undefined,
            order: formulaData.orderBy?.map(
              // eslint-disable-next-line sonarjs/no-identical-functions
              (order: any): OrderBy => ({
                key: {
                  name: order.columnName,
                },
                direction: order.order,
              }),
            ),
          },
        }),
      );

      // Combine both types
      queries = [...builderQueries, ...formulaQueries];
      break;
    }
    case EQueryType.PROM: {
      const promQueries = reduceQueriesToObject(query[query.queryType]);
      queries = convertPromQueriesToV5(promQueries.queries);
      legendMap = promQueries.legends;
      break;
    }
    case EQueryType.CLICKHOUSE: {
      const chQueries = reduceQueriesToObject(query[query.queryType]);
      queries = convertClickHouseQueriesToV5(chQueries.queries);
      legendMap = chQueries.legends;
      break;
    }
    default:
      break;
  }

  // Calculate time range
  const { start, end } = getStartEndRangeTime({
    type: selectedTime,
    interval: globalSelectedInterval,
  });

  // Create V5 payload
  const queryPayload: QueryRangePayloadV5 = {
    schemaVersion: 'v1',
    start: startTime ? startTime * 1e3 : parseInt(start, 10) * 1e3,
    end: endTime ? endTime * 1e3 : parseInt(end, 10) * 1e3,
    requestType,
    compositeQuery: {
      queries,
    },
    formatOptions: {
      formatTableResultForUI:
        !!formatForWeb ||
        (originalGraphType
          ? originalGraphType === PANEL_TYPES.TABLE
          : graphType === PANEL_TYPES.TABLE),
      fillGaps: fillGaps || false,
    },
    variables: Object.entries(variables).reduce((acc, [key, value]) => {
      acc[key] = { value };
      return acc;
    }, {} as Record<string, VariableItem>),
  };

  return { legendMap, queryPayload };
};
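For reference, a small sketch of how the helpers above behave; this is illustrative only and not part of the diff, and the props object is a placeholder for whatever the dashboard layer supplies.

import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import {
  parseAggregations,
  prepareQueryRangePayloadV5,
} from 'api/v5/queryRange/prepareQueryRangePayloadV5';

// Aliases may be quoted or bare; quotes are stripped by the parser.
parseAggregations("count() as total, p99(duration_nano) as 'p99_latency'");
// -> [ { expression: 'count()', alias: 'total' },
//      { expression: 'p99(duration_nano)', alias: 'p99_latency' } ]

declare const props: GetQueryResultsProps; // assumed to be provided by the caller
const { queryPayload, legendMap } = prepareQueryRangePayloadV5(props);
// queryPayload feeds getQueryRangeV5; legendMap maps query names to their legends,
// with start/end scaled by 1e3 as in the implementation above.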
8  frontend/src/api/v5/v5.ts  Normal file
@@ -0,0 +1,8 @@
// V5 API exports
export * from './queryRange/constants';
export { convertV5ResponseToLegacy } from './queryRange/convertV5Response';
export { getQueryRangeV5 } from './queryRange/getQueryRange';
export { prepareQueryRangePayloadV5 } from './queryRange/prepareQueryRangePayloadV5';

// Export types from proper location
export * from 'types/api/v5/queryRange';
@@ -64,7 +64,8 @@ export function applyCeleryFilterOnWidgetData(
        ...queryItem,
        filters: {
          ...queryItem.filters,
-         items: [...queryItem.filters.items, ...filters],
+         items: [...(queryItem.filters?.items || []), ...filters],
+         op: queryItem.filters?.op || 'AND',
        },
      }
    : queryItem,

@@ -41,7 +41,8 @@ export function useNavigateToExplorer(): (
      aggregateOperator: MetricAggregateOperator.NOOP,
      filters: {
        ...item.filters,
-       items: selectedFilters,
+       items: [...(item.filters?.items || []), ...selectedFilters],
+       op: item.filters?.op || 'AND',
      },
      groupBy: [],
      disabled: false,

@@ -60,6 +60,7 @@
  &-ctas {
    display: flex;
    margin-left: auto;

    & svg {
      font-size: 14px;

@@ -110,7 +111,7 @@
  &-content {
    max-height: calc(100vh - 300px);
    overflow-y: auto;
-   padding: 16px;
+   padding: 16px 16px 18px 16px;
    border: 1px solid var(--bg-slate-500, #161922);
    border-top-width: 0;
    border-bottom-width: 0;
@@ -2,27 +2,62 @@ import './ChangelogModal.styles.scss';

import { CheckOutlined, CloseOutlined } from '@ant-design/icons';
import { Button, Modal } from 'antd';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import cx from 'classnames';
import { USER_PREFERENCES } from 'constants/userPreferences';
import dayjs from 'dayjs';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { ChevronsDown, ScrollText } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useCallback, useEffect, useRef, useState } from 'react';
import { useMutation } from 'react-query';
import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion';
import { UserPreference } from 'types/api/preferences/preference';

import ChangelogRenderer from './components/ChangelogRenderer';

interface Props {
  changelog: ChangelogSchema;
  onClose: () => void;
}

- function ChangelogModal({ onClose }: Props): JSX.Element {
+ function ChangelogModal({ changelog, onClose }: Props): JSX.Element {
  const [hasScroll, setHasScroll] = useState(false);
  const changelogContentSectionRef = useRef<HTMLDivElement>(null);
- const { changelog } = useAppContext();
+ const { userPreferences, updateUserPreferenceInContext } = useAppContext();

  const formattedReleaseDate = dayjs(changelog?.release_date).format(
    'MMMM D, YYYY',
  );

  const { isCloudUser } = useGetTenantLicense();

  const seenChangelogVersion = userPreferences?.find(
    (preference) =>
      preference.name === USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION,
  )?.value as string;

  const { mutate: updateUserPreferenceMutation } = useMutation(
    updateUserPreference,
  );

  useEffect(() => {
    // Update the seen version
    if (seenChangelogVersion !== changelog.version) {
      const version = {
        name: USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION,
        value: changelog.version,
      };
      updateUserPreferenceInContext(version as UserPreference);
      updateUserPreferenceMutation(version);
    }
  }, [
    seenChangelogVersion,
    changelog.version,
    updateUserPreferenceMutation,
    updateUserPreferenceInContext,
  ]);

  const checkScroll = useCallback((): void => {
    if (changelogContentSectionRef.current) {
      const {
@@ -89,18 +124,20 @@ function ChangelogModal({ onClose }: Props): JSX.Element {
            {changelog.features.length > 1 ? 'features' : 'feature'}
          </span>
        )}
-       <div className="changelog-modal-footer-ctas">
-         <Button type="default" icon={<CloseOutlined />} onClick={onClose}>
-           Skip for now
-         </Button>
-         <Button
-           type="primary"
-           icon={<CheckOutlined />}
-           onClick={onClickUpdateWorkspace}
-         >
-           Update my workspace
-         </Button>
-       </div>
+       {!isCloudUser && (
+         <div className="changelog-modal-footer-ctas">
+           <Button type="default" icon={<CloseOutlined />} onClick={onClose}>
+             Skip for now
+           </Button>
+           <Button
+             type="primary"
+             icon={<CheckOutlined />}
+             onClick={onClickUpdateWorkspace}
+           >
+             Update my workspace
+           </Button>
+         </div>
+       )}
        {changelog && (
          <div className="scroll-btn-container">
            <button
@@ -3,10 +3,22 @@
/* eslint-disable @typescript-eslint/explicit-function-return-type */

import { fireEvent, render, screen } from '@testing-library/react';
import { USER_PREFERENCES } from 'constants/userPreferences';
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
import {
  ChangelogSchema,
  DeploymentType,
} from 'types/api/changelog/getChangelogByVersion';

import ChangelogModal from '../ChangelogModal';

- const mockChangelog = {
+ const mockChangelog: ChangelogSchema = {
  id: 1,
  documentId: 'doc-1',
  version: 'v1.0.0',
  createdAt: '2025-06-09T12:00:00Z',
  updatedAt: '2025-06-09T13:00:00Z',
  publishedAt: '2025-06-09T14:00:00Z',
  release_date: '2025-06-10',
  features: [
    {

@@ -14,6 +26,12 @@ const mockChangelog = {
      title: 'Feature 1',
      description: 'Description for feature 1',
      media: null,
      documentId: 'feature-1',
      sort_order: 1,
      createdAt: '2025-06-09T12:00:00Z',
      updatedAt: '2025-06-09T13:00:00Z',
      publishedAt: '2025-06-09T14:00:00Z',
      deployment_type: DeploymentType.ALL,
    },
  ],
  bug_fixes: 'Bug fix details',

@@ -28,15 +46,30 @@ jest.mock(
    return <div>{children}</div>;
  },
);

// mock useAppContext
jest.mock('providers/App/App', () => ({
- useAppContext: jest.fn(() => ({ changelog: mockChangelog })),
+ useAppContext: jest.fn(() => ({
+   updateUserPreferenceInContext: jest.fn(),
+   userPreferences: [
+     {
+       name: USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION,
+       value: 'v1.0.0',
+     },
+   ],
+ })),
}));

function renderChangelog(onClose: () => void = jest.fn()): void {
  render(
    <MockQueryClientProvider>
      <ChangelogModal changelog={mockChangelog} onClose={onClose} />
    </MockQueryClientProvider>,
  );
}

describe('ChangelogModal', () => {
  it('renders modal with changelog data', () => {
-   render(<ChangelogModal onClose={jest.fn()} />);
+   renderChangelog();
    expect(
      screen.getByText('What’s New ⎯ Changelog : June 10, 2025'),
    ).toBeInTheDocument();
@@ -48,14 +81,14 @@ describe('ChangelogModal', () => {

  it('calls onClose when Skip for now is clicked', () => {
    const onClose = jest.fn();
-   render(<ChangelogModal onClose={onClose} />);
+   renderChangelog(onClose);
    fireEvent.click(screen.getByText('Skip for now'));
    expect(onClose).toHaveBeenCalled();
  });

  it('opens migration docs when Update my workspace is clicked', () => {
    window.open = jest.fn();
-   render(<ChangelogModal onClose={jest.fn()} />);
+   renderChangelog();
    fireEvent.click(screen.getByText('Update my workspace'));
    expect(window.open).toHaveBeenCalledWith(
      'https://github.com/SigNoz/signoz/releases',

@@ -65,7 +98,7 @@ describe('ChangelogModal', () => {
  });

  it('scrolls for more when Scroll for more is clicked', () => {
-   render(<ChangelogModal onClose={jest.fn()} />);
+   renderChangelog();
    const scrollBtn = screen.getByTestId('scroll-more-btn');
    const contentDiv = screen.getByTestId('changelog-content');
    if (contentDiv) {

@@ -3,6 +3,10 @@
/* eslint-disable @typescript-eslint/explicit-function-return-type */

import { render, screen } from '@testing-library/react';
import {
  ChangelogSchema,
  DeploymentType,
} from 'types/api/changelog/getChangelogByVersion';

import ChangelogRenderer from '../components/ChangelogRenderer';

@@ -15,23 +19,19 @@ jest.mock(
  },
);

- const mockChangelog = {
+ const mockChangelog: ChangelogSchema = {
  id: 1,
- documentId: 'changelog-doc-1',
- version: '1.0.0',
+ documentId: 'doc-1',
+ version: 'v1.0.0',
  createdAt: '2025-06-09T12:00:00Z',
  updatedAt: '2025-06-09T13:00:00Z',
  publishedAt: '2025-06-09T14:00:00Z',
  release_date: '2025-06-10',
  features: [
    {
      id: 1,
      documentId: '1',
      title: 'Feature 1',
      description: 'Description for feature 1',
      sort_order: 1,
      createdAt: '',
      updatedAt: '',
      publishedAt: '',
      deployment_type: 'All',
      media: {
        id: 1,
        documentId: 'doc1',

@@ -40,11 +40,15 @@ const mockChangelog = {
        mime: 'image/webp',
        alternativeText: null,
      },
      documentId: 'feature-1',
      sort_order: 1,
      createdAt: '2025-06-09T12:00:00Z',
      updatedAt: '2025-06-09T13:00:00Z',
      publishedAt: '2025-06-09T14:00:00Z',
      deployment_type: DeploymentType.ALL,
    },
  ],
  bug_fixes: 'Bug fix details',
  updatedAt: '2025-06-09T12:00:00Z',
  publishedAt: '2025-06-09T12:00:00Z',
  maintenance: 'Maintenance details',
};
@@ -101,13 +101,18 @@
    line-height: 28px;
  }

- .changelog-media-image {
+ .changelog-media-image,
+ .changelog-media-video {
    height: auto;
    width: 100%;
    overflow: hidden;
    border-radius: 4px;
    border: 1px solid var(--bg-slate-400, #1d212d);
  }

  .changelog-media-video {
    margin: 12px 0;
  }
}

.lightMode {

@@ -32,7 +32,7 @@ function renderMedia(media: Media): JSX.Element | null {
      controls
      controlsList="nodownload noplaybackrate"
      loop
-     className="my-3 h-auto w-full rounded"
+     className="changelog-media-video"
    >
      <source src={media.url} type={media.mime} />
      <track kind="captions" src="" label="No captions available" default />

@@ -56,7 +56,7 @@ function ChangelogRenderer({ changelog }: Props): JSX.Element {
      </div>
      <span className="changelog-release-date">{formattedReleaseDate}</span>
      {changelog.features && changelog.features.length > 0 && (
-       <div className="changelog-renderer-list flex flex-col gap-7">
+       <div className="changelog-renderer-list">
          {changelog.features.map((feature) => (
            <div key={feature.id}>
              <h2>{feature.title}</h2>

@@ -18,7 +18,7 @@ function ErrorContent({ error }: ErrorContentProps): JSX.Element {
    errors: errorMessages,
    code: errorCode,
    message: errorMessage,
- } = error.error.error;
+ } = error?.error?.error || {};
  return (
    <section className="error-content">
      {/* Summary Header */}
@@ -43,13 +43,13 @@ export const omitIdFromQuery = (query: Query | null): any => ({
  builder: {
    ...query?.builder,
    queryData: query?.builder.queryData.map((queryData) => {
-     const { id, ...rest } = queryData.aggregateAttribute;
+     const { id, ...rest } = queryData.aggregateAttribute || {};
      const newAggregateAttribute = rest;
      const newGroupByAttributes = queryData.groupBy.map((groupByAttribute) => {
        const { id, ...rest } = groupByAttribute;
        return rest;
      });
-     const newItems = queryData.filters.items.map((item) => {
+     const newItems = queryData.filters?.items?.map((item) => {
        const { id, ...newItem } = item;
        if (item.key) {
          const { id, ...rest } = item.key;

@@ -74,16 +74,16 @@ function HostMetricTraces({
        ...currentQuery.builder.queryData[0].aggregateAttribute,
      },
      filters: {
-       items: tracesFilters.items.filter(
-         (item) => item.key?.key !== 'host.name',
-       ),
+       items:
+         tracesFilters?.items?.filter((item) => item.key?.key !== 'host.name') ||
+         [],
        op: 'AND',
      },
    },
  ],
},
}),
- [currentQuery, tracesFilters.items],
+ [currentQuery, tracesFilters?.items],
);

const query = updatedCurrentQuery?.builder?.queryData[0] || null;
@@ -140,7 +140,8 @@ function HostMetricTraces({

  const isDataEmpty =
    !isLoading && !isFetching && !isError && traces.length === 0;
- const hasAdditionalFilters = tracesFilters.items.length > 1;
+ const hasAdditionalFilters =
+   tracesFilters?.items && tracesFilters?.items?.length > 1;

  const totalCount =
    data?.payload?.data?.newResult?.data?.result?.[0]?.list?.length || 0;

@@ -158,7 +159,7 @@ function HostMetricTraces({
      <div className="filter-section">
        {query && (
          <QueryBuilderSearch
-           query={query}
+           query={query as IBuilderQuery}
            onChange={(value): void =>
              handleChangeTracesFilters(value, VIEWS.TRACES)
            }

@@ -194,7 +195,7 @@ function HostMetricTraces({
      {!isError && traces.length > 0 && (
        <div className="host-metric-traces-table">
          <TraceExplorerControls
-           isLoading={isFetching}
+           isLoading={isFetching && traces.length === 0}
            totalCount={totalCount}
            perPageOptions={PER_PAGE_OPTIONS}
            showSizeChanger={false}

@@ -203,7 +204,7 @@ function HostMetricTraces({
            tableLayout="fixed"
            pagination={false}
            scroll={{ x: true }}
-           loading={isFetching}
+           loading={isFetching && traces.length === 0}
            dataSource={traces}
            columns={traceListColumns}
            onRow={(): Record<string, unknown> => ({

@@ -37,7 +37,7 @@ import {
  ScrollText,
  X,
} from 'lucide-react';
- import { useCallback, useEffect, useMemo, useState } from 'react';
+ import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { AppState } from 'store/reducers';
@@ -86,8 +86,12 @@ function HostMetricsDetails({
    endTime: endMs,
  }));

  const lastSelectedInterval = useRef<Time | null>(null);

  const [selectedInterval, setSelectedInterval] = useState<Time>(
-   selectedTime as Time,
+   lastSelectedInterval.current
+     ? lastSelectedInterval.current
+     : (selectedTime as Time),
  );

  const [selectedView, setSelectedView] = useState<VIEWS>(

@@ -150,10 +154,11 @@ function HostMetricsDetails({
  }, [initialFilters]);

  useEffect(() => {
-   setSelectedInterval(selectedTime as Time);
+   const currentSelectedInterval = lastSelectedInterval.current || selectedTime;
+   setSelectedInterval(currentSelectedInterval as Time);

-   if (selectedTime !== 'custom') {
-     const { maxTime, minTime } = GetMinMax(selectedTime);
+   if (currentSelectedInterval !== 'custom') {
+     const { maxTime, minTime } = GetMinMax(currentSelectedInterval);

      setModalTimeRange({
        startTime: Math.floor(minTime / 1000000000),

@@ -181,6 +186,7 @@ function HostMetricsDetails({

  const handleTimeChange = useCallback(
    (interval: Time | CustomTimeType, dateTimeRange?: [number, number]): void => {
      lastSelectedInterval.current = interval as Time;
      setSelectedInterval(interval as Time);

      if (interval === 'custom' && dateTimeRange) {

@@ -210,15 +216,17 @@ function HostMetricsDetails({
  const handleChangeLogFilters = useCallback(
    (value: IBuilderQuery['filters'], view: VIEWS) => {
      setLogFilters((prevFilters) => {
-       const hostNameFilter = prevFilters.items.find(
+       const hostNameFilter = prevFilters?.items?.find(
          (item) => item.key?.key === 'host.name',
        );
-       const paginationFilter = value.items.find((item) => item.key?.key === 'id');
-       const newFilters = value.items.filter(
+       const paginationFilter = value?.items?.find(
+         (item) => item.key?.key === 'id',
+       );
+       const newFilters = value?.items?.filter(
          (item) => item.key?.key !== 'id' && item.key?.key !== 'host.name',
        );

-       if (newFilters.length > 0) {
+       if (newFilters && newFilters?.length > 0) {
          logEvent(InfraMonitoringEvents.FilterApplied, {
            entity: InfraMonitoringEvents.HostEntity,
            view: InfraMonitoringEvents.LogsView,

@@ -230,7 +238,7 @@ function HostMetricsDetails({
          op: 'AND',
          items: [
            hostNameFilter,
-           ...newFilters,
+           ...(newFilters || []),
            ...(paginationFilter ? [paginationFilter] : []),
          ].filter((item): item is TagFilterItem => item !== undefined),
        };
@@ -252,11 +260,11 @@ function HostMetricsDetails({
  const handleChangeTracesFilters = useCallback(
    (value: IBuilderQuery['filters'], view: VIEWS) => {
      setTracesFilters((prevFilters) => {
-       const hostNameFilter = prevFilters.items.find(
+       const hostNameFilter = prevFilters?.items?.find(
          (item) => item.key?.key === 'host.name',
        );

-       if (value.items.length > 0) {
+       if (value?.items && value?.items?.length > 0) {
          logEvent(InfraMonitoringEvents.FilterApplied, {
            entity: InfraMonitoringEvents.HostEntity,
            view: InfraMonitoringEvents.TracesView,

@@ -268,7 +276,7 @@ function HostMetricsDetails({
          op: 'AND',
          items: [
            hostNameFilter,
-           ...value.items.filter((item) => item.key?.key !== 'host.name'),
+           ...(value?.items?.filter((item) => item.key?.key !== 'host.name') || []),
          ].filter((item): item is TagFilterItem => item !== undefined),
        };

@@ -305,7 +313,7 @@ function HostMetricsDetails({
    if (selectedView === VIEW_TYPES.LOGS) {
      const filtersWithoutPagination = {
        ...logFilters,
-       items: logFilters.items.filter((item) => item.key?.key !== 'id'),
+       items: logFilters?.items?.filter((item) => item.key?.key !== 'id') || [],
      };

      const compositeQuery = {

@@ -356,6 +364,7 @@ function HostMetricsDetails({

  const handleClose = (): void => {
    setSelectedInterval(selectedTime as Time);
    lastSelectedInterval.current = null;
    setSearchParams({});

    if (selectedTime !== 'custom') {

@@ -52,14 +52,16 @@ function HostMetricLogsDetailedView({
        ...currentQuery.builder.queryData[0].aggregateAttribute,
      },
      filters: {
-       items: logFilters.items.filter((item) => item.key?.key !== 'host.name'),
+       items:
+         logFilters?.items?.filter((item) => item.key?.key !== 'host.name') ||
+         [],
        op: 'AND',
      },
    },
  ],
},
}),
- [currentQuery, logFilters.items],
+ [currentQuery, logFilters?.items],
);

const query = updatedCurrentQuery?.builder?.queryData[0] || null;

@@ -70,7 +72,7 @@ function HostMetricLogsDetailedView({
  <div className="filter-section">
    {query && (
      <QueryBuilderSearch
-       query={query}
+       query={query as IBuilderQuery}
        onChange={(value): void => handleChangeLogFilters(value, VIEWS.LOGS)}
        disableNavigationShortcuts
      />
@@ -13,13 +13,15 @@ import {
  CustomTimeType,
  Time,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { useMultiIntersectionObserver } from 'hooks/useMultiIntersectionObserver';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
- import { useQueries, UseQueryResult } from 'react-query';
+ import { QueryFunctionContext, useQueries, UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

@@ -53,6 +55,11 @@ function Metrics({
  featureFlags?.find((flag) => flag.name === FeatureKeys.DOT_METRICS_ENABLED)
    ?.active || false;

  const {
    visibilities,
    setElement,
  } = useMultiIntersectionObserver(hostWidgetInfo.length, { threshold: 0.1 });

  const queryPayloads = useMemo(
    () =>
      getHostQueryPayload(

@@ -65,17 +72,22 @@ function Metrics({
  );

  const queries = useQueries(
-   queryPayloads.map((payload) => ({
+   queryPayloads.map((payload, index) => ({
      queryKey: ['host-metrics', payload, ENTITY_VERSION_V4, 'HOST'],
-     queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
-       GetMetricQueryRange(payload, ENTITY_VERSION_V4),
-     enabled: !!payload,
+     queryFn: ({
+       signal,
+     }: QueryFunctionContext): Promise<
+       SuccessResponse<MetricRangePayloadProps>
+     > => GetMetricQueryRange(payload, ENTITY_VERSION_V4, signal),
+     enabled: !!payload && visibilities[index],
+     keepPreviousData: true,
    })),
  );

  const isDarkMode = useIsDarkMode();
  const graphRef = useRef<HTMLDivElement>(null);
  const dimensions = useResizeObserver(graphRef);
  const { currentQuery } = useQueryBuilder();

  const chartData = useMemo(
    () => queries.map(({ data }) => getUPlotChartData(data?.payload)),

@@ -134,16 +146,24 @@ function Metrics({
        minTimeScale: graphTimeIntervals[idx].start,
        maxTimeScale: graphTimeIntervals[idx].end,
        onDragSelect: (start, end) => onDragSelect(start, end, idx),
        query: currentQuery,
      }),
    ),
-   [queries, isDarkMode, dimensions, graphTimeIntervals, onDragSelect],
+   [
+     queries,
+     isDarkMode,
+     dimensions,
+     graphTimeIntervals,
+     onDragSelect,
+     currentQuery,
+   ],
  );

  const renderCardContent = (
    query: UseQueryResult<SuccessResponse<MetricRangePayloadProps>, unknown>,
    idx: number,
  ): JSX.Element => {
-   if (query.isLoading) {
+   if ((!query.data && query.isLoading) || !visibilities[idx]) {
      return <Skeleton />;
    }

@@ -181,7 +201,7 @@ function Metrics({
      </div>
      <Row gutter={24} className="host-metrics-container">
        {queries.map((query, idx) => (
-         <Col span={12} key={hostWidgetInfo[idx].title}>
+         <Col ref={setElement(idx)} span={12} key={hostWidgetInfo[idx].title}>
            <Typography.Text>{hostWidgetInfo[idx].title}</Typography.Text>
            <Card bordered className="host-metrics-card" ref={graphRef}>
              {renderCardContent(query, idx)}
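The Metrics change above defers each host chart's query until its card scrolls into view. The implementation of useMultiIntersectionObserver is not shown in this diff; the sketch below only states the contract the component appears to rely on, inferred from the call sites, and is an assumption rather than the real hook.

// Hypothetical shape, inferred from the call sites above.
type MultiIntersectionObserver = {
  visibilities: boolean[]; // visibilities[i] becomes true once element i intersects the viewport
  setElement: (index: number) => (node: HTMLElement | null) => void; // ref-callback factory per card
};

// With this contract, `enabled: !!payload && visibilities[index]` keeps each
// react-query request idle until the corresponding <Col> is on screen, and
// `keepPreviousData: true` avoids flashing a skeleton on refetch.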
@@ -0,0 +1,101 @@
.input-with-label {
  display: flex;
  flex-direction: row;

  border-radius: 2px 0px 0px 2px;

  .label {
    color: var(--bg-vanilla-400);
    font-size: 12px;
    font-style: normal;
    font-weight: 500;
    line-height: 18px; /* 128.571% */
    letter-spacing: 0.56px;

    max-width: 150px;
    min-width: 60px;
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;

    padding: 0px 8px;

    border-radius: 2px 0px 0px 2px;
    border: 1px solid var(--bg-slate-400);
    background: var(--bg-ink-300);

    display: flex;
    justify-content: flex-start;
    align-items: center;
    font-weight: var(--font-weight-light);
  }

  .input {
    flex: 1;
    min-width: 150px;
    font-family: 'Space Mono', monospace !important;

    border-radius: 2px 0px 0px 2px;
    border: 1px solid var(--bg-slate-400);
    background: var(--bg-ink-300);

    border-right: none;
    border-left: none;
    border-top-right-radius: 0px;
    border-bottom-right-radius: 0px;
    border-top-left-radius: 0px;
    border-bottom-left-radius: 0px;
  }

  .close-btn {
    border-radius: 0px 2px 2px 0px;
    border: 1px solid var(--bg-slate-400);
    background: var(--bg-ink-300);
    height: 38px;
    width: 38px;
  }

  &.labelAfter {
    .input {
      border-radius: 0px 2px 2px 0px;
      border: 1px solid var(--bg-slate-400);
      background: var(--bg-ink-300);
      border-top-right-radius: 0px;
      border-bottom-right-radius: 0px;
    }

    .label {
      border-left: none;
      border-top-left-radius: 0px;
      border-bottom-left-radius: 0px;
    }
  }
}

.lightMode {
  .input-with-label {
    .label {
      color: var(--bg-ink-500) !important;

      border: 1px solid var(--bg-vanilla-300) !important;
      background: var(--bg-vanilla-100) !important;
    }

    .input {
      border: 1px solid var(--bg-vanilla-300) !important;
      background: var(--bg-vanilla-100) !important;
    }

    .close-btn {
      border: 1px solid var(--bg-vanilla-300) !important;
      background: var(--bg-vanilla-100) !important;
    }

    &.labelAfter {
      .input {
        border: 1px solid var(--bg-vanilla-300) !important;
        background: var(--bg-vanilla-100) !important;
      }
    }
  }
}
74  frontend/src/components/InputWithLabel/InputWithLabel.tsx  Normal file
@@ -0,0 +1,74 @@
import './InputWithLabel.styles.scss';

import { Button, Input, Typography } from 'antd';
import cx from 'classnames';
import { X } from 'lucide-react';
import { useState } from 'react';

function InputWithLabel({
  label,
  initialValue,
  placeholder,
  type,
  onClose,
  labelAfter,
  onChange,
  className,
  closeIcon,
}: {
  label: string;
  initialValue?: string | number;
  placeholder: string;
  type?: string;
  onClose?: () => void;
  labelAfter?: boolean;
  onChange: (value: string) => void;
  className?: string;
  closeIcon?: React.ReactNode;
}): JSX.Element {
  const [inputValue, setInputValue] = useState<string>(
    initialValue ? initialValue.toString() : '',
  );

  const handleChange = (e: React.ChangeEvent<HTMLInputElement>): void => {
    setInputValue(e.target.value);
    onChange?.(e.target.value);
  };

  return (
    <div
      className={cx('input-with-label', className, {
        labelAfter,
      })}
    >
      {!labelAfter && <Typography.Text className="label">{label}</Typography.Text>}
      <Input
        className="input"
        placeholder={placeholder}
        type={type}
        value={inputValue}
        onChange={handleChange}
        name={label.toLowerCase()}
      />
      {labelAfter && <Typography.Text className="label">{label}</Typography.Text>}
      {onClose && (
        <Button
          className="periscope-btn ghost close-btn"
          icon={closeIcon || <X size={16} />}
          onClick={onClose}
        />
      )}
    </div>
  );
}

InputWithLabel.defaultProps = {
  type: 'text',
  onClose: undefined,
  labelAfter: false,
  initialValue: undefined,
  className: undefined,
  closeIcon: undefined,
};

export default InputWithLabel;
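A quick usage sketch for the new component; the handlers and import path follow the repo's absolute-import convention and are illustrative only, not part of the diff.

import InputWithLabel from 'components/InputWithLabel/InputWithLabel';

function LimitRow(): JSX.Element {
  return (
    <InputWithLabel
      label="Limit"
      placeholder="Enter a limit"
      type="number"
      initialValue={10}
      // onChange receives the raw string value from the underlying antd Input.
      onChange={(value): void => console.log('limit changed to', value)}
      // Passing onClose renders the trailing close button.
      onClose={(): void => console.log('row removed')}
    />
  );
}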
@@ -20,3 +20,7 @@ export type LogDetailProps = {
} & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
  Partial<Pick<ActionItemProps, 'onClickActionItem'>> &
  Pick<DrawerProps, 'onClose'>;

export type LogDetailInnerProps = LogDetailProps & {
  log: NonNullable<LogDetailProps['log']>;
};

@@ -62,6 +62,10 @@
  box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1);
}

&-query-container {
  margin-bottom: 16px;
}

.log-detail-drawer__log {
  width: 100%;
  display: flex;

@@ -33,7 +33,6 @@ import {
  Compass,
  Copy,
  Filter,
- HardHat,
  Table,
  TextSelect,
  X,

@@ -48,12 +47,15 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';

import { RESOURCE_KEYS, VIEW_TYPES, VIEWS } from './constants';
- import { LogDetailProps } from './LogDetail.interfaces';
- import QueryBuilderSearchWrapper from './QueryBuilderSearchWrapper';
+ import { LogDetailProps, LogDetailInnerProps } from './LogDetail.interfaces';
+ import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
+ import useInitialQuery from 'container/LogsExplorerContext/useInitialQuery';
+ import { cloneDeep } from 'lodash-es';
+ import { convertExpressionToFilters } from 'components/QueryBuilderV2/utils';

const convert = new Convert();

- function LogDetail({
+ function LogDetailInner({
  log,
  onClose,
  onAddToQuery,
@@ -62,16 +64,19 @@ function LogDetail({
  selectedTab,
  isListViewPanel = false,
  listViewPanelSelectedFields,
- }: LogDetailProps): JSX.Element {
+ }: LogDetailInnerProps): JSX.Element {
+ const initialContextQuery = useInitialQuery(log);
+ const [contextQuery, setContextQuery] = useState<Query | undefined>(
+   initialContextQuery,
+ );
  const [, copyToClipboard] = useCopyToClipboard();
  const [selectedView, setSelectedView] = useState<VIEWS>(selectedTab);

- const [isFilterVisibile, setIsFilterVisible] = useState<boolean>(false);
+ const [isFilterVisible, setIsFilterVisible] = useState<boolean>(false);

- const [contextQuery, setContextQuery] = useState<Query | undefined>();
  const [filters, setFilters] = useState<TagFilter | null>(null);
  const [isEdit, setIsEdit] = useState<boolean>(false);
- const { initialDataSource, stagedQuery } = useQueryBuilder();
+ const { stagedQuery } = useQueryBuilder();

  const listQuery = useMemo(() => {
    if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;

@@ -81,7 +86,7 @@ function LogDetail({

  const { options } = useOptionsMenu({
    storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
-   dataSource: initialDataSource || DataSource.LOGS,
+   dataSource: DataSource.LOGS,
    aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
  });

@@ -104,7 +109,7 @@ function LogDetail({
  };

  const handleFilterVisible = (): void => {
-   setIsFilterVisible(!isFilterVisibile);
+   setIsFilterVisible(!isFilterVisible);
    setIsEdit(!isEdit);
  };

@@ -142,6 +147,46 @@ function LogDetail({
    safeNavigate(`${ROUTES.LOGS_EXPLORER}?${urlQuery.toString()}`);
  };

  const handleRunQuery = (expression: string): void => {
    let updatedContextQuery = cloneDeep(contextQuery);

    if (!updatedContextQuery || !updatedContextQuery.builder) {
      return;
    }

    const newFilters: TagFilter = {
      items: expression ? convertExpressionToFilters(expression) : [],
      op: 'AND',
    };

    updatedContextQuery = {
      ...updatedContextQuery,
      builder: {
        ...updatedContextQuery?.builder,
        queryData: updatedContextQuery?.builder.queryData.map((queryData) => {
          return {
            ...queryData,
            filter: {
              ...queryData.filter,
              expression: expression,
            },
            filters: {
              ...queryData.filters,
              ...newFilters,
              op: queryData.filters?.op ?? 'AND',
            },
          };
        }),
      },
    };

    setContextQuery(updatedContextQuery);

    if (newFilters) {
      setFilters(newFilters);
    }
  };

  // Only show when opened from infra monitoring page
  const showOpenInExplorerBtn = useMemo(
    () => location.pathname?.includes('/infrastructure-monitoring'),
@@ -149,11 +194,6 @@ function LogDetail({
    [],
  );

- if (!log) {
-   // eslint-disable-next-line react/jsx-no-useless-fragment
-   return <></>;
- }

  const logType = log?.attributes_string?.log_level || LogType.INFO;

  return (

@@ -269,18 +309,16 @@ function LogDetail({
        />
      )}
    </div>

-   <QueryBuilderSearchWrapper
-     isEdit={isEdit}
-     log={log}
-     filters={filters}
-     setContextQuery={setContextQuery}
-     setFilters={setFilters}
-     contextQuery={contextQuery}
-     suffixIcon={
-       <HardHat size={12} style={{ paddingRight: Spacing.PADDING_2 }} />
-     }
-   />
+   {isFilterVisible && contextQuery?.builder.queryData[0] && (
+     <div className="log-detail-drawer-query-container">
+       <QuerySearch
+         onChange={() => {}}
+         dataSource={DataSource.LOGS}
+         queryData={contextQuery?.builder.queryData[0]}
+         onRun={handleRunQuery}
+       />
+     </div>
+   )}

    {selectedView === VIEW_TYPES.OVERVIEW && (
      <Overview

@@ -316,4 +354,13 @@ function LogDetail({
  );
}

function LogDetail(props: LogDetailProps): JSX.Element {
  if (!props.log) {
    // eslint-disable-next-line react/jsx-no-useless-fragment
    return <></>;
  }

  return <LogDetailInner {...(props as LogDetailInnerProps)} />;
}

export default LogDetail;

@@ -15,6 +15,7 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties {
    letterSpacing: '-0.07px',
    marginBottom: '0px',
    minWidth: '10rem',
    width: '10rem',
  };
}

@@ -47,6 +47,14 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {

  const { formatTimezoneAdjustedTimestamp } = useTimezone();

  const bodyColumnStyle = useMemo(
    () => ({
      ...defaultTableStyle,
      ...(fields.length > 2 ? { width: '50rem' } : {}),
    }),
    [fields.length],
  );

  const columns: ColumnsType<Record<string, unknown>> = useMemo(() => {
    const fieldColumns: ColumnsType<Record<string, unknown>> = fields
      .filter((e) => !['id', 'body', 'timestamp'].includes(e.name))

@@ -136,7 +144,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
    field: string | number,
  ): ColumnTypeRender<Record<string, unknown>> => ({
    props: {
-     style: defaultTableStyle,
+     style: bodyColumnStyle,
    },
    children: (
      <TableBodyContent

@@ -166,6 +174,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => {
    linesPerRow,
    fontSize,
    formatTimezoneAdjustedTimestamp,
    bodyColumnStyle,
  ]);

  return { columns, dataSource: flattenLogData };
@@ -410,18 +410,18 @@ export default function LogsFormatOptionsMenu({
        )}

        <div className="column-format">
-         {addColumn?.value?.map(({ key, id }) => (
-           <div className="column-name" key={id}>
+         {addColumn?.value?.map(({ name }) => (
+           <div className="column-name" key={name}>
              <div className="name">
-               <Tooltip placement="left" title={key}>
-                 {key}
+               <Tooltip placement="left" title={name}>
+                 {name}
                </Tooltip>
              </div>
              {addColumn?.value?.length > 1 && (
                <X
                  className="delete-btn"
                  size={14}
-                 onClick={(): void => addColumn.onRemove(id as string)}
+                 onClick={(): void => addColumn.onRemove(name)}
                />
              )}
            </div>

@@ -0,0 +1,7 @@
.order-by-loading-container {
  padding: 4px;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
}