Compare commits

SIG-5270...chore/filt (22 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 44080a1d59 | |
| | 554c498209 | |
| | 156de83626 | |
| | 7c4a18687a | |
| | 7214f51e98 | |
| | 290e0754c6 | |
| | 0ea16f9472 | |
| | 0d773211af | |
| | 7028031e01 | |
| | 2a4407280d | |
| | 8c75fb29a6 | |
| | 85ea6105f8 | |
| | dc8fba6944 | |
| | 97bbc95aab | |
| | fbcb17006d | |
| | d642b69f8e | |
| | 7230069de6 | |
| | 80fff10273 | |
| | 39a6e3865e | |
| | 492e249c29 | |
| | d68affd1d6 | |
| | 7bad6d5377 | |
.github/workflows/build-enterprise.yaml (vendored, 1 change)

@@ -107,6 +107,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
.github/workflows/build-staging.yaml (vendored, 1 change)

@@ -106,6 +106,7 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
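Both workflows feed the same values through Go's `-X` linker flag, which overwrites a package-level string variable at link time (consistent with the `// this is set via build time variable` comment on `constants.ZeusURL` further down in this diff). A minimal, self-contained sketch of the mechanism; the package and variable names here are illustrative, not taken from the repository:

```go
// main.go -- minimal sketch of how -ldflags "-X ..." works.
package main

import "fmt"

// url has a default that the build pipeline can replace at link time, e.g.:
//   go build -ldflags "-X main.url=https://api.signoz.cloud"
var url = "http://localhost"

func main() {
	fmt.Println("zeus url:", url)
}
```

The target must be a plain string `var` (constants cannot be overridden this way), which is why the overridable defaults in `ee/query-service/constants` and `ee/zeus` are declared as variables rather than constants.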
.github/workflows/integrationci.yaml (vendored, 1 change)

@@ -17,7 +17,6 @@ jobs:
- bootstrap
- passwordauthn
- callbackauthn
- cloudintegrations
- querier
- ttl
sqlstore-provider:
@@ -31,6 +31,7 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
mod_timestamp: "{{ .CommitTimestamp }}"
@@ -10,6 +10,7 @@ import (
    "strings"
    "time"

    "github.com/SigNoz/signoz/ee/query-service/constants"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/modules/user"

@@ -76,7 +77,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
        return
    }

    ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
    ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
    if apiErr != nil {
        RespondError(w, basemodel.WrapApiError(
            apiErr, "couldn't deduce ingestion url and signoz api url",

@@ -185,37 +186,48 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
    return cloudIntegrationUser, nil
}

func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
    string, string, *basemodel.ApiError,
) {
    // TODO: remove this struct from here
    url := fmt.Sprintf(
        "%s%s",
        strings.TrimSuffix(constants.ZeusURL, "/"),
        "/v2/deployments/me",
    )

    type deploymentResponse struct {
        Name string `json:"name"`
        ClusterInfo struct {
            Region struct {
                DNS string `json:"dns"`
            } `json:"region"`
        } `json:"cluster"`
        Status string `json:"status"`
        Error  string `json:"error"`
        Data   struct {
            Name string `json:"name"`

            ClusterInfo struct {
                Region struct {
                    DNS string `json:"dns"`
                } `json:"region"`
            } `json:"cluster"`
        } `json:"data"`
    }

    respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
    if err != nil {
    resp, apiErr := requestAndParseResponse[deploymentResponse](
        ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
    )

    if apiErr != nil {
        return "", "", basemodel.WrapApiError(
            apiErr, "couldn't query for deployment info",
        )
    }

    if resp.Status != "success" {
        return "", "", basemodel.InternalError(fmt.Errorf(
            "couldn't query for deployment info: error: %w", err,
            "couldn't query for deployment info: status: %s, error: %s",
            resp.Status, resp.Error,
        ))
    }

    resp := new(deploymentResponse)

    err = json.Unmarshal(respBytes, resp)
    if err != nil {
        return "", "", basemodel.InternalError(fmt.Errorf(
            "couldn't unmarshal deployment info response: error: %w", err,
        ))
    }

    regionDns := resp.ClusterInfo.Region.DNS
    deploymentName := resp.Name
    regionDns := resp.Data.ClusterInfo.Region.DNS
    deploymentName := resp.Data.Name

    if len(regionDns) < 1 || len(deploymentName) < 1 {
        // Fail early if actual response structure and expectation here ever diverge
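The rewritten helper queries `{constants.ZeusURL}/v2/deployments/me` and reads the deployment name and region DNS out of the nested `data` object of the response. A standalone sketch of just the decoding step, using the same struct shape as the hunk above; the JSON payload is invented for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the deploymentResponse struct from the diff (old top-level fields omitted).
type deploymentResponse struct {
	Status string `json:"status"`
	Error  string `json:"error"`
	Data   struct {
		Name        string `json:"name"`
		ClusterInfo struct {
			Region struct {
				DNS string `json:"dns"`
			} `json:"region"`
		} `json:"cluster"`
	} `json:"data"`
}

func main() {
	// Hypothetical payload shape; real values come from the Zeus deployments API.
	payload := []byte(`{"status":"success","data":{"name":"myteam","cluster":{"region":{"dns":"us.signoz.cloud"}}}}`)

	resp := new(deploymentResponse)
	if err := json.Unmarshal(payload, resp); err != nil {
		panic(err)
	}

	regionDns := resp.Data.ClusterInfo.Region.DNS
	deploymentName := resp.Data.Name
	// The handler fails early if either value is empty.
	fmt.Println(regionDns, deploymentName)
}
```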
@@ -10,6 +10,9 @@ var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")

// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"

func GetOrDefaultEnv(key string, fallback string) string {
    v := os.Getenv(key)
    if len(v) == 0 {
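The hunk cuts off inside `GetOrDefaultEnv`, but the pattern is the usual environment-variable-with-fallback helper. A minimal sketch with the assumed tail of the function:

```go
package main

import (
	"fmt"
	"os"
)

// GetOrDefaultEnv returns the value of key from the environment, falling back
// to the given default when the variable is unset or empty. The return
// statements are assumed; the diff only shows the first two lines of the body.
func GetOrDefaultEnv(key string, fallback string) string {
	v := os.Getenv(key)
	if len(v) == 0 {
		return fallback
	}
	return v
}

func main() {
	fmt.Println(GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL"))
}
```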
ee/sqlstore/postgressqlstore/formatter.go (new file, 157 lines)

@@ -0,0 +1,157 @@
package postgressqlstore

import (
    "strings"

    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun/schema"
)

type formatter struct {
    bunf schema.Formatter
}

func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
    return &formatter{bunf: schema.NewFormatter(dialect)}
}

func (f *formatter) JSONExtractString(column, path string) []byte {
    var sql []byte
    sql = f.bunf.AppendIdent(sql, column)
    sql = append(sql, f.convertJSONPathToPostgres(path)...)
    return sql
}

func (f *formatter) JSONType(column, path string) []byte {
    var sql []byte
    sql = append(sql, "jsonb_typeof("...)
    sql = f.bunf.AppendIdent(sql, column)
    sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
    sql = append(sql, ')')
    return sql
}

func (f *formatter) JSONIsArray(column, path string) []byte {
    var sql []byte
    sql = append(sql, f.JSONType(column, path)...)
    sql = append(sql, " = 'array'"...)
    return sql
}

func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
    var sql []byte
    sql = append(sql, "jsonb_array_elements("...)
    sql = f.bunf.AppendIdent(sql, column)
    if path != "$" && path != "" {
        sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
    }
    sql = append(sql, ") AS "...)
    sql = f.bunf.AppendIdent(sql, alias)

    return sql, []byte(alias)
}

func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
    var sql []byte
    sql = append(sql, "jsonb_array_elements_text("...)
    sql = f.bunf.AppendIdent(sql, column)
    if path != "$" && path != "" {
        sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
    }
    sql = append(sql, ") AS "...)
    sql = f.bunf.AppendIdent(sql, alias)

    return sql, []byte(alias + "::text")
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
    var sql []byte
    sql = append(sql, "jsonb_each("...)
    sql = f.bunf.AppendIdent(sql, column)
    if path != "$" && path != "" {
        sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
    }
    sql = append(sql, ") AS "...)
    sql = f.bunf.AppendIdent(sql, alias)

    return sql, []byte(alias + ".key")
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
    var sql []byte
    sql = append(sql, "jsonb_agg("...)
    sql = append(sql, expression...)
    sql = append(sql, ')')
    return sql
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
    if len(values) == 0 {
        return []byte("jsonb_build_array()")
    }
    var sql []byte
    sql = append(sql, "jsonb_build_array("...)
    for i, v := range values {
        if i > 0 {
            sql = append(sql, ", "...)
        }
        sql = append(sql, '\'')
        sql = append(sql, v...)
        sql = append(sql, '\'')
    }
    sql = append(sql, ')')
    return sql
}

func (f *formatter) TextToJsonColumn(column string) []byte {
    var sql []byte
    sql = f.bunf.AppendIdent(sql, column)
    sql = append(sql, "::jsonb"...)
    return sql
}

func (f *formatter) convertJSONPathToPostgres(jsonPath string) string {
    return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}

func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) string {
    path := strings.TrimPrefix(jsonPath, "$")
    if path == "" || path == "." {
        return ""
    }

    parts := strings.Split(strings.TrimPrefix(path, "."), ".")
    if len(parts) == 0 {
        return ""
    }

    var result strings.Builder

    for i, part := range parts {
        if i < len(parts)-1 {
            result.WriteString("->")
            result.WriteString("'")
            result.WriteString(part)
            result.WriteString("'")
        } else {
            if asText {
                result.WriteString("->>")
            } else {
                result.WriteString("->")
            }
            result.WriteString("'")
            result.WriteString(part)
            result.WriteString("'")
        }
    }

    return result.String()
}

func (f *formatter) LowerExpression(expression string) []byte {
    var sql []byte
    sql = append(sql, "lower("...)
    sql = append(sql, expression...)
    sql = append(sql, ')')
    return sql
}
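Taken together, these helpers turn a JSONPath-style string such as `$.user.name` into a chain of Postgres `jsonb` operators: `->` for intermediate keys and `->>` (text extraction) for the final key when `asText` is true. A short usage sketch, written as if it sat in the same package (it calls the unexported `newFormatter`); the fragments in the comments follow from the path-conversion logic above and match the accompanying tests:

```go
package postgressqlstore

import (
	"fmt"

	"github.com/uptrace/bun/dialect/pgdialect"
)

// formatterUsageSketch is illustrative only; it would live next to the tests.
func formatterUsageSketch() {
	f := newFormatter(pgdialect.New())

	// `"data"->'user'->>'name'`
	fmt.Println(string(f.JSONExtractString("data", "$.user.name")))

	// `jsonb_typeof("data"->'tags') = 'array'`
	fmt.Println(string(f.JSONIsArray("data", "$.tags")))

	// `jsonb_array_elements_text("data"->'tags') AS "tag"` and the alias expression `tag::text`
	frag, aliasExpr := f.JSONArrayOfStrings("data", "$.tags", "tag")
	fmt.Println(string(frag), string(aliasExpr))
}
```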
ee/sqlstore/postgressqlstore/formatter_test.go (new file, 488 lines)

@@ -0,0 +1,488 @@
package postgressqlstore
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/uptrace/bun/dialect/pgdialect"
|
||||
)
|
||||
|
||||
func TestJSONExtractString(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `"data"->>'field'`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.name",
|
||||
expected: `"metadata"->'user'->>'name'`,
|
||||
},
|
||||
{
|
||||
name: "deeply nested path",
|
||||
column: "json_col",
|
||||
path: "$.level1.level2.level3",
|
||||
expected: `"json_col"->'level1'->'level2'->>'level3'`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `"json_col"`,
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
column: "data",
|
||||
path: "",
|
||||
expected: `"data"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONExtractString(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONType(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `jsonb_typeof("data"->'field')`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.age",
|
||||
expected: `jsonb_typeof("metadata"->'user'->'age')`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `jsonb_typeof("json_col")`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONType(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONIsArray(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.items",
|
||||
expected: `jsonb_typeof("data"->'items') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.tags",
|
||||
expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `jsonb_typeof("json_col") = 'array'`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONIsArray(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayElements(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "elem",
|
||||
expected: `jsonb_array_elements("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "elem",
|
||||
expected: `jsonb_array_elements("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.items",
|
||||
alias: "item",
|
||||
expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
|
||||
},
|
||||
{
|
||||
name: "deeply nested path",
|
||||
column: "json_col",
|
||||
path: "$.user.tags",
|
||||
alias: "tag",
|
||||
expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayOfStrings(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "str",
|
||||
expected: `jsonb_array_elements_text("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "str",
|
||||
expected: `jsonb_array_elements_text("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.strings",
|
||||
alias: "s",
|
||||
expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONKeys(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "k",
|
||||
expected: `jsonb_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "k",
|
||||
expected: `jsonb_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.object",
|
||||
alias: "key",
|
||||
expected: `jsonb_each("metadata"->'object') AS "key"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayAgg(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column",
|
||||
expression: "id",
|
||||
expected: "jsonb_agg(id)",
|
||||
},
|
||||
{
|
||||
name: "expression with function",
|
||||
expression: "DISTINCT name",
|
||||
expected: "jsonb_agg(DISTINCT name)",
|
||||
},
|
||||
{
|
||||
name: "complex expression",
|
||||
expression: "data->>'field'",
|
||||
expected: "jsonb_agg(data->>'field')",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONArrayAgg(tt.expression))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayLiteral(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
values []string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "empty array",
|
||||
values: []string{},
|
||||
expected: "jsonb_build_array()",
|
||||
},
|
||||
{
|
||||
name: "single value",
|
||||
values: []string{"value1"},
|
||||
expected: "jsonb_build_array('value1')",
|
||||
},
|
||||
{
|
||||
name: "multiple values",
|
||||
values: []string{"value1", "value2", "value3"},
|
||||
expected: "jsonb_build_array('value1', 'value2', 'value3')",
|
||||
},
|
||||
{
|
||||
name: "values with special characters",
|
||||
values: []string{"test", "with space", "with-dash"},
|
||||
expected: "jsonb_build_array('test', 'with space', 'with-dash')",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONArrayLiteral(tt.values...))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
jsonPath string
|
||||
asText bool
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path as text",
|
||||
jsonPath: "$.field",
|
||||
asText: true,
|
||||
expected: "->>'field'",
|
||||
},
|
||||
{
|
||||
name: "simple path as json",
|
||||
jsonPath: "$.field",
|
||||
asText: false,
|
||||
expected: "->'field'",
|
||||
},
|
||||
{
|
||||
name: "nested path as text",
|
||||
jsonPath: "$.user.name",
|
||||
asText: true,
|
||||
expected: "->'user'->>'name'",
|
||||
},
|
||||
{
|
||||
name: "nested path as json",
|
||||
jsonPath: "$.user.name",
|
||||
asText: false,
|
||||
expected: "->'user'->'name'",
|
||||
},
|
||||
{
|
||||
name: "deeply nested as text",
|
||||
jsonPath: "$.a.b.c.d",
|
||||
asText: true,
|
||||
expected: "->'a'->'b'->'c'->>'d'",
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
jsonPath: "$",
|
||||
asText: true,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
jsonPath: "",
|
||||
asText: true,
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New()).(*formatter)
|
||||
got := f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText)
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTextToJsonColumn(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column name",
|
||||
column: "data",
|
||||
expected: `"data"::jsonb`,
|
||||
},
|
||||
{
|
||||
name: "column with underscore",
|
||||
column: "user_data",
|
||||
expected: `"user_data"::jsonb`,
|
||||
},
|
||||
{
|
||||
name: "column with special characters",
|
||||
column: "json-col",
|
||||
expected: `"json-col"::jsonb`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.TextToJsonColumn(tt.column))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLowerExpression(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expr string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column name",
|
||||
expr: "name",
|
||||
expected: "lower(name)",
|
||||
},
|
||||
{
|
||||
name: "quoted column identifier",
|
||||
expr: `"column_name"`,
|
||||
expected: `lower("column_name")`,
|
||||
},
|
||||
{
|
||||
name: "jsonb text extraction",
|
||||
expr: "data->>'field'",
|
||||
expected: "lower(data->>'field')",
|
||||
},
|
||||
{
|
||||
name: "nested jsonb extraction",
|
||||
expr: "metadata->'user'->>'name'",
|
||||
expected: "lower(metadata->'user'->>'name')",
|
||||
},
|
||||
{
|
||||
name: "jsonb_typeof expression",
|
||||
expr: "jsonb_typeof(data->'field')",
|
||||
expected: "lower(jsonb_typeof(data->'field'))",
|
||||
},
|
||||
{
|
||||
name: "string concatenation",
|
||||
expr: "first_name || ' ' || last_name",
|
||||
expected: "lower(first_name || ' ' || last_name)",
|
||||
},
|
||||
{
|
||||
name: "CAST expression",
|
||||
expr: "CAST(value AS TEXT)",
|
||||
expected: "lower(CAST(value AS TEXT))",
|
||||
},
|
||||
{
|
||||
name: "COALESCE expression",
|
||||
expr: "COALESCE(name, 'default')",
|
||||
expected: "lower(COALESCE(name, 'default'))",
|
||||
},
|
||||
{
|
||||
name: "subquery column",
|
||||
expr: "users.email",
|
||||
expected: "lower(users.email)",
|
||||
},
|
||||
{
|
||||
name: "quoted identifier with special chars",
|
||||
expr: `"user-name"`,
|
||||
expected: `lower("user-name")`,
|
||||
},
|
||||
{
|
||||
name: "jsonb to text cast",
|
||||
expr: "data::text",
|
||||
expected: "lower(data::text)",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.LowerExpression(tt.expr))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@ package postgressqlstore
import (
    "context"
    "database/sql"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"

@@ -15,10 +14,11 @@ import (
)

type provider struct {
    settings factory.ScopedProviderSettings
    sqldb    *sql.DB
    bundb    *sqlstore.BunDB
    dialect  *dialect
    settings  factory.ScopedProviderSettings
    sqldb     *sql.DB
    bundb     *sqlstore.BunDB
    dialect   *dialect
    formatter sqlstore.SQLFormatter
}

func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {

@@ -55,11 +55,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config

    sqldb := stdlib.OpenDBFromPool(pool)

    pgDialect := pgdialect.New()
    bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
    return &provider{
        settings: settings,
        sqldb:    sqldb,
        bundb:    sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
        dialect:  new(dialect),
        settings:  settings,
        sqldb:     sqldb,
        bundb:     sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks),
        dialect:   new(dialect),
        formatter: newFormatter(bunDB.Dialect()),
    }, nil
}

@@ -75,6 +78,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
    return provider.dialect
}

func (provider *provider) Formatter() sqlstore.SQLFormatter {
    return provider.formatter
}

func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
    return provider.bundb.BunDBCtx(ctx)
}
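The provider change constructs the Postgres dialect once, shares it between the Bun DB and the new formatter, and exposes the formatter through a `Formatter()` accessor. An illustrative caller-side sketch, assuming the `sqlstore.SQLStore` interface gained a matching `Formatter()` method (that method on the interface is not shown in this diff, only the provider implementation):

```go
package example

import "github.com/SigNoz/signoz/pkg/sqlstore"

// whereTagsIsArray is a hypothetical helper, not code from this PR: any
// component holding a sqlstore.SQLStore can ask the store for its SQL
// formatter instead of hand-building jsonb fragments per dialect.
func whereTagsIsArray(store sqlstore.SQLStore) string {
	f := store.Formatter()
	return "WHERE " + string(f.JSONIsArray("labels", "$.tags"))
}
```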
@@ -1,28 +0,0 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
    GetSpanPercentilesProps,
    GetSpanPercentilesResponseDataProps,
} from 'types/api/trace/getSpanPercentiles';

const getSpanPercentiles = async (
    props: GetSpanPercentilesProps,
): Promise<SuccessResponseV2<GetSpanPercentilesResponseDataProps>> => {
    try {
        const response = await ApiBaseInstance.post('/span_percentile', {
            ...props,
        });

        return {
            httpStatusCode: response.status,
            data: response.data.data,
        };
    } catch (error) {
        ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
        throw error;
    }
};

export default getSpanPercentiles;
@@ -132,9 +132,9 @@
    justify-content: center;
}

.log-detail-drawer__actions {
.json-action-btn {
    display: flex;
    gap: 4px;
    gap: 8px;
}
}
@@ -319,35 +319,31 @@ function LogDetailInner({
            </Radio.Button>
        </Radio.Group>

        <div className="log-detail-drawer__actions">
            {selectedView === VIEW_TYPES.CONTEXT && (
                <Tooltip
                    title="Show Filters"
                    placement="topLeft"
                    aria-label="Show Filters"
                >
                    <Button
                        className="action-btn"
                        icon={<Filter size={16} />}
                        onClick={handleFilterVisible}
                    />
                </Tooltip>
            )}

            <Tooltip
                title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
                placement="topLeft"
                aria-label={
                    selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
                }
            >
        {selectedView === VIEW_TYPES.JSON && (
            <div className="json-action-btn">
                <Button
                    className="action-btn"
                    icon={<Copy size={16} />}
                    onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
                    onClick={handleJSONCopy}
                />
            </Tooltip>
            </div>
        </div>
        )}

        {selectedView === VIEW_TYPES.CONTEXT && (
            <Button
                className="action-btn"
                icon={<Filter size={16} />}
                onClick={handleFilterVisible}
            />
        )}

        <Tooltip title="Copy Log Link" placement="left" aria-label="Copy Log Link">
            <Button
                className="action-btn"
                icon={<Copy size={16} />}
                onClick={onLogCopy}
            />
        </Tooltip>
    </div>
    {isFilterVisible && contextQuery?.builder.queryData[0] && (
        <div className="log-detail-drawer-query-container">

@@ -387,8 +383,7 @@ function LogDetailInner({
    podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
    nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
    hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
    timestamp={log.timestamp.toString()}
    dataSource={DataSource.LOGS}
    logLineTimestamp={log.timestamp.toString()}
/>
)}
</Drawer>
@@ -398,7 +398,7 @@
}

.qb-search-container {
.metrics-container {
    .metrics-select-container {
        margin-bottom: 12px;
    }
}
@@ -22,8 +22,6 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
    showOnlyWhereClause = false,
    showTraceOperator = false,
    version,
    onSignalSourceChange,
    signalSourceChangeEnabled = false,
}: QueryBuilderProps): JSX.Element {
    const {
        currentQuery,

@@ -177,8 +175,6 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
    queryVariant={config?.queryVariant || 'dropdown'}
    showOnlyWhereClause={showOnlyWhereClause}
    isListViewPanel={isListViewPanel}
    onSignalSourceChange={onSignalSourceChange || ((): void => {})}
    signalSourceChangeEnabled={signalSourceChangeEnabled}
/>
) : (
currentQuery.builder.queryData.map((query, index) => (

@@ -197,9 +193,7 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
    queryVariant={config?.queryVariant || 'dropdown'}
    showOnlyWhereClause={showOnlyWhereClause}
    isListViewPanel={isListViewPanel}
    signalSource={query.source as 'meter' | ''}
    onSignalSourceChange={onSignalSourceChange || ((): void => {})}
    signalSourceChangeEnabled={signalSourceChangeEnabled}
    signalSource={config?.signalSource || ''}
/>
))
)}
@@ -1,14 +1,5 @@
.metrics-source-select-container {
.metrics-select-container {
    margin-bottom: 8px;
    display: flex;
    flex-direction: row;
    align-items: flex-start;
    gap: 8px;
    width: 100%;

    .source-selector {
        width: 120px;
    }

    .ant-select-selector {
        width: 100%;

@@ -51,7 +42,7 @@
}

.lightMode {
    .metrics-source-select-container {
    .metrics-select-container {
        .ant-select-selector {
            border: 1px solid var(--bg-vanilla-300) !important;
            background: var(--bg-vanilla-100);
@@ -1,39 +1,21 @@
|
||||
import './MetricsSelect.styles.scss';
|
||||
|
||||
import { Select } from 'antd';
|
||||
import {
|
||||
initialQueriesMap,
|
||||
initialQueryMeterWithType,
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { AggregatorFilter } from 'container/QueryBuilder/filters';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { memo, useCallback, useMemo, useState } from 'react';
|
||||
import { memo, useCallback, useState } from 'react';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { SelectOption } from 'types/common/select';
|
||||
|
||||
export const SOURCE_OPTIONS: SelectOption<string, string>[] = [
|
||||
{ value: 'metrics', label: 'Metrics' },
|
||||
{ value: 'meter', label: 'Meter' },
|
||||
];
|
||||
|
||||
export const MetricsSelect = memo(function MetricsSelect({
|
||||
query,
|
||||
index,
|
||||
version,
|
||||
signalSource,
|
||||
onSignalSourceChange,
|
||||
signalSourceChangeEnabled = false,
|
||||
}: {
|
||||
query: IBuilderQuery;
|
||||
index: number;
|
||||
version: string;
|
||||
signalSource: 'meter' | '';
|
||||
onSignalSourceChange: (value: string) => void;
|
||||
signalSourceChangeEnabled: boolean;
|
||||
}): JSX.Element {
|
||||
const [attributeKeys, setAttributeKeys] = useState<BaseAutocompleteData[]>([]);
|
||||
|
||||
@@ -49,67 +31,8 @@ export const MetricsSelect = memo(function MetricsSelect({
|
||||
},
|
||||
[handleChangeAggregatorAttribute, attributeKeys],
|
||||
);
|
||||
|
||||
const { updateAllQueriesOperators, handleSetQueryData } = useQueryBuilder();
|
||||
|
||||
const source = useMemo(
|
||||
() => (signalSource === 'meter' ? 'meter' : 'metrics'),
|
||||
[signalSource],
|
||||
);
|
||||
|
||||
const defaultMeterQuery = useMemo(
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
initialQueryMeterWithType,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'meter' as 'meter' | '',
|
||||
),
|
||||
[updateAllQueriesOperators],
|
||||
);
|
||||
|
||||
const defaultMetricsQuery = useMemo(
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
initialQueriesMap.metrics,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'',
|
||||
),
|
||||
[updateAllQueriesOperators],
|
||||
);
|
||||
|
||||
const handleSignalSourceChange = (value: string): void => {
|
||||
onSignalSourceChange(value);
|
||||
handleSetQueryData(
|
||||
index,
|
||||
value === 'meter'
|
||||
? {
|
||||
...defaultMeterQuery.builder.queryData[0],
|
||||
source: 'meter',
|
||||
queryName: query.queryName,
|
||||
}
|
||||
: {
|
||||
...defaultMetricsQuery.builder.queryData[0],
|
||||
source: '',
|
||||
queryName: query.queryName,
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="metrics-source-select-container">
|
||||
{signalSourceChangeEnabled && (
|
||||
<Select
|
||||
className="source-selector"
|
||||
placeholder="Source"
|
||||
options={SOURCE_OPTIONS}
|
||||
value={source}
|
||||
defaultValue="metrics"
|
||||
onChange={handleSignalSourceChange}
|
||||
/>
|
||||
)}
|
||||
|
||||
<div className="metrics-select-container">
|
||||
<AggregatorFilter
|
||||
onChange={handleAggregatorAttributeChange}
|
||||
query={query}
|
||||
|
||||
@@ -33,13 +33,7 @@ export const QueryV2 = memo(function QueryV2({
    showOnlyWhereClause = false,
    signalSource = '',
    isMultiQueryAllowed = false,
    onSignalSourceChange,
    signalSourceChangeEnabled = false,
}: QueryProps & {
    ref: React.RefObject<HTMLDivElement>;
    onSignalSourceChange: (value: string) => void;
    signalSourceChangeEnabled: boolean;
}): JSX.Element {
}: QueryProps & { ref: React.RefObject<HTMLDivElement> }): JSX.Element {
    const { cloneQuery, panelType } = useQueryBuilder();

    const showFunctions = query?.functions?.length > 0;

@@ -213,14 +207,12 @@ export const QueryV2 = memo(function QueryV2({
<div className="qb-elements-container">
    <div className="qb-search-container">
        {dataSource === DataSource.METRICS && (
            <div className="metrics-container">
            <div className="metrics-select-container">
                <MetricsSelect
                    query={query}
                    index={index}
                    version={ENTITY_VERSION_V5}
                    signalSource={signalSource as 'meter' | ''}
                    onSignalSourceChange={onSignalSourceChange}
                    signalSourceChangeEnabled={signalSourceChangeEnabled}
                />
            </div>
        )}

@@ -266,7 +258,7 @@ export const QueryV2 = memo(function QueryV2({
    panelType={panelType}
    query={query}
    index={index}
    key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}-${signalSource}`}
    key={`metrics-aggregate-section-${query.queryName}-${query.dataSource}`}
    version="v4"
    signalSource={signalSource as 'meter' | ''}
/>
@@ -24,7 +24,6 @@ export const DATE_TIME_FORMATS = {
    TIME_SECONDS: 'HH:mm:ss',
    TIME_UTC: 'HH:mm:ss (UTC Z)',
    TIME_UTC_MS: 'HH:mm:ss.SSS (UTC Z)',
    TIME_SPAN_PERCENTILE: 'HH:mm:ss MMM DD',

    // Short date formats
    DATE_SHORT: 'MM/DD',
@@ -90,7 +90,4 @@ export const REACT_QUERY_KEY = {

    // Routing Policies Query Keys
    GET_ROUTING_POLICIES: 'GET_ROUTING_POLICIES',

    // Span Percentiles Query Keys
    GET_SPAN_PERCENTILES: 'GET_SPAN_PERCENTILES',
} as const;
@@ -3,5 +3,4 @@ export const USER_PREFERENCES = {
    NAV_SHORTCUTS: 'nav_shortcuts',
    LAST_SEEN_CHANGELOG_VERSION: 'last_seen_changelog_version',
    SPAN_DETAILS_PINNED_ATTRIBUTES: 'span_details_pinned_attributes',
    SPAN_PERCENTILE_RESOURCE_ATTRIBUTES: 'span_percentile_resource_attributes',
};
@@ -11,14 +11,12 @@ import { v4 } from 'uuid';
|
||||
|
||||
import { useCreateAlertState } from '../context';
|
||||
import {
|
||||
INITIAL_EVALUATION_WINDOW_STATE,
|
||||
INITIAL_INFO_THRESHOLD,
|
||||
INITIAL_RANDOM_THRESHOLD,
|
||||
INITIAL_WARNING_THRESHOLD,
|
||||
THRESHOLD_MATCH_TYPE_OPTIONS,
|
||||
THRESHOLD_OPERATOR_OPTIONS,
|
||||
} from '../context/constants';
|
||||
import { AlertThresholdMatchType } from '../context/types';
|
||||
import EvaluationSettings from '../EvaluationSettings/EvaluationSettings';
|
||||
import ThresholdItem from './ThresholdItem';
|
||||
import { AnomalyAndThresholdProps, UpdateThreshold } from './types';
|
||||
@@ -40,12 +38,12 @@ function AlertThreshold({
|
||||
alertState,
|
||||
thresholdState,
|
||||
setThresholdState,
|
||||
setEvaluationWindow,
|
||||
notificationSettings,
|
||||
setNotificationSettings,
|
||||
} = useCreateAlertState();
|
||||
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
|
||||
const queryNames = getQueryNames(currentQuery);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -162,54 +160,6 @@ function AlertThreshold({
|
||||
}),
|
||||
);
|
||||
|
||||
const handleSetEvaluationDetailsForMeter = (): void => {
|
||||
setEvaluationWindow({
|
||||
type: 'SET_INITIAL_STATE_FOR_METER',
|
||||
});
|
||||
|
||||
setThresholdState({
|
||||
type: 'SET_MATCH_TYPE',
|
||||
payload: AlertThresholdMatchType.IN_TOTAL,
|
||||
});
|
||||
};
|
||||
|
||||
const handleSelectedQueryChange = (value: string): void => {
|
||||
// loop through currenttQuery and find the query that matches the selected query
|
||||
const query = currentQuery?.builder?.queryData.find(
|
||||
(query) => query.queryName === value,
|
||||
);
|
||||
|
||||
const currentSelectedQuery = currentQuery?.builder?.queryData.find(
|
||||
(query) => query.queryName === thresholdState.selectedQuery,
|
||||
);
|
||||
|
||||
const newSelectedQuerySource = query?.source || '';
|
||||
const currentSelectedQuerySource = currentSelectedQuery?.source || '';
|
||||
|
||||
if (newSelectedQuerySource === currentSelectedQuerySource) {
|
||||
setThresholdState({
|
||||
type: 'SET_SELECTED_QUERY',
|
||||
payload: value,
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (newSelectedQuerySource === 'meter') {
|
||||
handleSetEvaluationDetailsForMeter();
|
||||
} else {
|
||||
setEvaluationWindow({
|
||||
type: 'SET_INITIAL_STATE',
|
||||
payload: INITIAL_EVALUATION_WINDOW_STATE,
|
||||
});
|
||||
}
|
||||
|
||||
setThresholdState({
|
||||
type: 'SET_SELECTED_QUERY',
|
||||
payload: value,
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={classNames(
|
||||
@@ -225,7 +175,12 @@ function AlertThreshold({
|
||||
</Typography.Text>
|
||||
<Select
|
||||
value={thresholdState.selectedQuery}
|
||||
onChange={handleSelectedQueryChange}
|
||||
onChange={(value): void => {
|
||||
setThresholdState({
|
||||
type: 'SET_SELECTED_QUERY',
|
||||
payload: value,
|
||||
});
|
||||
}}
|
||||
style={{ width: 80 }}
|
||||
options={queryNames}
|
||||
data-testid="alert-threshold-query-select"
|
||||
|
||||
@@ -10,7 +10,6 @@ import { getEvaluationWindowTypeText, getTimeframeText } from './utils';

function EvaluationSettings(): JSX.Element {
    const { evaluationWindow, setEvaluationWindow } = useCreateAlertState();

    const [
        isEvaluationWindowPopoverOpen,
        setIsEvaluationWindowPopoverOpen,
@@ -24,11 +24,7 @@ import {
|
||||
INITIAL_EVALUATION_WINDOW_STATE,
|
||||
INITIAL_NOTIFICATION_SETTINGS_STATE,
|
||||
} from './constants';
|
||||
import {
|
||||
AlertThresholdMatchType,
|
||||
ICreateAlertContextProps,
|
||||
ICreateAlertProviderProps,
|
||||
} from './types';
|
||||
import { ICreateAlertContextProps, ICreateAlertProviderProps } from './types';
|
||||
import {
|
||||
advancedOptionsReducer,
|
||||
alertCreationReducer,
|
||||
@@ -71,7 +67,6 @@ export function CreateAlertProvider(
|
||||
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
|
||||
const location = useLocation();
|
||||
const queryParams = new URLSearchParams(location.search);
|
||||
const thresholdsFromURL = queryParams.get(QueryParams.thresholds);
|
||||
|
||||
const [alertType, setAlertType] = useState<AlertTypes>(() => {
|
||||
if (isEditMode) {
|
||||
@@ -127,28 +122,7 @@ export function CreateAlertProvider(
|
||||
setThresholdState({
|
||||
type: 'RESET',
|
||||
});
|
||||
|
||||
if (thresholdsFromURL) {
|
||||
try {
|
||||
const thresholds = JSON.parse(thresholdsFromURL);
|
||||
setThresholdState({
|
||||
type: 'SET_THRESHOLDS',
|
||||
payload: thresholds,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error parsing thresholds from URL:', error);
|
||||
}
|
||||
|
||||
setEvaluationWindow({
|
||||
type: 'SET_INITIAL_STATE_FOR_METER',
|
||||
});
|
||||
|
||||
setThresholdState({
|
||||
type: 'SET_MATCH_TYPE',
|
||||
payload: AlertThresholdMatchType.IN_TOTAL,
|
||||
});
|
||||
}
|
||||
}, [alertType, thresholdsFromURL]);
|
||||
}, [alertType]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isEditMode && initialAlertState) {
|
||||
|
||||
@@ -237,7 +237,6 @@ export type EvaluationWindowAction =
    }
    | { type: 'SET_EVALUATION_CADENCE_MODE'; payload: EvaluationCadenceMode }
    | { type: 'SET_INITIAL_STATE'; payload: EvaluationWindowState }
    | { type: 'SET_INITIAL_STATE_FOR_METER' }
    | { type: 'RESET' };

export type EvaluationCadenceMode = 'default' | 'custom' | 'rrule';
@@ -1,5 +1,3 @@
import { UTC_TIMEZONE } from 'components/CustomTimePicker/timezoneUtils';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { QueryParams } from 'constants/query';
import {
    alertDefaults,

@@ -13,7 +11,6 @@ import { AlertDef } from 'types/api/alerts/def';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import { CumulativeWindowTimeframes } from '../EvaluationSettings/types';
import {
    INITIAL_ADVANCED_OPTIONS_STATE,
    INITIAL_ALERT_STATE,

@@ -213,18 +210,6 @@ export const evaluationWindowReducer = (
        return INITIAL_EVALUATION_WINDOW_STATE;
    case 'SET_INITIAL_STATE':
        return action.payload;
    case 'SET_INITIAL_STATE_FOR_METER':
        return {
            ...state,
            windowType: 'cumulative',
            timeframe: CumulativeWindowTimeframes.CURRENT_DAY,
            startingAt: {
                time: '00:00:00',
                number: '0',
                timezone: UTC_TIMEZONE.value,
                unit: UniversalYAxisUnit.MINUTES,
            },
        };
    default:
        return state;
}
@@ -36,7 +36,6 @@ function QuerySection({
    // init namespace for translations
    const { t } = useTranslation('alerts');
    const [currentTab, setCurrentTab] = useState(queryCategory);
    const [signalSource, setSignalSource] = useState<string>('metrics');

    const handleQueryCategoryChange = (queryType: string): void => {
        setQueryCategory(queryType as EQueryType);

@@ -49,17 +48,12 @@

    const isDarkMode = useIsDarkMode();

    const handleSignalSourceChange = (value: string): void => {
        setSignalSource(value);
    };

    const renderMetricUI = (): JSX.Element => (
        <QueryBuilderV2
            panelType={panelType}
            config={{
                queryVariant: 'static',
                initialDataSource: ALERTS_DATA_SOURCE_MAP[alertType],
                signalSource: signalSource === 'meter' ? 'meter' : '',
            }}
            showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
            showFunctions={

@@ -68,8 +62,6 @@
                alertType === AlertTypes.LOGS_BASED_ALERT
            }
            version={alertDef.version || 'v3'}
            onSignalSourceChange={handleSignalSourceChange}
            signalSourceChangeEnabled
        />
    );
@@ -54,34 +54,16 @@ function GraphManager({

    const labelClickedHandler = useCallback(
        (labelIndex: number): void => {
            if (labelIndex < 0 || labelIndex >= graphsVisibilityStates.length) return;
            const newGraphVisibilityStates = Array<boolean>(data.length).fill(false);
            newGraphVisibilityStates[labelIndex] = true;

            const newGraphVisibilityStates = [...graphsVisibilityStates];
            const isCurrentlyVisible = newGraphVisibilityStates[labelIndex];
            const visibleCount = newGraphVisibilityStates.filter(Boolean).length;

            if (isCurrentlyVisible && visibleCount === 1) {
                newGraphVisibilityStates.fill(true);
            } else if (isCurrentlyVisible) {
                newGraphVisibilityStates.fill(false);
                newGraphVisibilityStates[labelIndex] = true;
            } else {
                newGraphVisibilityStates[labelIndex] = true;
            }

            // Update all graphs based on new state
            newGraphVisibilityStates.forEach((state, index) => {
                lineChartRef?.current?.toggleGraph(index, state);
                parentChartRef?.current?.toggleGraph(index, state);
            });
            setGraphsVisibilityStates(newGraphVisibilityStates);
        },
        [
            graphsVisibilityStates,
            lineChartRef,
            parentChartRef,
            setGraphsVisibilityStates,
        ],
        [data.length, lineChartRef, parentChartRef, setGraphsVisibilityStates],
    );

    const columns = getGraphManagerTableColumns({
@@ -1,6 +1,5 @@
|
||||
import { CheckboxChangeEvent } from 'antd/es/checkbox';
|
||||
import { ColumnType } from 'antd/es/table';
|
||||
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
||||
|
||||
import { ColumnsKeyAndDataIndex, ColumnsTitle } from '../contants';
|
||||
import { DataSetProps, ExtendedChartDataset } from '../types';
|
||||
@@ -8,20 +7,6 @@ import { getGraphManagerTableHeaderTitle } from '../utils';
|
||||
import CustomCheckBox from './CustomCheckBox';
|
||||
import { getLabel } from './GetLabel';
|
||||
|
||||
// Helper function to format numeric values based on yAxisUnit
|
||||
const formatMetricValue = (
|
||||
value: number | null | undefined,
|
||||
yAxisUnit?: string,
|
||||
): string => {
|
||||
if (value == null || value === undefined || Number.isNaN(value)) {
|
||||
return '';
|
||||
}
|
||||
if (yAxisUnit) {
|
||||
return getYAxisFormattedValue(value.toString(), yAxisUnit);
|
||||
}
|
||||
return value.toString();
|
||||
};
|
||||
|
||||
export const getGraphManagerTableColumns = ({
|
||||
tableDataSet,
|
||||
checkBoxOnChangeHandler,
|
||||
@@ -60,7 +45,6 @@ export const getGraphManagerTableColumns = ({
|
||||
width: 90,
|
||||
dataIndex: ColumnsKeyAndDataIndex.Avg,
|
||||
key: ColumnsKeyAndDataIndex.Avg,
|
||||
render: (value: number): string => formatMetricValue(value, yAxisUnit),
|
||||
},
|
||||
{
|
||||
title: getGraphManagerTableHeaderTitle(
|
||||
@@ -70,7 +54,6 @@ export const getGraphManagerTableColumns = ({
|
||||
width: 90,
|
||||
dataIndex: ColumnsKeyAndDataIndex.Sum,
|
||||
key: ColumnsKeyAndDataIndex.Sum,
|
||||
render: (value: number): string => formatMetricValue(value, yAxisUnit),
|
||||
},
|
||||
{
|
||||
title: getGraphManagerTableHeaderTitle(
|
||||
@@ -80,7 +63,6 @@ export const getGraphManagerTableColumns = ({
|
||||
width: 90,
|
||||
dataIndex: ColumnsKeyAndDataIndex.Max,
|
||||
key: ColumnsKeyAndDataIndex.Max,
|
||||
render: (value: number): string => formatMetricValue(value, yAxisUnit),
|
||||
},
|
||||
{
|
||||
title: getGraphManagerTableHeaderTitle(
|
||||
@@ -90,11 +72,10 @@ export const getGraphManagerTableColumns = ({
|
||||
width: 90,
|
||||
dataIndex: ColumnsKeyAndDataIndex.Min,
|
||||
key: ColumnsKeyAndDataIndex.Min,
|
||||
render: (value: number): string => formatMetricValue(value, yAxisUnit),
|
||||
},
|
||||
];
|
||||
|
||||
export interface GetGraphManagerTableColumnsProps {
|
||||
interface GetGraphManagerTableColumnsProps {
|
||||
tableDataSet: ExtendedChartDataset[];
|
||||
checkBoxOnChangeHandler: (e: CheckboxChangeEvent, index: number) => void;
|
||||
labelClickedHandler: (labelIndex: number) => void;
|
||||
|
||||
@@ -324,7 +324,6 @@ function FullView({
    panelType={selectedPanelType}
    version={selectedDashboard?.data?.version || 'v3'}
    isListViewPanel={selectedPanelType === PANEL_TYPES.LIST}
    signalSourceChangeEnabled
    // filterConfigs={filterConfigs}
    // queryComponents={queryComponents}
/>
@@ -1,338 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
import { render, screen, userEvent } from 'tests/test-utils';
|
||||
|
||||
import GraphManager from '../GridCard/FullView/GraphManager';
|
||||
import {
|
||||
getGraphManagerTableColumns,
|
||||
GetGraphManagerTableColumnsProps,
|
||||
} from '../GridCard/FullView/TableRender/GraphManagerColumns';
|
||||
import { GraphManagerProps } from '../GridCard/FullView/types';
|
||||
|
||||
// Props
|
||||
const props = {
|
||||
tableDataSet: [
|
||||
{
|
||||
label: 'Timestamp',
|
||||
stroke: 'purple',
|
||||
index: 0,
|
||||
show: true,
|
||||
sum: 52791867900,
|
||||
avg: 1759728930,
|
||||
max: 1759729800,
|
||||
min: 1759728060,
|
||||
},
|
||||
{
|
||||
drawStyle: 'line',
|
||||
lineInterpolation: 'spline',
|
||||
show: true,
|
||||
label: '{service.name=""}',
|
||||
stroke: '#B33300',
|
||||
width: 2,
|
||||
spanGaps: true,
|
||||
points: {
|
||||
size: 5,
|
||||
show: false,
|
||||
stroke: '#B33300',
|
||||
},
|
||||
index: 1,
|
||||
sum: 2274.96,
|
||||
avg: 75.83,
|
||||
max: 115.76,
|
||||
min: 55.64,
|
||||
},
|
||||
{
|
||||
drawStyle: 'line',
|
||||
lineInterpolation: 'spline',
|
||||
show: true,
|
||||
label: '{service.name="recommendationservice"}',
|
||||
stroke: '#BB6BD9',
|
||||
width: 2,
|
||||
spanGaps: true,
|
||||
points: {
|
||||
size: 5,
|
||||
show: false,
|
||||
stroke: '#BB6BD9',
|
||||
},
|
||||
index: 2,
|
||||
sum: 1770.84,
|
||||
avg: 59.028,
|
||||
max: 112.16,
|
||||
min: 0,
|
||||
},
|
||||
{
|
||||
drawStyle: 'line',
|
||||
lineInterpolation: 'spline',
|
||||
show: true,
|
||||
label: '{service.name="loadgenerator"}',
|
||||
stroke: '#E9967A',
|
||||
width: 2,
|
||||
spanGaps: true,
|
||||
points: {
|
||||
size: 5,
|
||||
show: false,
|
||||
stroke: '#E9967A',
|
||||
},
|
||||
index: 3,
|
||||
sum: 1801.25,
|
||||
avg: 60.041,
|
||||
max: 94.46,
|
||||
min: 39.86,
|
||||
},
|
||||
],
|
||||
graphVisibilityState: [true, true, true, true],
|
||||
yAxisUnit: 'ops',
|
||||
isGraphDisabled: false,
|
||||
} as GetGraphManagerTableColumnsProps;
|
||||
|
||||
describe('GraphManager', () => {
|
||||
it('should render the columns', () => {
|
||||
const columns = getGraphManagerTableColumns({
|
||||
...props,
|
||||
});
|
||||
expect(columns).toStrictEqual([
|
||||
{
|
||||
dataIndex: 'index',
|
||||
key: 'index',
|
||||
render: expect.any(Function),
|
||||
title: '',
|
||||
width: 50,
|
||||
},
|
||||
{
|
||||
dataIndex: 'label',
|
||||
key: 'label',
|
||||
render: expect.any(Function),
|
||||
title: 'Label',
|
||||
width: 300,
|
||||
},
|
||||
{
|
||||
dataIndex: 'avg',
|
||||
key: 'avg',
|
||||
render: expect.any(Function),
|
||||
title: 'Avg (in ops)',
|
||||
width: 90,
|
||||
},
|
||||
{
|
||||
dataIndex: 'sum',
|
||||
key: 'sum',
|
||||
render: expect.any(Function),
|
||||
title: 'Sum (in ops)',
|
||||
width: 90,
|
||||
},
|
||||
{
|
||||
dataIndex: 'max',
|
||||
key: 'max',
|
||||
render: expect.any(Function),
|
||||
title: 'Max (in ops)',
|
||||
width: 90,
|
||||
},
|
||||
{
|
||||
dataIndex: 'min',
|
||||
key: 'min',
|
||||
render: expect.any(Function),
|
||||
title: 'Min (in ops)',
|
||||
width: 90,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should render graphmanager with correct formatting using y-axis', () => {
|
||||
const testProps: GraphManagerProps = {
|
||||
data: [
|
||||
[1759729380, 1759729440, 1759729500], // timestamps
|
||||
[66.167, 76.833, 83.767], // series 1
|
||||
[46.6, 52.7, 70.867], // series 2
|
||||
[45.967, 52.967, 69.933], // series 3
|
||||
],
|
||||
name: 'test-graph',
|
||||
yAxisUnit: 'ops',
|
||||
onToggleModelHandler: jest.fn(),
setGraphsVisibilityStates: jest.fn(),
graphsVisibilityStates: [true, true, true, true],
lineChartRef: { current: { toggleGraph: jest.fn() } },
parentChartRef: { current: { toggleGraph: jest.fn() } },
options: {
series: [
{ label: 'Timestamp' },
{ label: '{service.name=""}' },
{ label: '{service.name="recommendationservice"}' },
{ label: '{service.name="loadgenerator"}' },
],
width: 100,
height: 100,
},
};

// eslint-disable-next-line react/jsx-props-no-spreading
render(<GraphManager {...testProps} />);

// Assert that column headers include y-axis unit formatting
expect(screen.getByText('Avg (in ops)')).toBeInTheDocument();
expect(screen.getByText('Sum (in ops)')).toBeInTheDocument();
expect(screen.getByText('Max (in ops)')).toBeInTheDocument();
expect(screen.getByText('Min (in ops)')).toBeInTheDocument();

// Assert formatting
expect(screen.getByText('75.6 ops/s')).toBeInTheDocument();
expect(screen.getByText('227 ops/s')).toBeInTheDocument();
expect(screen.getByText('83.8 ops/s')).toBeInTheDocument();
expect(screen.getByText('66.2 ops/s')).toBeInTheDocument();
});

it('should handle checkbox click correctly', async () => {
const mockToggleGraph = jest.fn();
const mockSetGraphsVisibilityStates = jest.fn();

const testProps: GraphManagerProps = {
data: [
[1759729380, 1759729440, 1759729500],
[66.167, 76.833, 83.767],
[46.6, 52.7, 70.867],
],
name: 'test-graph',
yAxisUnit: 'ops',
onToggleModelHandler: jest.fn(),
setGraphsVisibilityStates: mockSetGraphsVisibilityStates,
graphsVisibilityStates: [true, true, true],
lineChartRef: { current: { toggleGraph: mockToggleGraph } },
parentChartRef: { current: { toggleGraph: mockToggleGraph } },
options: {
series: [
{ label: 'Timestamp' },
{ label: '{service.name=""}' },
{ label: '{service.name="recommendationservice"}' },
],
width: 100,
height: 100,
},
};

render(<GraphManager {...testProps} />);

// Find the first checkbox input (index 1, since index 0 is timestamp)
const checkbox = screen.getAllByRole('checkbox')[0];
expect(checkbox).toBeInTheDocument();

// Simulate checkbox click
await userEvent.click(checkbox);

// Verify toggleGraph was called on both chart refs
expect(mockToggleGraph).toHaveBeenCalledWith(1, false);
expect(mockToggleGraph).toHaveBeenCalledTimes(2); // lineChartRef and parentChartRef

// Verify state update function was called
expect(mockSetGraphsVisibilityStates).toHaveBeenCalledWith([
true,
false,
true,
]);
});

it('should handle label click correctly for visibility toggle', async () => {
const mockToggleGraph = jest.fn();
const mockSetGraphsVisibilityStates = jest.fn();

const testProps: GraphManagerProps = {
data: [
[1759729380, 1759729440, 1759729500],
[66.167, 76.833, 83.767],
[46.6, 52.7, 70.867],
],
name: 'test-graph',
yAxisUnit: 'ops',
onToggleModelHandler: jest.fn(),
setGraphsVisibilityStates: mockSetGraphsVisibilityStates,
graphsVisibilityStates: [true, true, true],
lineChartRef: { current: { toggleGraph: mockToggleGraph } },
parentChartRef: { current: { toggleGraph: mockToggleGraph } },
options: {
series: [
{ label: 'Timestamp' },
{ label: '{service.name="loadgenerator"}' },
{ label: '{service.name="recommendationservice"}' },
],
width: 100,
height: 100,
},
};

render(<GraphManager {...testProps} />);

// Find the first label button (skip Cancel and Save buttons)
const buttons = screen.getAllByRole('button');
const label = buttons.find((button) =>
button.textContent?.includes('{service.name="loadgenerator"}'),
) as HTMLElement;
expect(label).toBeInTheDocument();

// Simulate label click
await userEvent.click(label);

// Verify setGraphsVisibilityStates was called with show-only behavior
expect(mockSetGraphsVisibilityStates).toHaveBeenCalledWith([
false,
true,
false,
]);

// Check if toggleGraph was called for each series
expect(mockToggleGraph).toHaveBeenCalledWith(0, false); // timestamp
expect(mockToggleGraph).toHaveBeenCalledWith(1, true); // selected series
expect(mockToggleGraph).toHaveBeenCalledWith(2, false); // other series
expect(mockToggleGraph).toHaveBeenCalledTimes(6); // 3 series × 2 chart refs
});

it('should handle label click to show all when only one is visible', async () => {
const mockToggleGraph = jest.fn();
const mockSetGraphsVisibilityStates = jest.fn();

const testProps: GraphManagerProps = {
data: [
[1759729380, 1759729440, 1759729500],
[66.167, 76.833, 83.767],
[46.6, 52.7, 70.867],
],
name: 'test-graph',
yAxisUnit: 'ops',
onToggleModelHandler: jest.fn(),
setGraphsVisibilityStates: mockSetGraphsVisibilityStates,
graphsVisibilityStates: [false, true, false], // Only one series visible
lineChartRef: { current: { toggleGraph: mockToggleGraph } },
parentChartRef: { current: { toggleGraph: mockToggleGraph } },
options: {
series: [
{ label: 'Timestamp' },
{ label: '{service.name=""}' },
{ label: '{service.name="recommendationservice"}' },
],
width: 100,
height: 100,
},
};

render(<GraphManager {...testProps} />);

// Find the visible label button (skip Cancel and Save buttons)
const buttons = screen.getAllByRole('button');
const label = buttons.find((button) =>
button.textContent?.includes('{service.name=""}'),
) as HTMLElement;
expect(label).toBeInTheDocument();

// Simulate label click (should show all since only this one is visible)
await userEvent.click(label);

// Verify setGraphsVisibilityStates was called with show-all behavior
expect(mockSetGraphsVisibilityStates).toHaveBeenCalledWith([
true,
true,
true,
]);

// Check if toggleGraph was called to show all series
expect(mockToggleGraph).toHaveBeenCalledWith(0, true); // timestamp
expect(mockToggleGraph).toHaveBeenCalledWith(1, true); // current series
expect(mockToggleGraph).toHaveBeenCalledWith(2, true); // other series
expect(mockToggleGraph).toHaveBeenCalledTimes(6); // 3 series × 2 chart refs
});
});
@@ -418,11 +418,6 @@
font-size: 12px;
font-weight: 600;
}

.set-alert-btn {
cursor: pointer;
margin-left: 24px;
}
}
}

@@ -19,7 +19,6 @@ import {
TablePaginationConfig,
TableProps as AntDTableProps,
Tag,
Tooltip,
Typography,
} from 'antd';
import { NotificationInstance } from 'antd/es/notification/interface';
@@ -35,20 +34,15 @@ import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import Tags from 'components/Tags/Tags';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { QueryParams } from 'constants/query';
import { initialQueryMeterWithType } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { INITIAL_ALERT_THRESHOLD_STATE } from 'container/CreateAlertV2/context/constants';
import dayjs from 'dayjs';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
import { cloneDeep, isNil, isUndefined } from 'lodash-es';
import { isNil, isUndefined } from 'lodash-es';
import {
ArrowUpRight,
BellPlus,
CalendarClock,
Check,
Copy,
@@ -66,7 +60,6 @@ import { useTimezone } from 'providers/Timezone';
import { ChangeEvent, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useMutation } from 'react-query';
import { useHistory } from 'react-router-dom';
import { useCopyToClipboard } from 'react-use';
import { ErrorResponse } from 'types/api';
import {
@@ -78,7 +71,6 @@ import {
IngestionKeyProps,
PaginationProps,
} from 'types/api/ingestionKeys/types';
import { MeterAggregateOperator } from 'types/common/queryBuilder';
import { USER_ROLES } from 'types/roles';
import { getDaysUntilExpiry } from 'utils/timeUtils';

@@ -178,8 +170,6 @@ function MultiIngestionSettings(): JSX.Element {

const { isEnterpriseSelfHostedUser } = useGetTenantLicense();

const history = useHistory();

const [
hasCreateLimitForIngestionKeyError,
setHasCreateLimitForIngestionKeyError,
@@ -704,68 +694,6 @@ function MultiIngestionSettings(): JSX.Element {

const { formatTimezoneAdjustedTimestamp } = useTimezone();

const handleCreateAlert = (
APIKey: IngestionKeyProps,
signal: LimitProps,
): void => {
let metricName = '';

switch (signal.signal) {
case 'metrics':
metricName = 'signoz.meter.metric.datapoint.count';
break;
case 'traces':
metricName = 'signoz.meter.span.size';
break;
case 'logs':
metricName = 'signoz.meter.log.size';
break;
default:
return;
}

const threshold =
signal.signal === 'metrics'
? signal.config?.day?.count || 0
: signal.config?.day?.size || 0;

const query = {
...initialQueryMeterWithType,
builder: {
...initialQueryMeterWithType.builder,
queryData: [
{
...initialQueryMeterWithType.builder.queryData[0],
aggregations: [
{
...initialQueryMeterWithType.builder.queryData[0].aggregations?.[0],
metricName,
timeAggregation: MeterAggregateOperator.INCREASE,
spaceAggregation: MeterAggregateOperator.SUM,
},
],
filter: {
expression: `signoz.workspace.key.id='${APIKey.id}'`,
},
},
],
},
};

const stringifiedQuery = JSON.stringify(query);

const thresholds = cloneDeep(INITIAL_ALERT_THRESHOLD_STATE.thresholds);
thresholds[0].thresholdValue = threshold;

const URL = `${ROUTES.ALERTS_NEW}?showNewCreateAlertsPage=true&${
QueryParams.compositeQuery
}=${encodeURIComponent(stringifiedQuery)}&${
QueryParams.thresholds
}=${encodeURIComponent(JSON.stringify(thresholds))}`;

history.push(URL);
};

const columns: AntDTableProps<IngestionKeyProps>['columns'] = [
{
title: 'Ingestion Key',
@@ -1255,27 +1183,6 @@ function MultiIngestionSettings(): JSX.Element {
</>
))}
</div>

{((signalCfg.usesSize &&
limit?.config?.day?.size !== undefined) ||
(signalCfg.usesCount &&
limit?.config?.day?.count !== undefined)) && (
<Tooltip
title="Set alert on this limit"
placement="top"
arrow={false}
>
<Button
icon={<BellPlus size={14} color={Color.BG_CHERRY_400} />}
className="set-alert-btn periscope-btn ghost"
type="text"
data-testid={`set-alert-btn-${signalName}`}
onClick={(): void =>
handleCreateAlert(APIKey, limitsDict[signalName])
}
/>
</Tooltip>
)}
</div>

{/* SECOND limit usage/limit */}

@@ -1,60 +1,10 @@
import { QueryParams } from 'constants/query';
import { rest, server } from 'mocks-server/server';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { LimitProps } from 'types/api/ingestionKeys/limits/types';
import {
AllIngestionKeyProps,
IngestionKeyProps,
} from 'types/api/ingestionKeys/types';
import { render, screen } from 'tests/test-utils';

import MultiIngestionSettings from '../MultiIngestionSettings';

// Extend the existing types to include limits with proper structure
interface TestIngestionKeyProps extends Omit<IngestionKeyProps, 'limits'> {
limits?: LimitProps[];
}

interface TestAllIngestionKeyProps extends Omit<AllIngestionKeyProps, 'data'> {
data: TestIngestionKeyProps[];
}

// Mock useHistory.push to capture navigation URL used by MultiIngestionSettings
const mockPush = jest.fn() as jest.MockedFunction<(path: string) => void>;
jest.mock('react-router-dom', () => {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const actual = jest.requireActual('react-router-dom');
return {
...actual,
useHistory: (): { push: typeof mockPush } => ({ push: mockPush }),
};
});

// Mock deployments data hook to avoid unrelated network calls in this page
jest.mock(
'hooks/CustomDomain/useGetDeploymentsData',
(): Record<string, unknown> => ({
useGetDeploymentsData: (): {
data: undefined;
isLoading: boolean;
isFetching: boolean;
isError: boolean;
} => ({
data: undefined,
isLoading: false,
isFetching: false,
isError: false,
}),
}),
);

const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
const TEST_WORKSPACE_ID = 'w1';
const INGESTION_SETTINGS_ROUTE = '/ingestion-settings';

describe('MultiIngestionSettings Page', () => {
beforeEach(() => {
mockPush.mockClear();
render(<MultiIngestionSettings />);
});

afterEach(() => {
@@ -62,10 +12,6 @@ describe('MultiIngestionSettings Page', () => {
});

it('renders MultiIngestionSettings page without crashing', () => {
render(<MultiIngestionSettings />, undefined, {
initialRoute: INGESTION_SETTINGS_ROUTE,
});

expect(screen.getByText('Ingestion Keys')).toBeInTheDocument();

expect(
@@ -81,181 +27,4 @@ describe('MultiIngestionSettings Page', () => {
expect(aboutKeyslink).toHaveClass('learn-more');
expect(aboutKeyslink).toHaveAttribute('rel', 'noreferrer');
});

it('navigates to create alert with metrics count threshold', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });

// Arrange API response with a metrics daily count limit so the alert button is visible
const response: TestAllIngestionKeyProps = {
status: 'success',
data: [
{
name: 'Key One',
expires_at: TEST_EXPIRES_AT,
value: 'secret',
workspace_id: TEST_WORKSPACE_ID,
id: 'k1',
created_at: TEST_CREATED_UPDATED,
updated_at: TEST_CREATED_UPDATED,
tags: [],
limits: [
{
id: 'l1',
signal: 'metrics',
config: { day: { count: 1000 } },
},
],
},
],
_pagination: { page: 1, per_page: 10, pages: 1, total: 1 },
};

server.use(
rest.get('*/workspaces/me/keys*', (_req, res, ctx) =>
res(ctx.status(200), ctx.json(response)),
),
);

// Render with initial route to test navigation
render(<MultiIngestionSettings />, undefined, {
initialRoute: INGESTION_SETTINGS_ROUTE,
});
// Wait for ingestion key to load and expand the row to show limits
await screen.findByText('Key One');
const expandButton = screen.getByRole('button', { name: /right Key One/i });
await user.click(expandButton);

// Wait for limits section to render and click metrics alert button by test id
await screen.findByText('LIMITS');
const metricsAlertBtn = (await screen.findByTestId(
'set-alert-btn-metrics',
)) as HTMLButtonElement;
await user.click(metricsAlertBtn);

// Wait for navigation to occur
await waitFor(() => {
expect(mockPush).toHaveBeenCalledTimes(1);
});

// Assert: navigation occurred with correct query parameters
const navigationCall = mockPush.mock.calls[0][0] as string;

// Check URL contains alerts/new route
expect(navigationCall).toContain('/alerts/new');
expect(navigationCall).toContain('showNewCreateAlertsPage=true');

// Parse query parameters
const urlParams = new URLSearchParams(navigationCall.split('?')[1]);

const thresholds = JSON.parse(urlParams.get(QueryParams.thresholds) || '{}');
expect(thresholds).toBeDefined();
expect(thresholds[0].thresholdValue).toBe(1000);

// Verify compositeQuery parameter exists and contains correct data
const compositeQuery = JSON.parse(
urlParams.get(QueryParams.compositeQuery) || '{}',
);
expect(compositeQuery.builder).toBeDefined();
expect(compositeQuery.builder.queryData).toBeDefined();

// Check that the query contains the correct filter expression for the key
const firstQueryData = compositeQuery.builder.queryData[0];
expect(firstQueryData.filter.expression).toContain(
"signoz.workspace.key.id='k1'",
);

// Verify metric name for metrics signal
expect(firstQueryData.aggregations[0].metricName).toBe(
'signoz.meter.metric.datapoint.count',
);
});

it('navigates to create alert for logs with size threshold', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });

// Arrange API response with a logs daily size limit so the alert button is visible
const response: TestAllIngestionKeyProps = {
status: 'success',
data: [
{
name: 'Key Two',
expires_at: TEST_EXPIRES_AT,
value: 'secret',
workspace_id: TEST_WORKSPACE_ID,
id: 'k2',
created_at: TEST_CREATED_UPDATED,
updated_at: TEST_CREATED_UPDATED,
tags: [],
limits: [
{
id: 'l2',
signal: 'logs',
config: { day: { size: 2048 } },
},
],
},
],
_pagination: { page: 1, per_page: 10, pages: 1, total: 1 },
};

server.use(
rest.get('*/workspaces/me/keys*', (_req, res, ctx) =>
res(ctx.status(200), ctx.json(response)),
),
);

render(<MultiIngestionSettings />, undefined, {
initialRoute: INGESTION_SETTINGS_ROUTE,
});

// Wait for ingestion key to load and expand the row to show limits
await screen.findByText('Key Two');
const expandButton = screen.getByRole('button', { name: /right Key Two/i });
await user.click(expandButton);

// Wait for limits section to render and click logs alert button by test id
await screen.findByText('LIMITS');
const logsAlertBtn = (await screen.findByTestId(
'set-alert-btn-logs',
)) as HTMLButtonElement;
await user.click(logsAlertBtn);

// Wait for navigation to occur
await waitFor(() => {
expect(mockPush).toHaveBeenCalledTimes(1);
});

// Assert: navigation occurred with correct query parameters
const navigationCall = mockPush.mock.calls[0][0] as string;

// Check URL contains alerts/new route
expect(navigationCall).toContain('/alerts/new');
expect(navigationCall).toContain('showNewCreateAlertsPage=true');

// Parse query parameters
const urlParams = new URLSearchParams(navigationCall.split('?')[1]);

// Verify thresholds parameter
const thresholds = JSON.parse(urlParams.get(QueryParams.thresholds) || '{}');
expect(thresholds).toBeDefined();
expect(thresholds[0].thresholdValue).toBe(2048);

// Verify compositeQuery parameter exists and contains correct data
const compositeQuery = JSON.parse(
urlParams.get(QueryParams.compositeQuery) || '{}',
);
expect(compositeQuery.builder).toBeDefined();
expect(compositeQuery.builder.queryData).toBeDefined();

// Check that the query contains the correct filter expression for the key
const firstQueryData = compositeQuery.builder.queryData[0];
expect(firstQueryData.filter.expression).toContain(
"signoz.workspace.key.id='k2'",
);

// Verify metric name for logs signal
expect(firstQueryData.aggregations[0].metricName).toBe(
'signoz.meter.log.size',
);
});
});

@@ -1,11 +1,9 @@
import './InfraMetrics.styles.scss';

import { Empty } from 'antd';
import { Empty, Radio } from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import SignozRadioGroup from 'components/SignozRadioGroup/SignozRadioGroup';
import { History, Table } from 'lucide-react';
import { useMemo, useState } from 'react';
import { DataSource } from 'types/common/queryBuilder';
import { useState } from 'react';

import { VIEW_TYPES } from './constants';
import NodeMetrics from './NodeMetrics';
@@ -16,8 +14,7 @@ interface MetricsDataProps {
nodeName: string;
hostName: string;
clusterName: string;
timestamp: string;
dataSource: DataSource.LOGS | DataSource.TRACES;
logLineTimestamp: string;
}

function InfraMetrics({
@@ -25,56 +22,22 @@ function InfraMetrics({
nodeName,
hostName,
clusterName,
timestamp,
dataSource = DataSource.LOGS,
logLineTimestamp,
}: MetricsDataProps): JSX.Element {
const [selectedView, setSelectedView] = useState<string>(() =>
podName ? VIEW_TYPES.POD : VIEW_TYPES.NODE,
);

const viewOptions = useMemo(() => {
const options = [
{
label: (
<div className="view-title">
<Table size={14} />
Node
</div>
),
value: VIEW_TYPES.NODE,
},
];

if (podName) {
options.push({
label: (
<div className="view-title">
<History size={14} />
Pod
</div>
),
value: VIEW_TYPES.POD,
});
}

return options;
}, [podName]);

const handleModeChange = (e: RadioChangeEvent): void => {
setSelectedView(e.target.value);
};

if (!podName && !nodeName && !hostName) {
const emptyStateDescription =
dataSource === DataSource.TRACES
? 'No data available. Please select a span containing a pod, node, or host attributes to view metrics.'
: 'No data available. Please select a valid log line containing a pod, node, or host attributes to view metrics.';

return (
<div className="empty-container">
<Empty
image={Empty.PRESENTED_IMAGE_SIMPLE}
description={emptyStateDescription}
description="No data available. Please select a valid log line containing a pod, node, or host attributes to view metrics."
/>
</div>
);
@@ -82,26 +45,46 @@ function InfraMetrics({

return (
<div className="infra-metrics-container">
<SignozRadioGroup
value={selectedView}
onChange={handleModeChange}
<Radio.Group
className="views-tabs"
options={viewOptions}
/>
onChange={handleModeChange}
value={selectedView}
>
<Radio.Button
className={selectedView === VIEW_TYPES.NODE ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.NODE}
>
<div className="view-title">
<Table size={14} />
Node
</div>
</Radio.Button>
{podName && (
<Radio.Button
className={selectedView === VIEW_TYPES.POD ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.POD}
>
<div className="view-title">
<History size={14} />
Pod
</div>
</Radio.Button>
)}
</Radio.Group>
{/* TODO(Rahul): Make a common config driven component for this and other infra metrics components */}
{selectedView === VIEW_TYPES.NODE && (
<NodeMetrics
nodeName={nodeName}
clusterName={clusterName}
hostName={hostName}
timestamp={timestamp}
logLineTimestamp={logLineTimestamp}
/>
)}
{selectedView === VIEW_TYPES.POD && podName && (
<PodMetrics
podName={podName}
clusterName={clusterName}
timestamp={timestamp}
logLineTimestamp={logLineTimestamp}
/>
)}
</div>

@@ -29,15 +29,15 @@ function NodeMetrics({
nodeName,
clusterName,
hostName,
timestamp,
logLineTimestamp,
}: {
nodeName: string;
clusterName: string;
hostName: string;
timestamp: string;
logLineTimestamp: string;
}): JSX.Element {
const { start, end, verticalLineTimestamp } = useMemo(() => {
const logTimestamp = dayjs(timestamp);
const logTimestamp = dayjs(logLineTimestamp);
const now = dayjs();
const startTime = logTimestamp.subtract(3, 'hour');

@@ -50,7 +50,7 @@ function NodeMetrics({
end: endTime.unix(),
verticalLineTimestamp: logTimestamp.unix(),
};
}, [timestamp]);
}, [logLineTimestamp]);

const { featureFlags } = useAppContext();
const dotMetricsEnabled =

@@ -23,14 +23,14 @@ import { getPodQueryPayload, podWidgetInfo } from './constants';
function PodMetrics({
podName,
clusterName,
timestamp,
logLineTimestamp,
}: {
podName: string;
clusterName: string;
timestamp: string;
logLineTimestamp: string;
}): JSX.Element {
const { start, end, verticalLineTimestamp } = useMemo(() => {
const logTimestamp = dayjs(timestamp);
const logTimestamp = dayjs(logLineTimestamp);
const now = dayjs();
const startTime = logTimestamp.subtract(3, 'hour');

@@ -43,7 +43,7 @@ function PodMetrics({
end: endTime.unix(),
verticalLineTimestamp: logTimestamp.unix(),
};
}, [timestamp]);
}, [logLineTimestamp]);

const legendScrollPositionRef = useRef<{
scrollTop: number;

@@ -33,7 +33,6 @@ function Explorer(): JSX.Element {
handleRunQuery,
stagedQuery,
updateAllQueriesOperators,
handleSetQueryData,
currentQuery,
} = useQueryBuilder();
const { safeNavigate } = useSafeNavigate();
@@ -51,15 +50,6 @@ function Explorer(): JSX.Element {
[updateAllQueriesOperators],
);

useEffect(() => {
handleSetQueryData(0, {
...initialQueryMeterWithType.builder.queryData[0],
source: 'meter',
});

// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

const exportDefaultQuery = useMemo(
() =>
updateAllQueriesOperators(

@@ -290,6 +290,13 @@ function Summary(): JSX.Element {
],
);

console.log({
isMetricsListDataEmpty,
isMetricsTreeMapDataEmpty,
treeMapData,
sec: treeMapData?.payload?.data[heatmapView],
});

return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-summary-tab">

@@ -168,7 +168,6 @@ function QuerySection({
version={selectedDashboard?.data?.version || 'v3'}
isListViewPanel={selectedGraph === PANEL_TYPES.LIST}
queryComponents={queryComponents}
signalSourceChangeEnabled
/>
</div>
),

@@ -35,8 +35,6 @@ export type QueryBuilderProps = {
showTraceOperator?: boolean;
version: string;
onChangeTraceView?: (view: TraceView) => void;
onSignalSourceChange?: (value: string) => void;
signalSourceChangeEnabled?: boolean;
};

export enum TraceView {

@@ -52,10 +52,6 @@ export const AggregatorFilter = memo(function AggregatorFilter({
(query.aggregations?.[0] as MetricAggregation)?.metricName || '',
);

useEffect(() => {
setSearchText('');
}, [signalSource]);

const debouncedSearchText = useMemo(() => {
// eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-unused-vars
const [_, value] = getAutocompleteValueAndType(searchText);
@@ -71,7 +67,6 @@ export const AggregatorFilter = memo(function AggregatorFilter({
queryAggregation.timeAggregation,
query.dataSource,
index,
signalSource,
],
async () =>
getAggregateAttribute({
@@ -105,7 +100,6 @@ export const AggregatorFilter = memo(function AggregatorFilter({
setOptionsData(options);
setAttributeKeys?.(data?.payload?.attributeKeys || []);
},
keepPreviousData: false,
},
);

@@ -170,11 +164,8 @@ export const AggregatorFilter = memo(function AggregatorFilter({
queryAggregation.timeAggregation,
query.dataSource,
index,
signalSource,
])?.payload?.attributeKeys || [];

setAttributeKeys?.(attributeKeys);

return attributeKeys;
}, [
debouncedValue,
@@ -182,7 +173,6 @@ export const AggregatorFilter = memo(function AggregatorFilter({
query.dataSource,
queryClient,
index,
signalSource,
setAttributeKeys,
]);

@@ -271,7 +271,7 @@ export const defaultMoreMenuItems: SidebarItem[] = [
icon: <ChartArea size={16} />,
isNew: false,
isEnabled: true,
isBeta: false,
isBeta: true,
itemKey: 'meter-explorer',
},
{

@@ -55,337 +55,6 @@
flex-direction: column;
gap: 8px;

.span-name-wrapper {
display: flex;
justify-content: space-between;
align-items: center;

.loading-spinner-container {
padding: 4px 8px;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;

display: inline-flex;
}

.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
min-width: 48px;
padding: 4px 8px;

border-left: 1px solid var(--bg-slate-400);

cursor: pointer;
}
}

.span-percentiles-container {
display: flex;
flex-direction: column;
position: relative;

fill: linear-gradient(
139deg,
rgba(18, 19, 23, 0.32) 0%,
rgba(18, 19, 23, 0.36) 98.68%
);

stroke-width: 1px;
stroke: var(--bg-slate-500, #161922);
filter: drop-shadow(2px 4px 16px rgba(0, 0, 0, 0.2));
backdrop-filter: blur(20px);
border: 1px solid var(--bg-slate-500);
border-radius: 4px;

.span-percentiles-header {
display: flex;
align-items: center;
justify-content: space-between;
gap: 8px;
padding: 8px 12px 8px 12px;
border-bottom: 1px solid var(--bg-slate-500);

.span-percentiles-header-text {
display: flex;
align-items: center;
gap: 8px;
cursor: pointer;
}
}

.span-percentile-content {
display: flex;
flex-direction: column;
gap: 8px;
padding: 8px;

.span-percentile-content-title {
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
}

.span-percentile-value-loader {
display: inline-flex;
align-items: flex-end;
justify-content: flex-end;
margin-right: 4px;
margin-left: 4px;
line-height: 18px;
}
}

.span-percentile-timerange {
width: 100%;

.span-percentile-timerange-select {
width: 100%;
margin-top: 8px;
margin-bottom: 16px;

.ant-select-selector {
border-radius: 50px;
border: 1px solid var(--bg-slate-400, #1d212d);
background: var(--bg-slate-500, #161922);
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: 0.28px;

height: 32px;
}
}
}

.span-percentile-values-table {
.span-percentile-values-table-header-row {
display: flex;
align-items: center;
justify-content: space-between;
gap: 8px;

.span-percentile-values-table-header {
color: var(--text-vanilla-400);
text-align: right;
font-family: Inter;
font-size: 11px;
font-style: normal;
font-weight: 500;
line-height: 20px; /* 181.818% */
text-transform: uppercase;
}
}

.span-percentile-values-table-data-rows {
margin-top: 8px;
display: flex;
flex-direction: column;
gap: 4px;

.span-percentile-values-table-data-rows-skeleton {
display: flex;
flex-direction: column;
gap: 4px;

.ant-skeleton-title {
width: 100% !important;
margin-top: 0px !important;
}

.ant-skeleton-paragraph {
margin-top: 8px;

& > li + li {
margin-top: 10px;
width: 100% !important;
}
}
}
}

.span-percentile-values-table-data-row {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
padding: 0px 4px;

.span-percentile-values-table-data-row-key {
flex: 0 0 auto;
color: var(--text-vanilla-100);
text-align: right;
font-variant-numeric: lining-nums tabular-nums slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 500;
line-height: 20px; /* 166.667% */
}

.span-percentile-values-table-data-row-value {
color: var(--text-vanilla-400);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on, 'ss02' on;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 166.667% */
}

.dashed-line {
flex: 1;
height: 0; /* line only */
margin: 0 8px;
border-top: 1px dashed var(--bg-slate-300);

/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
#1d212d 0,
#1d212d 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}
}

.current-span-percentile-row {
border-radius: 2px;
background: rgba(78, 116, 248, 0.2);

.span-percentile-values-table-data-row-key {
color: var(--text-robin-300);
}

.dashed-line {
flex: 1;
height: 0; /* line only */
margin: 0 8px;
border-top: 1px dashed #abbdff;

/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
#abbdff 0,
#abbdff 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}

.span-percentile-values-table-data-row-value {
color: var(--text-robin-400);
}
}
}
}

.resource-attributes-select-container {
overflow: hidden;
width: calc(100% + 16px);

position: absolute;
top: 32px;
left: -8px;
z-index: 1000;

.resource-attributes-select-container-header {
.resource-attributes-select-container-input {
border-radius: 0px;
border: none !important;
box-shadow: none !important;
height: 36px;

border-bottom: 1px solid var(--bg-slate-400) !important;
}
}

border-radius: 4px;
border: 1px solid var(--bg-slate-400, #1d212d);
background: linear-gradient(
139deg,
rgba(18, 19, 23, 1) 0%,
rgba(18, 19, 23, 1) 98.68%
);
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
backdrop-filter: blur(20px);

.ant-select {
width: 100%;
}

.resource-attributes-items {
height: 200px;
overflow-y: auto;

&::-webkit-scrollbar {
width: 0.3rem;
height: 0.3rem;
}

&::-webkit-scrollbar-track {
background: transparent;
}

&::-webkit-scrollbar-thumb {
background: var(--bg-slate-300);
}

&::-webkit-scrollbar-thumb:hover {
background: var(--bg-slate-200);
}
}

.resource-attributes-select-item {
display: flex;
align-items: center;
gap: 8px;
padding: 8px 12px 8px 12px;

.resource-attributes-select-item-checkbox {
.ant-checkbox-disabled {
background-color: var(--bg-robin-500);
color: var(--bg-vanilla-100);
}

.resource-attributes-select-item-value {
color: var(--bg-vanilla-100);

font-family: Inter;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: -0.07px;
}
}
}
}
}

.attribute-key {
color: var(--bg-vanilla-400);
font-family: Inter;
@@ -399,6 +68,7 @@

.value-wrapper {
display: flex;
padding: 2px 8px;
align-items: center;
width: fit-content;
max-width: 100%;
@@ -407,7 +77,6 @@
background: var(--bg-slate-500);

.attribute-value {
padding: 2px 8px;
color: var(--bg-vanilla-400);
font-family: 'Inter';
font-size: 14px;
@@ -531,44 +200,6 @@
}
}

.span-percentile-tooltip {
.ant-tooltip-content {
width: 300px;
max-width: 300px;
}

.span-percentile-tooltip-text {
color: var(--text-vanilla-400);
font-variant-numeric: lining-nums tabular-nums stacked-fractions ordinal
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 166.667% */
letter-spacing: -0.06px;

.span-percentile-tooltip-text-percentile {
color: var(--text-sakura-500);
font-variant-numeric: lining-nums tabular-nums stacked-fractions slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 12px;
}

.span-percentile-tooltip-text-link {
color: var(--text-vanilla-400);
text-align: right;
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 500;
line-height: 20px; /* 166.667% */
}
}
}

.span-details-drawer-docked {
width: 48px;
flex: 0 48px !important;
@@ -577,7 +208,6 @@
justify-content: center;
}
}

.resizable-handle {
box-sizing: border-box;
border: 2px solid transparent;
@@ -604,164 +234,6 @@

.description {
.item {
.span-name-wrapper {
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
border-left: 1px solid var(--bg-slate-300);
}
}

.span-percentiles-container {
fill: linear-gradient(
139deg,
rgba(18, 19, 23, 0.32) 0%,
rgba(18, 19, 23, 0.36) 98.68%
);

stroke-width: 1px;
stroke: var(--bg-slate-500);
filter: drop-shadow(2px 4px 16px rgba(0, 0, 0, 0.2));
backdrop-filter: blur(20px);
border: 1px solid var(--bg-vanilla-300);
border-radius: 4px;

.span-percentiles-header {
border-bottom: 1px solid var(--bg-vanilla-300);
}

.span-percentile-content {
.span-percentile-content-title {
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
}
}

.span-percentile-timerange {
.span-percentile-timerange-select {
.ant-select-selector {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
color: var(--text-slate-300);
}
}
}

.span-percentile-values-table {
.span-percentile-values-table-header-row {
.span-percentile-values-table-header {
color: var(--text-vanilla-400);
}
}

.span-percentile-values-table-data-row {
.span-percentile-values-table-data-row-key {
color: var(--text-ink-100);
}

.span-percentile-values-table-data-row-value {
color: var(--text-ink-400);
}

.dashed-line {
flex: 1;
height: 0; /* line only */
margin: 0 8px;
border-top: 1px dashed var(--bg-slate-300);

/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
var(--bg-slate-300) 0,
var(--bg-slate-300) 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}
}

.current-span-percentile-row {
border-radius: 2px;
background: rgba(78, 116, 248, 0.2);

.span-percentile-values-table-data-row-key {
color: var(--text-robin-300, #95acfb);
}

.dashed-line {
border-top: 1px dashed #abbdff;

/* Use border image to control dash length & spacing */
border-top-width: 1px;
border-top-style: solid; /* temporary solid for image */
border-image: repeating-linear-gradient(
to right,
#abbdff 0,
#abbdff 10px,
transparent 10px,
transparent 20px
)
1 stretch;
}

.span-percentile-values-table-data-row-value {
color: var(--text-robin-400);
}
}
}
}

.resource-attributes-select-container {
.resource-attributes-select-container-header {
.resource-attributes-select-container-input {
border: none !important;
box-shadow: none !important;
height: 36px;

border-bottom: 1px solid var(--bg-vanilla-400) !important;
background: var(--bg-vanilla-300);
color: var(--text-ink-400);
}
}

border-radius: 4px;
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
box-shadow: 0 3px 6px -4px rgba(0, 0, 0, 0.12),
0 6px 16px 0 rgba(0, 0, 0, 0.08), 0 9px 28px 8px rgba(0, 0, 0, 0.05);
backdrop-filter: blur(20px);

.resource-attributes-items {
&::-webkit-scrollbar-track {
background: transparent;
}

&::-webkit-scrollbar-thumb {
background: var(--bg-slate-300);
}

&::-webkit-scrollbar-thumb:hover {
background: var(--bg-slate-200);
}
}

.resource-attributes-select-item {
.resource-attributes-select-item-checkbox {
.ant-checkbox-disabled {
background-color: var(--bg-robin-500);
color: var(--text-ink-100);
}

.resource-attributes-select-item-value {
color: var(--text-ink-100);
}
}
}
}
}

.attribute-key {
color: var(--bg-ink-400);
}

@@ -1,53 +1,14 @@
|
||||
import './SpanDetailsDrawer.styles.scss';
|
||||
|
||||
import {
|
||||
Button,
|
||||
Checkbox,
|
||||
Input,
|
||||
Select,
|
||||
Skeleton,
|
||||
Tabs,
|
||||
TabsProps,
|
||||
Tooltip,
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import { Button, Tabs, TabsProps, Tooltip, Typography } from 'antd';
|
||||
import { RadioChangeEvent } from 'antd/lib';
|
||||
import getSpanPercentiles from 'api/trace/getSpanPercentiles';
|
||||
import getUserPreference from 'api/v1/user/preferences/name/get';
|
||||
import updateUserPreference from 'api/v1/user/preferences/name/update';
|
||||
import LogsIcon from 'assets/AlertHistory/LogsIcon';
|
||||
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
||||
import SignozRadioGroup from 'components/SignozRadioGroup/SignozRadioGroup';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { USER_PREFERENCES } from 'constants/userPreferences';
|
||||
import dayjs from 'dayjs';
|
||||
import useClickOutside from 'hooks/useClickOutside';
|
||||
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
|
||||
import {
|
||||
Anvil,
|
||||
BarChart2,
|
||||
Bookmark,
|
||||
Check,
|
||||
ChevronDown,
|
||||
Link2,
|
||||
Loader2,
|
||||
PanelRight,
|
||||
PlusIcon,
|
||||
Search,
|
||||
} from 'lucide-react';
|
||||
import { AnimatePresence, motion } from 'motion/react';
|
||||
import {
|
||||
Dispatch,
|
||||
SetStateAction,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import { Anvil, Bookmark, Link2, PanelRight, Search } from 'lucide-react';
|
||||
import { Dispatch, SetStateAction, useCallback, useState } from 'react';
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
import { formatEpochTimestamp } from 'utils/timeUtils';
|
||||
|
||||
@@ -56,7 +17,6 @@ import { RelatedSignalsViews } from './constants';
|
||||
import Events from './Events/Events';
|
||||
import LinkedSpans from './LinkedSpans/LinkedSpans';
|
||||
import SpanRelatedSignals from './SpanRelatedSignals/SpanRelatedSignals';
|
||||
import { hasInfraMetadata } from './utils';
|
||||
|
||||
interface ISpanDetailsDrawerProps {
|
||||
isSpanDetailsDocked: boolean;
|
||||
@@ -66,45 +26,6 @@ interface ISpanDetailsDrawerProps {
|
||||
traceEndTime: number;
|
||||
}
|
||||
|
||||
const timerangeOptions = [
|
||||
{
|
||||
label: '1 hour',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
label: '2 hours',
|
||||
value: 2,
|
||||
},
|
||||
{
|
||||
label: '3 hours',
|
||||
value: 3,
|
||||
},
|
||||
{
|
||||
label: '6 hours',
|
||||
value: 6,
|
||||
},
|
||||
{
|
||||
label: '12 hours',
|
||||
value: 12,
|
||||
},
|
||||
{
|
||||
label: '24 hours',
|
||||
value: 24,
|
||||
},
|
||||
];
|
||||
|
||||
interface IResourceAttribute {
|
||||
key: string;
|
||||
value: string;
|
||||
isSelected: boolean;
|
||||
}
|
||||
|
||||
const DEFAULT_RESOURCE_ATTRIBUTES = {
|
||||
serviceName: 'service.name',
|
||||
name: 'name',
|
||||
};
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
const {
|
||||
isSpanDetailsDocked,
|
||||
@@ -118,60 +39,12 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
const [shouldAutoFocusSearch, setShouldAutoFocusSearch] = useState<boolean>(
|
||||
false,
|
||||
);
|
||||
const [isSpanPercentilesOpen, setIsSpanPercentilesOpen] = useState<boolean>(
|
||||
false,
|
||||
);
|
||||
const [isRelatedSignalsOpen, setIsRelatedSignalsOpen] = useState<boolean>(
|
||||
false,
|
||||
);
|
||||
const [activeDrawerView, setActiveDrawerView] = useState<RelatedSignalsViews>(
|
||||
RelatedSignalsViews.LOGS,
|
||||
);
|
||||
|
||||
const [selectedTimeRange, setSelectedTimeRange] = useState<number>(1);
|
||||
const [
|
||||
resourceAttributesSearchQuery,
|
||||
setResourceAttributesSearchQuery,
|
||||
] = useState<string>('');
|
||||
|
||||
const [spanPercentileData, setSpanPercentileData] = useState<{
|
||||
percentile: number;
|
||||
description: string;
|
||||
percentiles: Record<string, number>;
|
||||
} | null>(null);
|
||||
|
||||
const [
|
||||
showResourceAttributesSelector,
|
||||
setShowResourceAttributesSelector,
|
||||
] = useState<boolean>(false);
|
||||
|
||||
const [selectedResourceAttributes, setSelectedResourceAttributes] = useState<
|
||||
Record<string, string>
|
||||
>({});
|
||||
|
||||
const [spanResourceAttributes, updateSpanResourceAttributes] = useState<
|
||||
IResourceAttribute[]
|
||||
>([] as IResourceAttribute[]);
|
||||
|
||||
const [initialWaitCompleted, setInitialWaitCompleted] = useState<boolean>(
|
||||
false,
|
||||
);
|
||||
|
||||
const [
|
||||
shouldFetchSpanPercentilesData,
|
||||
setShouldFetchSpanPercentilesData,
|
||||
] = useState<boolean>(false);
|
||||
|
||||
const [
|
||||
shouldUpdateUserPreference,
|
||||
setShouldUpdateUserPreference,
|
||||
] = useState<boolean>(false);
|
||||
|
||||
const handleTimeRangeChange = useCallback((value: number): void => {
|
||||
setShouldFetchSpanPercentilesData(true);
|
||||
setSelectedTimeRange(value);
|
||||
}, []);
|
||||
|
||||
const color = generateColor(
|
||||
selectedSpan?.serviceName || '',
|
||||
themeColors.traceDetailColors,
|
||||
@@ -187,35 +60,6 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
setIsRelatedSignalsOpen(false);
|
||||
}, []);
|
||||
|
||||
const relatedSignalsOptions = useMemo(() => {
|
||||
const baseOptions = [
|
||||
{
|
||||
label: (
|
||||
<div className="view-title">
|
||||
<LogsIcon width={14} height={14} />
|
||||
Logs
|
||||
</div>
|
||||
),
|
||||
value: RelatedSignalsViews.LOGS,
|
||||
},
|
||||
];
|
||||
|
||||
// Only show Infra option if span has infrastructure metadata
|
||||
if (hasInfraMetadata(selectedSpan)) {
|
||||
baseOptions.push({
|
||||
label: (
|
||||
<div className="view-title">
|
||||
<BarChart2 size={14} />
|
||||
Metrics
|
||||
</div>
|
||||
),
|
||||
value: RelatedSignalsViews.INFRA,
|
||||
});
|
||||
}
|
||||
|
||||
return baseOptions;
|
||||
}, [selectedSpan]);
|
||||
|
||||
function getItems(span: Span, startTime: number): TabsProps['items'] {
|
||||
return [
|
||||
{
|
||||
@@ -279,265 +123,6 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
];
|
||||
}
|
||||
|
||||
const resourceAttributesSelectorRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
useClickOutside({
|
||||
ref: resourceAttributesSelectorRef,
|
||||
onClickOutside: () => {
|
||||
if (resourceAttributesSelectorRef.current) {
|
||||
setShowResourceAttributesSelector(false);
|
||||
}
|
||||
},
|
||||
eventType: 'mousedown',
|
||||
});
|
||||
|
||||
const spanPercentileTooltipText = useMemo(
|
||||
() => (
|
||||
<div className="span-percentile-tooltip-text">
|
||||
<Typography.Text>
|
||||
This span duration is{' '}
|
||||
<span className="span-percentile-tooltip-text-percentile">
|
||||
p{Math.floor(spanPercentileData?.percentile || 0)}
|
||||
</span>{' '}
|
||||
out of the distribution for this resource evaluated for {selectedTimeRange}{' '}
|
||||
hour(s) since the span start time.
|
||||
</Typography.Text>
|
||||
<br />
|
||||
<br />
|
||||
<Typography.Text className="span-percentile-tooltip-text-link">
|
||||
Click to learn more
|
||||
</Typography.Text>
|
||||
</div>
|
||||
),
|
||||
[spanPercentileData?.percentile, selectedTimeRange],
|
||||
);
|
||||
|
||||
const endTime = useMemo(
|
||||
() => Math.floor(Number(selectedSpan?.timestamp) / 1000) * 1000,
|
||||
[selectedSpan?.timestamp],
|
||||
);
|
||||
|
||||
const startTime = useMemo(
|
||||
() =>
|
||||
dayjs(selectedSpan?.timestamp)
|
||||
.subtract(Number(selectedTimeRange), 'hour')
|
||||
.unix() * 1000,
|
||||
[selectedSpan?.timestamp, selectedTimeRange],
|
||||
);
|
||||
|
||||
const { mutate: updateUserPreferenceMutation } = useMutation(
|
||||
updateUserPreference,
|
||||
);
|
||||
|
||||
// TODO: Span percentile should be eventually moved to context and not fetched on every span change
|
||||
const {
|
||||
data: userSelectedResourceAttributes,
|
||||
isError: isErrorUserSelectedResourceAttributes,
|
||||
} = useQuery({
|
||||
queryFn: () =>
|
||||
getUserPreference({
|
||||
name: USER_PREFERENCES.SPAN_PERCENTILE_RESOURCE_ATTRIBUTES,
|
||||
}),
|
||||
queryKey: [
|
||||
'getUserPreferenceByPreferenceName',
|
||||
USER_PREFERENCES.SPAN_PERCENTILE_RESOURCE_ATTRIBUTES,
|
||||
selectedSpan?.spanId,
|
||||
],
|
||||
enabled: selectedSpan !== null && selectedSpan?.tagMap !== undefined,
|
||||
});
|
||||
|
||||
const {
|
||||
isLoading: isLoadingSpanPercentilesData,
|
||||
isFetching: isFetchingSpanPercentilesData,
|
||||
data,
|
||||
refetch: refetchSpanPercentilesData,
|
||||
isError: isErrorSpanPercentilesData,
|
||||
} = useQuery({
|
||||
queryFn: () =>
|
||||
getSpanPercentiles({
|
||||
start: startTime || 0,
|
||||
end: endTime || 0,
|
||||
spanDuration: selectedSpan?.durationNano || 0,
|
||||
serviceName: selectedSpan?.serviceName || '',
|
||||
name: selectedSpan?.name || '',
|
||||
resourceAttributes: selectedResourceAttributes,
|
||||
}),
|
||||
queryKey: [
|
||||
REACT_QUERY_KEY.GET_SPAN_PERCENTILES,
|
||||
selectedSpan?.spanId,
|
||||
startTime,
|
||||
endTime,
|
||||
],
|
||||
enabled:
|
||||
selectedSpan !== null &&
|
||||
shouldFetchSpanPercentilesData &&
|
||||
!showResourceAttributesSelector &&
|
||||
initialWaitCompleted,
|
||||
onSuccess: (response) => {
|
||||
if (response.httpStatusCode !== 200) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (shouldUpdateUserPreference) {
|
||||
updateUserPreferenceMutation({
|
||||
name: USER_PREFERENCES.SPAN_PERCENTILE_RESOURCE_ATTRIBUTES,
|
||||
value: [...Object.keys(selectedResourceAttributes)],
|
||||
});
|
||||
|
||||
setShouldUpdateUserPreference(false);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
// Prod Req - Wait for 2 seconds before fetching span percentile data on initial load
|
||||
useEffect(() => {
|
||||
setInitialWaitCompleted(false);
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
setInitialWaitCompleted(true);
|
||||
}, 2000); // 2-second delay
|
||||
|
||||
return (): void => clearTimeout(timer); // Cleanup on re-run or unmount
|
||||
}, [selectedSpan?.spanId]);
|
||||
|
||||
useEffect(() => {
|
||||
if (data?.httpStatusCode !== 200) {
|
||||
setSpanPercentileData(null);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (data) {
const percentileData = {
percentile: data?.data?.position?.percentile || 0,
description: data?.data?.position?.description || '',
percentiles: data?.data?.percentiles || {},
};

setSpanPercentileData(percentileData);
}
}, [data]);

useEffect(() => {
if (userSelectedResourceAttributes) {
const userSelectedResourceAttributesList = (userSelectedResourceAttributes
?.data?.value as string[]).map((attribute: string) => attribute);

let selectedResourceAttributesMap: Record<string, string> = {};

userSelectedResourceAttributesList.forEach((attribute: string) => {
selectedResourceAttributesMap[attribute] =
selectedSpan?.tagMap?.[attribute] || '';
});

// filter out the attributes that are not in the selectedSpan?.tagMap
selectedResourceAttributesMap = Object.fromEntries(
Object.entries(selectedResourceAttributesMap).filter(
([key]) => selectedSpan?.tagMap?.[key] !== undefined,
),
);

const resourceAttributes = Object.entries(selectedSpan?.tagMap || {}).map(
([key, value]) => ({
key,
value,
isSelected:
key === DEFAULT_RESOURCE_ATTRIBUTES.serviceName ||
key === DEFAULT_RESOURCE_ATTRIBUTES.name ||
(key in selectedResourceAttributesMap &&
selectedResourceAttributesMap[key] !== '' &&
selectedResourceAttributesMap[key] !== undefined),
}),
);

// selected resources should be at the top of the list
const selectedResourceAttributes = resourceAttributes.filter(
(resourceAttribute) => resourceAttribute.isSelected,
);

const unselectedResourceAttributes = resourceAttributes.filter(
(resourceAttribute) => !resourceAttribute.isSelected,
);

const sortedResourceAttributes = [
...selectedResourceAttributes,
...unselectedResourceAttributes,
];

updateSpanResourceAttributes(sortedResourceAttributes);

setSelectedResourceAttributes(
selectedResourceAttributesMap as Record<string, string>,
);

setShouldFetchSpanPercentilesData(true);
}

if (isErrorUserSelectedResourceAttributes) {
const resourceAttributes = Object.entries(selectedSpan?.tagMap || {}).map(
([key, value]) => ({
key,
value,
isSelected:
key === DEFAULT_RESOURCE_ATTRIBUTES.serviceName ||
key === DEFAULT_RESOURCE_ATTRIBUTES.name,
}),
);

updateSpanResourceAttributes(resourceAttributes);

setShouldFetchSpanPercentilesData(true);
}
}, [
userSelectedResourceAttributes,
isErrorUserSelectedResourceAttributes,
selectedSpan?.tagMap,
]);

const handleResourceAttributeChange = useCallback(
(key: string, value: string, isSelected: boolean): void => {
updateSpanResourceAttributes((prev) =>
prev.map((resourceAttribute) =>
resourceAttribute.key === key
? { ...resourceAttribute, isSelected }
: resourceAttribute,
),
);

const newSelectedResourceAttributes = { ...selectedResourceAttributes };

if (isSelected) {
newSelectedResourceAttributes[key] = value;
} else {
delete newSelectedResourceAttributes[key];
}

setSelectedResourceAttributes(newSelectedResourceAttributes);

setShouldFetchSpanPercentilesData(true);

setShouldUpdateUserPreference(true);
},
[selectedResourceAttributes],
);

useEffect(() => {
if (
shouldFetchSpanPercentilesData &&
!showResourceAttributesSelector &&
initialWaitCompleted
) {
refetchSpanPercentilesData();

setShouldFetchSpanPercentilesData(false);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
shouldFetchSpanPercentilesData,
showResourceAttributesSelector,
initialWaitCompleted,
]);

return (
<>
<section className="header">
@@ -558,226 +143,13 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
<section className="description">
<div className="item">
<Typography.Text className="attribute-key">span name</Typography.Text>

<div className="value-wrapper span-name-wrapper">
<Tooltip title={selectedSpan.name}>
<Tooltip title={selectedSpan.name}>
<div className="value-wrapper">
<Typography.Text className="attribute-value" ellipsis>
{selectedSpan.name}
</Typography.Text>
</Tooltip>

{isLoadingSpanPercentilesData && (
<div className="loading-spinner-container">
<Loader2 size={16} className="animate-spin" />
</div>
)}

{!isLoadingSpanPercentilesData && spanPercentileData && (
<Tooltip
title={isSpanPercentilesOpen ? '' : spanPercentileTooltipText}
placement="bottomRight"
overlayClassName="span-percentile-tooltip"
arrow={false}
>
<Typography.Text
className="span-percentile-value"
onClick={(): void => setIsSpanPercentilesOpen((prev) => !prev)}
>
p{Math.floor(spanPercentileData?.percentile || 0)}
</Typography.Text>
</Tooltip>
)}
</div>

<AnimatePresence initial={false}>
{isSpanPercentilesOpen && !isErrorSpanPercentilesData && (
<motion.div
initial={{ height: 0, opacity: 0 }}
animate={{ height: 'auto', opacity: 1 }}
exit={{ height: 0, opacity: 0 }}
key="box"
>
<div className="span-percentiles-container">
<div className="span-percentiles-header">
<Typography.Text
className="span-percentiles-header-text"
onClick={(): void => setIsSpanPercentilesOpen((prev) => !prev)}
>
<ChevronDown size={16} /> Span Percentile
</Typography.Text>

{showResourceAttributesSelector ? (
<Check
data-testid="check-icon"
size={16}
className="cursor-pointer span-percentiles-header-icon"
onClick={(): void => setShowResourceAttributesSelector(false)}
/>
) : (
<PlusIcon
data-testid="plus-icon"
size={16}
className="cursor-pointer span-percentiles-header-icon"
onClick={(): void => setShowResourceAttributesSelector(true)}
/>
)}
</div>

{showResourceAttributesSelector && (
<div
className="resource-attributes-select-container"
ref={resourceAttributesSelectorRef}
>
<div className="resource-attributes-select-container-header">
<Input
placeholder="Search resource attributes"
className="resource-attributes-select-container-input"
value={resourceAttributesSearchQuery}
onChange={(e): void =>
setResourceAttributesSearchQuery(e.target.value as string)
}
/>
</div>

<div className="resource-attributes-items">
{spanResourceAttributes
.filter((resourceAttribute) =>
resourceAttribute.key
.toLowerCase()
.includes(resourceAttributesSearchQuery.toLowerCase()),
)
.map((resourceAttribute) => (
<div
className="resource-attributes-select-item"
key={resourceAttribute.key}
>
<div className="resource-attributes-select-item-checkbox">
<Checkbox
checked={resourceAttribute.isSelected}
onChange={(e): void => {
handleResourceAttributeChange(
resourceAttribute.key,
resourceAttribute.value,
e.target.checked,
);
}}
disabled={
resourceAttribute.key === 'service.name' ||
resourceAttribute.key === 'name'
}
>
<div className="resource-attributes-select-item-value">
{resourceAttribute.key}
</div>
</Checkbox>
</div>
</div>
))}
</div>
</div>
)}

<div className="span-percentile-content">
<Typography.Text className="span-percentile-content-title">
This span duration is{' '}
{!isLoadingSpanPercentilesData &&
!isFetchingSpanPercentilesData &&
spanPercentileData ? (
<span className="span-percentile-value">
p{Math.floor(spanPercentileData?.percentile || 0)}
</span>
) : (
<span className="span-percentile-value-loader">
<Loader2 size={12} className="animate-spin" />
</span>
)}{' '}
out of the distribution for this resource evaluated for{' '}
{selectedTimeRange} hour(s) since the span start time.
</Typography.Text>

<div className="span-percentile-timerange">
<Select
labelInValue
placeholder="Select timerange"
className="span-percentile-timerange-select"
value={{
label: `${selectedTimeRange}h : ${dayjs(selectedSpan?.timestamp)
.subtract(selectedTimeRange, 'hour')
.format(DATE_TIME_FORMATS.TIME_SPAN_PERCENTILE)} - ${dayjs(
selectedSpan?.timestamp,
).format(DATE_TIME_FORMATS.TIME_SPAN_PERCENTILE)}`,
value: selectedTimeRange,
}}
onChange={(value): void => {
handleTimeRangeChange(Number(value.value));
}}
options={timerangeOptions}
/>
</div>

<div className="span-percentile-values-table">
<div className="span-percentile-values-table-header-row">
<Typography.Text className="span-percentile-values-table-header">
Percentile
</Typography.Text>

<Typography.Text className="span-percentile-values-table-header">
Duration
</Typography.Text>
</div>

<div className="span-percentile-values-table-data-rows">
{isLoadingSpanPercentilesData || isFetchingSpanPercentilesData ? (
<Skeleton
active
paragraph={{ rows: 3 }}
className="span-percentile-values-table-data-rows-skeleton"
/>
) : (
<>
{Object.entries(spanPercentileData?.percentiles || {}).map(
([percentile, duration]) => (
<div
className="span-percentile-values-table-data-row"
key={percentile}
>
<Typography.Text className="span-percentile-values-table-data-row-key">
{percentile}
</Typography.Text>

<div className="dashed-line" />

<Typography.Text className="span-percentile-values-table-data-row-value">
{getYAxisFormattedValue(`${duration / 1000000}`, 'ms')}
</Typography.Text>
</div>
),
)}

<div className="span-percentile-values-table-data-row current-span-percentile-row">
<Typography.Text className="span-percentile-values-table-data-row-key">
p{Math.floor(spanPercentileData?.percentile || 0)}
</Typography.Text>

<div className="dashed-line" />

<Typography.Text className="span-percentile-values-table-data-row-value">
(this span){' '}
{getYAxisFormattedValue(
`${selectedSpan.durationNano / 1000000}`,
'ms',
)}
</Typography.Text>
</div>
</>
)}
</div>
</div>
</div>
</div>
</motion.div>
)}
</AnimatePresence>
</div>
</Tooltip>
</div>
<div className="item">
<Typography.Text className="attribute-key">span id</Typography.Text>
@@ -854,7 +226,17 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
<div className="related-signals-section">
<SignozRadioGroup
value=""
options={relatedSignalsOptions}
options={[
{
label: (
<div className="view-title">
<LogsIcon width={14} height={14} />
Logs
</div>
),
value: RelatedSignalsViews.LOGS,
},
]}
onChange={handleRelatedSignalsChange}
className="related-signals-radio"
/>

@@ -30,11 +30,6 @@
display: flex;
flex-direction: column;
}
.view-title {
display: flex;
align-items: center;
gap: 8px;
}

.views-tabs-container {
padding: 16px 15px;
@@ -93,10 +88,28 @@
}
}

.infra-metrics-container {
padding-inline: 16px;
.infra-metrics-card {
border: 1px solid var(--bg-slate-400);
.infra-placeholder {
height: 50vh;
width: 100%;
display: flex;
justify-content: center;
align-items: center;
padding: 2rem;
box-sizing: border-box;

.infra-placeholder-content {
text-align: center;
color: var(--bg-slate-400);

svg {
margin-bottom: 1rem;
color: var(--bg-slate-400);
}

.ant-typography {
font-size: 16px;
color: var(--bg-slate-400);
}
}
}
}

@@ -11,20 +11,17 @@ import {
initialQueryState,
} from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import InfraMetrics from 'container/LogDetailedView/InfraMetrics/InfraMetrics';
import { getEmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { BarChart2, Compass, X } from 'lucide-react';
import { Compass, X } from 'lucide-react';
import { useCallback, useMemo, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Span } from 'types/api/trace/getTraceV2';
import { DataSource, LogsAggregatorOperator } from 'types/common/queryBuilder';
import { LogsAggregatorOperator } from 'types/common/queryBuilder';

import { RelatedSignalsViews } from '../constants';
import SpanLogs from '../SpanLogs/SpanLogs';
import { useSpanContextLogs } from '../SpanLogs/useSpanContextLogs';
import { hasInfraMetadata } from '../utils';

const FIVE_MINUTES_IN_MS = 5 * 60 * 1000;

@@ -50,21 +47,6 @@ function SpanRelatedSignals({
);
const isDarkMode = useIsDarkMode();

// Extract infrastructure metadata from span attributes
const infraMetadata = useMemo(() => {
// Only return metadata if span has infrastructure metadata
if (!hasInfraMetadata(selectedSpan)) {
return null;
}

return {
clusterName: selectedSpan.tagMap['k8s.cluster.name'] || '',
podName: selectedSpan.tagMap['k8s.pod.name'] || '',
nodeName: selectedSpan.tagMap['k8s.node.name'] || '',
hostName: selectedSpan.tagMap['host.name'] || '',
spanTimestamp: dayjs(selectedSpan.timestamp).format(),
};
}, [selectedSpan]);
const {
logs,
isLoading,
@@ -86,34 +68,10 @@ function SpanRelatedSignals({
setSelectedView(e.target.value);
}, []);

const tabOptions = useMemo(() => {
const baseOptions = [
{
label: (
<div className="view-title">
<LogsIcon width={14} height={14} />
Logs
</div>
),
value: RelatedSignalsViews.LOGS,
},
];

// Add Infra option if infrastructure metadata is available
if (infraMetadata) {
baseOptions.push({
label: (
<div className="view-title">
<BarChart2 size={14} />
Metrics
</div>
),
value: RelatedSignalsViews.INFRA,
});
}

return baseOptions;
}, [infraMetadata]);
const handleClose = useCallback((): void => {
setSelectedView(RelatedSignalsViews.LOGS);
onClose();
}, [onClose]);

const handleExplorerPageRedirect = useCallback((): void => {
const startTimeMs = traceStartTime - FIVE_MINUTES_IN_MS;
@@ -187,7 +145,7 @@ function SpanRelatedSignals({
</>
}
placement="right"
onClose={onClose}
onClose={handleClose}
open={isOpen}
style={{
overscrollBehavior: 'contain',
@@ -202,7 +160,35 @@ function SpanRelatedSignals({
<div className="views-tabs-container">
<SignozRadioGroup
value={selectedView}
options={tabOptions}
options={[
{
label: (
<div className="view-title">
<LogsIcon width={14} height={14} />
Logs
</div>
),
value: RelatedSignalsViews.LOGS,
},
// {
// label: (
// <div className="view-title">
// <LogsIcon width={14} height={14} />
// Metrics
// </div>
// ),
// value: RelatedSignalsViews.METRICS,
// },
// {
// label: (
// <div className="view-title">
// <Server size={14} />
// Infra
// </div>
// ),
// value: RelatedSignalsViews.INFRA,
// },
]}
onChange={handleTabChange}
className="related-signals-radio"
/>
@@ -211,7 +197,6 @@ function SpanRelatedSignals({
icon={<Compass size={18} />}
className="open-in-explorer"
onClick={handleExplorerPageRedirect}
data-testid="open-in-explorer-button"
>
Open in Logs Explorer
</Button>
@@ -235,17 +220,6 @@ function SpanRelatedSignals({
emptyStateConfig={!hasTraceIdLogs ? emptyStateConfig : undefined}
/>
)}

{selectedView === RelatedSignalsViews.INFRA && infraMetadata && (
<InfraMetrics
clusterName={infraMetadata.clusterName}
podName={infraMetadata.podName}
nodeName={infraMetadata.nodeName}
hostName={infraMetadata.hostName}
timestamp={infraMetadata.spanTimestamp}
dataSource={DataSource.TRACES}
/>
)}
</div>
)}
</Drawer>

@@ -1,502 +0,0 @@
import ROUTES from 'constants/routes';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { server } from 'mocks-server/server';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';

import SpanDetailsDrawer from '../SpanDetailsDrawer';
import {
expectedHostOnlyMetadata,
expectedInfraMetadata,
expectedNodeOnlyMetadata,
expectedPodOnlyMetadata,
mockEmptyMetricsResponse,
mockNodeMetricsResponse,
mockPodMetricsResponse,
mockSpanWithHostOnly,
mockSpanWithInfraMetadata,
mockSpanWithNodeOnly,
mockSpanWithoutInfraMetadata,
mockSpanWithPodOnly,
} from './infraMetricsTestData';

// Mock external dependencies
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
pathname: `${ROUTES.TRACE_DETAIL}`,
}),
}));

const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: mockSafeNavigate,
}),
}));

const mockUpdateAllQueriesOperators = jest.fn().mockReturnValue({
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
aggregateOperator: 'noop',
filters: { items: [], op: 'AND' },
expression: 'A',
disabled: false,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
groupBy: [],
limit: null,
having: [],
},
],
queryFormulas: [],
},
queryType: 'builder',
});

jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: (): any => ({
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
currentQuery: {
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
filters: { items: [], op: 'AND' },
},
],
},
},
}),
}));

const mockWindowOpen = jest.fn();
Object.defineProperty(window, 'open', {
writable: true,
value: mockWindowOpen,
});

// Mock uplot to avoid rendering issues
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});

// Mock GetMetricQueryRange to track API calls
jest.mock('lib/dashboard/getQueryResults', () => ({
GetMetricQueryRange: jest.fn(),
}));

// Mock generateColor
jest.mock('lib/uPlotLib/utils/generateColor', () => ({
generateColor: jest.fn().mockReturnValue('#1f77b4'),
}));

// Mock OverlayScrollbar
jest.mock(
'components/OverlayScrollbar/OverlayScrollbar',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function ({ children }: any) {
return <div data-testid="overlay-scrollbar">{children}</div>;
},
);

// Mock Virtuoso
jest.mock('react-virtuoso', () => ({
Virtuoso: jest.fn(({ data, itemContent }) => (
<div data-testid="virtuoso">
{data?.map((item: any, index: number) => (
<div key={item.id || index} data-testid={`log-item-${item.id}`}>
{itemContent(index, item)}
</div>
))}
</div>
)),
}));

// Mock InfraMetrics component for focused testing
jest.mock(
'container/LogDetailedView/InfraMetrics/InfraMetrics',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function MockInfraMetrics({
podName,
nodeName,
hostName,
clusterName,
timestamp,
dataSource,
}: any) {
return (
<div data-testid="infra-metrics">
<div data-testid="infra-pod-name">{podName}</div>
<div data-testid="infra-node-name">{nodeName}</div>
<div data-testid="infra-host-name">{hostName}</div>
<div data-testid="infra-cluster-name">{clusterName}</div>
<div data-testid="infra-timestamp">{timestamp}</div>
<div data-testid="infra-data-source">{dataSource}</div>
</div>
);
},
);

// Mock PreferenceContextProvider
jest.mock('providers/preferences/context/PreferenceContextProvider', () => ({
PreferenceContextProvider: ({ children }: any): JSX.Element => (
<div>{children}</div>
),
}));

describe('SpanDetailsDrawer - Infra Metrics', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any, sonarjs/no-unused-collection
let apiCallHistory: any[] = [];

beforeEach(() => {
jest.clearAllMocks();
apiCallHistory = [];
mockSafeNavigate.mockClear();
mockWindowOpen.mockClear();
mockUpdateAllQueriesOperators.mockClear();

// Setup API call tracking for infra metrics
(GetMetricQueryRange as jest.Mock).mockImplementation((query) => {
apiCallHistory.push(query);

// Return mock responses for different query types
if (
query?.query?.builder?.queryData?.[0]?.filters?.items?.some(
(item: any) => item.key?.key === 'k8s_pod_name',
)
) {
return Promise.resolve(mockPodMetricsResponse);
}

if (
query?.query?.builder?.queryData?.[0]?.filters?.items?.some(
(item: any) => item.key?.key === 'k8s_node_name',
)
) {
return Promise.resolve(mockNodeMetricsResponse);
}

return Promise.resolve(mockEmptyMetricsResponse);
});
});

afterEach(() => {
server.resetHandlers();
});

// Mock QueryBuilder context value
const mockQueryBuilderContextValue = {
currentQuery: {
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
filters: { items: [], op: 'AND' },
},
],
},
},
stagedQuery: {
builder: {
queryData: [
{
dataSource: 'logs',
queryName: 'A',
filters: { items: [], op: 'AND' },
},
],
},
},
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
panelType: 'list',
redirectWithQuery: jest.fn(),
handleRunQuery: jest.fn(),
handleStageQuery: jest.fn(),
resetQuery: jest.fn(),
};

const renderSpanDetailsDrawer = (props = {}): void => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithInfraMetadata}
traceStartTime={1640995200000} // 2022-01-01 00:00:00
traceEndTime={1640995260000} // 2022-01-01 00:01:00
// eslint-disable-next-line react/jsx-props-no-spreading
{...props}
/>
</QueryBuilderContext.Provider>,
);
};

it('should detect infra metadata from span attributes', async () => {
renderSpanDetailsDrawer();

// Click on metrics tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
expect(infraMetricsButton).toBeInTheDocument();

fireEvent.click(infraMetricsButton);

// Wait for infra metrics to load
await waitFor(() => {
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});

// Verify metadata extraction
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedInfraMetadata.podName,
);
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedInfraMetadata.nodeName,
);
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedInfraMetadata.hostName,
);
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedInfraMetadata.clusterName,
);
expect(screen.getByTestId('infra-data-source')).toHaveTextContent(
DataSource.TRACES,
);
});

it('should not show infra tab when span lacks infra metadata', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithoutInfraMetadata}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);

// Should NOT show infra tab, only logs tab
expect(
screen.queryByRole('radio', { name: /metrics/i }),
).not.toBeInTheDocument();
expect(screen.getByRole('radio', { name: /logs/i })).toBeInTheDocument();
});

it('should show infra tab when span has infra metadata', async () => {
renderSpanDetailsDrawer();

// Should show both logs and infra tabs
expect(screen.getByRole('radio', { name: /metrics/i })).toBeInTheDocument();
expect(screen.getByRole('radio', { name: /logs/i })).toBeInTheDocument();
});

it('should handle pod-only metadata correctly', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithPodOnly}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);

// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);

await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});

// Verify pod-only metadata
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedPodOnlyMetadata.podName,
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedPodOnlyMetadata.clusterName,
);
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedPodOnlyMetadata.nodeName,
);
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedPodOnlyMetadata.hostName,
);
});

it('should handle node-only metadata correctly', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithNodeOnly}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);

// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);

await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});

// Verify node-only metadata
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedNodeOnlyMetadata.nodeName,
);
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedNodeOnlyMetadata.podName,
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedNodeOnlyMetadata.clusterName,
);
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedNodeOnlyMetadata.hostName,
);
});

it('should handle host-only metadata correctly', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithHostOnly}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);

// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);

await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});

// Verify host-only metadata
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
expectedHostOnlyMetadata.hostName,
);
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
expectedHostOnlyMetadata.podName,
);
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
expectedHostOnlyMetadata.nodeName,
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
expectedHostOnlyMetadata.clusterName,
);
});

it('should switch between logs and infra tabs correctly', async () => {
renderSpanDetailsDrawer();

// Initially should show logs tab content
const logsButton = screen.getByRole('radio', { name: /logs/i });
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });

expect(logsButton).toBeInTheDocument();
expect(infraMetricsButton).toBeInTheDocument();

// Ensure logs tab is active and wait for content to load
fireEvent.click(logsButton);

await waitFor(() => {
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('open-in-explorer-button')).toBeInTheDocument();
});

// Click on infra tab
fireEvent.click(infraMetricsButton);

await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});

// Should not show logs content anymore
expect(
screen.queryByTestId('open-in-explorer-button'),
).not.toBeInTheDocument();

// Switch back to logs tab
fireEvent.click(logsButton);

// Should not show infra metrics anymore
await waitFor(() => {
expect(screen.queryByTestId('infra-metrics')).not.toBeInTheDocument();
});

// Verify logs content is shown again
await waitFor(() => {
expect(screen.getByTestId('open-in-explorer-button')).toBeInTheDocument();
});
});

it('should pass correct data source and handle multiple infra identifiers', async () => {
renderSpanDetailsDrawer();

// Should show infra tab when span has any of: clusterName, podName, nodeName, hostName
expect(screen.getByRole('radio', { name: /metrics/i })).toBeInTheDocument();

// Click on infra tab
const infraMetricsButton = screen.getByRole('radio', { name: /metrics/i });
fireEvent.click(infraMetricsButton);

await waitFor(() => {
expect(screen.getByTestId('infra-metrics')).toBeInTheDocument();
});

// Verify TRACES data source is passed
expect(screen.getByTestId('infra-data-source')).toHaveTextContent(
DataSource.TRACES,
);

// All infra identifiers should be passed through
expect(screen.getByTestId('infra-pod-name')).toHaveTextContent(
'test-pod-abc123',
);
expect(screen.getByTestId('infra-node-name')).toHaveTextContent(
'test-node-456',
);
expect(screen.getByTestId('infra-host-name')).toHaveTextContent(
'test-host.example.com',
);
expect(screen.getByTestId('infra-cluster-name')).toHaveTextContent(
'test-cluster',
);
});
});
@@ -1,8 +1,3 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable sonarjs/no-identical-functions */

import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
@@ -15,8 +10,6 @@ import {
userEvent,
waitFor,
} from 'tests/test-utils';
import { SuccessResponseV2 } from 'types/api';
import { GetSpanPercentilesResponseDataProps } from 'types/api/trace/getSpanPercentiles';

import SpanDetailsDrawer from '../SpanDetailsDrawer';
import {
@@ -31,17 +24,11 @@ import {
mockSpanLogsResponse,
} from './mockData';

// Get typed mocks
const mockGetSpanPercentiles = jest.mocked(getSpanPercentiles);
const mockGetUserPreference = jest.mocked(getUserPreference);
const mockSafeNavigate = jest.fn();

// Mock external dependencies
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string; search: string } => ({
useLocation: (): { pathname: string } => ({
pathname: `${ROUTES.TRACE_DETAIL}`,
search: 'trace_id=test-trace-id',
}),
}));

@@ -51,8 +38,9 @@ jest.mock('@signozhq/button', () => ({
),
}));

const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): { safeNavigate: jest.MockedFunction<() => void> } => ({
useSafeNavigate: (): any => ({
safeNavigate: mockSafeNavigate,
}),
}));
@@ -80,10 +68,7 @@ const mockUpdateAllQueriesOperators = jest.fn().mockReturnValue({
});

jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: (): {
updateAllQueriesOperators: jest.MockedFunction<() => any>;
currentQuery: any;
} => ({
useQueryBuilder: (): any => ({
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
currentQuery: {
builder: {
@@ -128,46 +113,26 @@ jest.mock('lib/uPlotLib/utils/generateColor', () => ({
generateColor: jest.fn().mockReturnValue('#1f77b4'),
}));

// Mock getSpanPercentiles API
jest.mock('api/trace/getSpanPercentiles', () => ({
__esModule: true,
default: jest.fn(),
}));

// Mock getUserPreference API
jest.mock('api/v1/user/preferences/name/get', () => ({
__esModule: true,
default: jest.fn(),
}));

jest.mock(
'components/OverlayScrollbar/OverlayScrollbar',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function ({ children }: { children: React.ReactNode }) {
function ({ children }: any) {
return <div data-testid="overlay-scrollbar">{children}</div>;
},
);

// Mock Virtuoso to avoid complex virtualization
jest.mock('react-virtuoso', () => ({
Virtuoso: jest.fn(
({
data,
itemContent,
}: {
data: any[];
itemContent: (index: number, item: any) => React.ReactNode;
}) => (
<div data-testid="virtuoso">
{data?.map((item: any, index: number) => (
<div key={item.id || index} data-testid={`log-item-${item.id}`}>
{itemContent(index, item)}
</div>
))}
</div>
),
),
Virtuoso: jest.fn(({ data, itemContent }) => (
<div data-testid="virtuoso">
{data?.map((item: any, index: number) => (
<div key={item.id || index} data-testid={`log-item-${item.id}`}>
{itemContent(index, item)}
</div>
))}
</div>
)),
}));

// Mock RawLogView component
@@ -180,12 +145,7 @@ jest.mock(
onLogClick,
isHighlighted,
helpTooltip,
}: {
data: any;
onLogClick: (data: any, event: React.MouseEvent) => void;
isHighlighted: boolean;
helpTooltip: string;
}) {
}: any) {
return (
// eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions
<div
@@ -204,11 +164,9 @@ jest.mock(

// Mock PreferenceContextProvider
jest.mock('providers/preferences/context/PreferenceContextProvider', () => ({
PreferenceContextProvider: ({
children,
}: {
children: React.ReactNode;
}): JSX.Element => <div>{children}</div>,
PreferenceContextProvider: ({ children }: any): JSX.Element => (
<div>{children}</div>
),
}));

// Mock QueryBuilder context value
@@ -259,51 +217,6 @@ const renderSpanDetailsDrawer = (props = {}): void => {
);
};

// Constants for repeated strings
const SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER = 'Search resource attributes';
const P75_TEXT = 'p75';
const SPAN_PERCENTILE_TEXT = 'Span Percentile';

// Mock data for span percentiles
const mockSpanPercentileResponse = {
httpStatusCode: 200 as const,
data: {
percentiles: {
p50: 500000000, // 500ms in nanoseconds
p90: 1000000000, // 1s in nanoseconds
p95: 1500000000, // 1.5s in nanoseconds
p99: 2000000000, // 2s in nanoseconds
},
position: {
percentile: 75.5,
description: 'This span is in the 75th percentile',
},
},
};

const mockUserPreferenceResponse = {
statusCode: 200,
httpStatusCode: 200,
error: null,
message: 'Success',
data: {
name: 'span_percentile_resource_attributes',
description: 'Resource attributes for span percentile calculation',
valueType: 'array',
defaultValue: [],
value: ['service.name', 'name', 'http.method'],
allowedValues: [],
allowedScopes: [],
createdAt: '2023-01-01T00:00:00Z',
updatedAt: '2023-01-01T00:00:00Z',
},
};

const mockSpanPercentileErrorResponse = ({
httpStatusCode: 500,
data: null,
} as unknown) as SuccessResponseV2<GetSpanPercentilesResponseDataProps>;

describe('SpanDetailsDrawer', () => {
let apiCallHistory: any = {};

@@ -318,14 +231,12 @@ describe('SpanDetailsDrawer', () => {
mockSafeNavigate.mockClear();
mockWindowOpen.mockClear();
mockUpdateAllQueriesOperators.mockClear();
mockGetSpanPercentiles.mockClear();
mockGetUserPreference.mockClear();

// Setup API call tracking
(GetMetricQueryRange as jest.Mock).mockImplementation((query) => {
// Determine response based on v5 filter expressions
const filterExpression = (query as any)?.query?.builder?.queryData?.[0]
?.filter?.expression;
const filterExpression =
query.query?.builder?.queryData?.[0]?.filter?.expression;

if (!filterExpression) return Promise.resolve(mockEmptyLogsResponse);

@@ -410,17 +321,17 @@ describe('SpanDetailsDrawer', () => {
} = apiCallHistory;

// 1. Span logs query (trace_id + span_id)
expect((spanQuery as any).query.builder.queryData[0].filter.expression).toBe(
expect(spanQuery.query.builder.queryData[0].filter.expression).toBe(
expectedSpanFilterExpression,
);

// 2. Before logs query (trace_id + id < first_span_log_id)
expect(
(beforeQuery as any).query.builder.queryData[0].filter.expression,
).toBe(expectedBeforeFilterExpression);
expect(beforeQuery.query.builder.queryData[0].filter.expression).toBe(
expectedBeforeFilterExpression,
);

// 3. After logs query (trace_id + id > last_span_log_id)
expect((afterQuery as any).query.builder.queryData[0].filter.expression).toBe(
expect(afterQuery.query.builder.queryData[0].filter.expression).toBe(
expectedAfterFilterExpression,
);

@@ -449,19 +360,13 @@ describe('SpanDetailsDrawer', () => {
} = apiCallHistory;

// Verify ordering: span query should use 'desc' (default)
expect((spanQuery as any).query.builder.queryData[0].orderBy[0].order).toBe(
'desc',
);
expect(spanQuery.query.builder.queryData[0].orderBy[0].order).toBe('desc');

// Before query should use 'desc' (default)
expect((beforeQuery as any).query.builder.queryData[0].orderBy[0].order).toBe(
'desc',
);
expect(beforeQuery.query.builder.queryData[0].orderBy[0].order).toBe('desc');

// After query should use 'asc' for chronological order
expect((afterQuery as any).query.builder.queryData[0].orderBy[0].order).toBe(
'asc',
);
expect(afterQuery.query.builder.queryData[0].orderBy[0].order).toBe('asc');
});

it('should navigate to logs explorer with span filters when span log is clicked', async () => {
@@ -622,435 +527,6 @@ describe('SpanDetailsDrawer', () => {
expect(contextLogAfter).toHaveClass('log-context');
expect(contextLogBefore).not.toHaveAttribute('title');
});

// Span Percentile Tests
describe('Span Percentile Functionality', () => {
beforeEach(() => {
// Setup default mocks for percentile tests
mockGetUserPreference.mockResolvedValue(mockUserPreferenceResponse);
mockGetSpanPercentiles.mockResolvedValue(mockSpanPercentileResponse);
});

it('should display span percentile value after successful API call', async () => {
renderSpanDetailsDrawer();

// Wait for the 2-second delay and API call to complete
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);
});

it('should show loading spinner while fetching percentile data', async () => {
// Mock a delayed response
mockGetSpanPercentiles.mockImplementation(
() =>
new Promise((resolve) => {
setTimeout(() => resolve(mockSpanPercentileResponse), 1000);
}),
);

renderSpanDetailsDrawer();

// Wait for loading spinner to appear (it's visible as a div with class loading-spinner-container)
await waitFor(
() => {
const spinnerContainer = document.querySelector(
'.loading-spinner-container',
);
expect(spinnerContainer).toBeInTheDocument();
},
{ timeout: 3000 },
);
});

it('should expand percentile details when percentile value is clicked', async () => {
renderSpanDetailsDrawer();

// Wait for percentile data to load
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

// Click on the percentile value to expand details
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);

// Verify percentile details are expanded
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
// Look for the text that's actually rendered
expect(screen.getByText(/This span duration is/)).toBeInTheDocument();
expect(
screen.getByText(/out of the distribution for this resource/),
).toBeInTheDocument();
});
});

it('should display percentile table with correct values', async () => {
renderSpanDetailsDrawer();

// Wait for percentile data to load
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);

// Wait for the percentile details to expand
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});

// Wait for the table to be visible (it might take a moment to render)
await waitFor(
() => {
expect(screen.getByText('Percentile')).toBeInTheDocument();
expect(screen.getByText('Duration')).toBeInTheDocument();
},
{ timeout: 5000 },
);

// Verify percentile values are displayed
expect(screen.getByText('p50')).toBeInTheDocument();
expect(screen.getByText('p90')).toBeInTheDocument();
expect(screen.getByText('p95')).toBeInTheDocument();
expect(screen.getByText('p99')).toBeInTheDocument();

// Verify current span row - use getAllByText since there are multiple p75 elements
expect(screen.getAllByText(P75_TEXT)).toHaveLength(3); // Should appear in value, expanded details, and table

// Verify the table has the current span indicator (there are multiple occurrences)
expect(screen.getAllByText(/this span/i).length).toBeGreaterThan(0);
});

it('should allow time range selection and trigger API call', async () => {
renderSpanDetailsDrawer();

// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);

// Wait for percentile details to expand
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});

// Find the time range selector and verify it exists
const timeRangeSelector = screen.getByRole('combobox');
expect(timeRangeSelector).toBeInTheDocument();

// Verify the default time range is displayed
expect(screen.getByText(/1.*hour/i)).toBeInTheDocument();

// Verify API was called with default parameters
await waitFor(() => {
expect(mockGetSpanPercentiles).toHaveBeenCalledWith(
expect.objectContaining({
start: expect.any(Number),
end: expect.any(Number),
spanDuration: mockSpan.durationNano,
serviceName: mockSpan.serviceName,
name: mockSpan.name,
resourceAttributes: expect.any(Object),
}),
);
});
});

it('should show resource attributes selector when plus icon is clicked', async () => {
renderSpanDetailsDrawer();

// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);

// Wait for percentile details to expand
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});

// Click the plus icon using test ID
const plusIcon = screen.getByTestId('plus-icon');
fireEvent.click(plusIcon);

// Verify resource attributes selector is shown
await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});
});

it('should filter resource attributes based on search query', async () => {
renderSpanDetailsDrawer();

// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);

// Wait for percentile details to expand and show resource attributes
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});

const plusIcon = screen.getByTestId('plus-icon');
fireEvent.click(plusIcon);

await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});

// Type in search query
const searchInput = screen.getByPlaceholderText(
SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER,
);
fireEvent.change(searchInput, { target: { value: 'http' } });

// Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
});

it('should handle resource attribute selection and trigger API call', async () => {
renderSpanDetailsDrawer();

// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

const percentileValue = screen.getByText(P75_TEXT);
fireEvent.click(percentileValue);

// Wait for percentile details to expand and show resource attributes
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});

const plusIcon = screen.getByTestId('plus-icon');
fireEvent.click(plusIcon);

await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});

// Find and click a checkbox for a resource attribute
const httpMethodCheckbox = screen.getByRole('checkbox', {
name: /http\.method/i,
});
fireEvent.click(httpMethodCheckbox);

// Verify API was called with updated resource attributes
await waitFor(() => {
expect(mockGetSpanPercentiles).toHaveBeenCalledWith(
expect.objectContaining({
resourceAttributes: expect.objectContaining({
'http.method': 'GET',
}),
}),
);
});
});

it('should handle API error gracefully', async () => {
// Mock API error
mockGetSpanPercentiles.mockResolvedValue(mockSpanPercentileErrorResponse);

renderSpanDetailsDrawer();

// Wait for the 2-second delay
await waitFor(
() => {
// Verify no percentile value is displayed on error
expect(screen.queryByText(/p\d+/)).not.toBeInTheDocument();
},
{ timeout: 3000 },
);
});

it('should not display percentile value when API returns non-200 status', async () => {
// Mock API response with non-200 status
mockGetSpanPercentiles.mockResolvedValue(({
httpStatusCode: 500 as const,
data: null,
} as unknown) as Awaited<ReturnType<typeof getSpanPercentiles>>);

renderSpanDetailsDrawer();

// Wait for the 2-second delay
await waitFor(
() => {
// Verify no percentile value is displayed
expect(screen.queryByText(/p\d+/)).not.toBeInTheDocument();
},
{ timeout: 3000 },
);
});

it('should display tooltip with correct content', async () => {
renderSpanDetailsDrawer();

// Wait for percentile data to load
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

// Hover over the percentile value to show tooltip
const percentileValue = screen.getByText(P75_TEXT);
fireEvent.mouseEnter(percentileValue);

// Verify tooltip content - use more flexible text matching
await waitFor(() => {
expect(screen.getByText(/This span duration is/)).toBeInTheDocument();
expect(screen.getByText(/out of the distribution/)).toBeInTheDocument();
expect(
screen.getByText(/evaluated for 1 hour\(s\) since the span start time/),
).toBeInTheDocument();
expect(screen.getByText('Click to learn more')).toBeInTheDocument();
});
});

it('should handle empty percentile data gracefully', async () => {
// Mock empty percentile response
mockGetSpanPercentiles.mockResolvedValue({
httpStatusCode: 200,
data: {
percentiles: {},
position: {
percentile: 0,
description: '',
},
},
});

renderSpanDetailsDrawer();

// Wait for the 2-second delay
await waitFor(
() => {
// Verify p0 is displayed for empty data
expect(screen.getByText('p0')).toBeInTheDocument();
},
{ timeout: 3000 },
);
});

it('should call API with correct parameters', async () => {
renderSpanDetailsDrawer();

// Wait for API call to be made
await waitFor(
() => {
expect(mockGetSpanPercentiles).toHaveBeenCalled();
},
{ timeout: 3000 },
);

// Verify API was called with correct parameters
expect(mockGetSpanPercentiles).toHaveBeenCalledWith({
start: expect.any(Number),
end: expect.any(Number),
spanDuration: mockSpan.durationNano,
serviceName: mockSpan.serviceName,
name: mockSpan.name,
resourceAttributes: expect.any(Object),
});
});

it('should handle user preference loading', async () => {
renderSpanDetailsDrawer();

// Verify getUserPreference was called
await waitFor(() => {
expect(mockGetUserPreference).toHaveBeenCalledWith({
name: 'span_percentile_resource_attributes',
});
});
});

it('should close resource attributes selector when check icon is clicked', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderSpanDetailsDrawer();

// Wait for percentile data to load and expand
await waitFor(
() => {
expect(screen.getByText(P75_TEXT)).toBeInTheDocument();
},
{ timeout: 3000 },
);

const percentileValue = screen.getByText(P75_TEXT);
await user.click(percentileValue);

// Wait for percentile details to expand and show resource attributes
await waitFor(() => {
expect(screen.getByText(SPAN_PERCENTILE_TEXT)).toBeInTheDocument();
});

const plusIcon = screen.getByTestId('plus-icon');
await user.click(plusIcon);

await waitFor(() => {
expect(
screen.getByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).toBeInTheDocument();
});

// Click the check icon to close the selector
const checkIcon = screen.getByTestId('check-icon');
await user.click(checkIcon);

// Verify resource attributes selector is hidden
await waitFor(() => {
expect(
screen.queryByPlaceholderText(SEARCH_RESOURCE_ATTRIBUTES_PLACEHOLDER),
).not.toBeInTheDocument();
});
});
});
});

describe('SpanDetailsDrawer - Search Visibility User Flows', () => {

@@ -1,169 +0,0 @@
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
|
||||
// Constants
|
||||
const TEST_TRACE_ID = 'test-trace-id';
|
||||
const TEST_CLUSTER_NAME = 'test-cluster';
|
||||
const TEST_POD_NAME = 'test-pod-abc123';
|
||||
const TEST_NODE_NAME = 'test-node-456';
|
||||
const TEST_HOST_NAME = 'test-host.example.com';
|
||||
|
||||
// Mock span with infrastructure metadata (pod + node + host)
|
||||
export const mockSpanWithInfraMetadata: Span = {
|
||||
spanId: 'infra-span-id',
|
||||
traceId: TEST_TRACE_ID,
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
name: 'api-service',
|
||||
serviceName: 'api-service',
|
||||
timestamp: 1640995200000000, // 2022-01-01 00:00:00 in microseconds
|
||||
durationNano: 2000000000, // 2 seconds in nanoseconds
|
||||
spanKind: 'server',
|
||||
statusCodeString: 'STATUS_CODE_OK',
|
||||
statusMessage: '',
|
||||
parentSpanId: '',
|
||||
references: [],
|
||||
event: [],
|
||||
tagMap: {
|
||||
'k8s.cluster.name': TEST_CLUSTER_NAME,
|
||||
'k8s.pod.name': TEST_POD_NAME,
|
||||
'k8s.node.name': TEST_NODE_NAME,
|
||||
'host.name': TEST_HOST_NAME,
|
||||
'service.name': 'api-service',
|
||||
'http.method': 'GET',
|
||||
},
|
||||
hasError: false,
|
||||
rootSpanId: '',
|
||||
kind: 0,
|
||||
rootName: '',
|
||||
hasChildren: false,
|
||||
hasSibling: false,
|
||||
subTreeNodeCount: 0,
|
||||
level: 0,
|
||||
};
|
||||
|
||||
// Mock span with only pod metadata
|
||||
export const mockSpanWithPodOnly: Span = {
|
||||
...mockSpanWithInfraMetadata,
|
||||
spanId: 'pod-only-span-id',
|
||||
tagMap: {
|
||||
'k8s.cluster.name': TEST_CLUSTER_NAME,
|
||||
'k8s.pod.name': TEST_POD_NAME,
|
||||
'service.name': 'api-service',
|
||||
},
|
||||
};
|
||||
|
||||
// Mock span with only node metadata
|
||||
export const mockSpanWithNodeOnly: Span = {
|
||||
...mockSpanWithInfraMetadata,
|
||||
spanId: 'node-only-span-id',
|
||||
tagMap: {
|
||||
'k8s.node.name': TEST_NODE_NAME,
|
||||
'service.name': 'api-service',
|
||||
},
|
||||
};
|
||||
|
||||
// Mock span with only host metadata
|
||||
export const mockSpanWithHostOnly: Span = {
|
||||
...mockSpanWithInfraMetadata,
|
||||
spanId: 'host-only-span-id',
|
||||
tagMap: {
|
||||
'host.name': TEST_HOST_NAME,
|
||||
'service.name': 'api-service',
|
||||
},
|
||||
};
|
||||
|
||||
// Mock span without any infrastructure metadata
|
||||
export const mockSpanWithoutInfraMetadata: Span = {
|
||||
...mockSpanWithInfraMetadata,
|
||||
spanId: 'no-infra-span-id',
|
||||
tagMap: {
|
||||
'service.name': 'api-service',
|
||||
'http.method': 'GET',
|
||||
'http.status_code': '200',
|
||||
},
|
||||
};
|
||||
|
||||
// Mock infrastructure metrics API responses
|
||||
export const mockPodMetricsResponse = {
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
metric: { pod_name: TEST_POD_NAME },
|
||||
values: [
|
||||
[1640995200, '0.5'], // CPU usage
|
||||
[1640995260, '0.6'],
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const mockNodeMetricsResponse = {
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
metric: { node_name: TEST_NODE_NAME },
|
||||
values: [
|
||||
[1640995200, '2.1'], // Memory usage
|
||||
[1640995260, '2.3'],
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const mockEmptyMetricsResponse = {
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Expected infrastructure metadata extractions
|
||||
export const expectedInfraMetadata = {
|
||||
clusterName: TEST_CLUSTER_NAME,
|
||||
podName: TEST_POD_NAME,
|
||||
nodeName: TEST_NODE_NAME,
|
||||
hostName: TEST_HOST_NAME,
|
||||
};
|
||||
|
||||
export const expectedPodOnlyMetadata = {
|
||||
clusterName: TEST_CLUSTER_NAME,
|
||||
podName: TEST_POD_NAME,
|
||||
nodeName: '',
|
||||
hostName: '',
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
spanTimestamp: '2022-01-01T00:00:00.000Z',
|
||||
};
|
||||
|
||||
export const expectedNodeOnlyMetadata = {
|
||||
clusterName: '',
|
||||
podName: '',
|
||||
nodeName: TEST_NODE_NAME,
|
||||
hostName: '',
|
||||
spanTimestamp: '2022-01-01T00:00:00.000Z',
|
||||
};
|
||||
|
||||
export const expectedHostOnlyMetadata = {
|
||||
clusterName: '',
|
||||
podName: '',
|
||||
nodeName: '',
|
||||
hostName: TEST_HOST_NAME,
|
||||
spanTimestamp: '2022-01-01T00:00:00.000Z',
|
||||
};
|
||||
@@ -1,11 +1,11 @@
export enum RelatedSignalsViews {
    LOGS = 'logs',
    // METRICS = 'metrics',
    INFRA = 'infra',
    // INFRA = 'infra',
}

export const RELATED_SIGNALS_VIEW_TYPES = {
    LOGS: RelatedSignalsViews.LOGS,
    // METRICS: RelatedSignalsViews.METRICS,
    INFRA: RelatedSignalsViews.INFRA,
    // INFRA: RelatedSignalsViews.INFRA,
};

@@ -1,22 +0,0 @@
import { Span } from 'types/api/trace/getTraceV2';

/**
 * Infrastructure metadata keys that indicate infra signals are available
 */
export const INFRA_METADATA_KEYS = [
    'k8s.cluster.name',
    'k8s.pod.name',
    'k8s.node.name',
    'host.name',
] as const;

/**
 * Checks if a span has any infrastructure metadata attributes
 * @param span - The span to check for infrastructure metadata
 * @returns true if the span has at least one infrastructure metadata key, false otherwise
 */
export function hasInfraMetadata(span: Span | undefined): boolean {
    if (!span?.tagMap) return false;

    return INFRA_METADATA_KEYS.some((key) => span.tagMap?.[key]);
}
@@ -35,21 +35,21 @@ function TraceMetadata(props: ITraceMetadataProps): JSX.Element {
        totalSpans,
        notFound,
    } = props;

    const handlePreviousBtnClick = (): void => {
        if (window.history.length > 1) {
            history.goBack();
        } else {
            history.push(ROUTES.TRACES_EXPLORER);
        }
    };

    return (
        <div className="trace-metadata">
            <section className="metadata-info">
                <div className="first-row">
                    <Button className="previous-btn" onClick={handlePreviousBtnClick}>
                        <ArrowLeft size={14} />
                    <Button className="previous-btn">
                        <ArrowLeft
                            size={14}
                            onClick={(): void => {
                                if (window.history.length > 1) {
                                    history.goBack();
                                } else {
                                    history.push(ROUTES.TRACES_EXPLORER);
                                }
                            }}
                        />
                    </Button>
                    <div className="trace-name">
                        <DraftingCompass size={14} className="drafting" />

@@ -700,27 +700,7 @@ export const getUPlotChartOptions = ({
|
||||
}
|
||||
};
|
||||
|
||||
requestAnimationFrame(() => {
|
||||
const currentMarkerElement = thElement.querySelector(
|
||||
'.u-marker',
|
||||
) as HTMLElement;
|
||||
if (currentMarkerElement) {
|
||||
currentMarkerElement.classList.add('u-marker-clickable');
|
||||
currentMarkerElement.addEventListener(
|
||||
'click',
|
||||
markerClickHandler,
|
||||
false,
|
||||
);
|
||||
currentMarkerElement.addEventListener(
|
||||
'mousedown',
|
||||
(e) => {
|
||||
e.preventDefault();
|
||||
markerClickHandler(e);
|
||||
},
|
||||
false,
|
||||
);
|
||||
}
|
||||
});
|
||||
currentMarker.addEventListener('click', markerClickHandler);
|
||||
|
||||
// Store cleanup function for marker click listener
|
||||
(self as ExtendedUPlot)._legendElementCleanup?.push(() => {
|
||||
@@ -730,7 +710,6 @@ export const getUPlotChartOptions = ({
|
||||
|
||||
// Text click handler - show only/show all behavior (existing behavior)
|
||||
if (textElement) {
|
||||
// Create the click handler function
|
||||
const textClickHandler = (e: Event): void => {
|
||||
e.stopPropagation?.(); // Prevent event bubbling
|
||||
|
||||
@@ -764,45 +743,7 @@ export const getUPlotChartOptions = ({
|
||||
}
|
||||
};
|
||||
|
||||
// Use requestAnimationFrame to ensure DOM is fully ready
|
||||
requestAnimationFrame(() => {
|
||||
// Re-query the element to ensure we have the current DOM element
|
||||
const currentTextElement = thElement.querySelector(
|
||||
'.legend-text',
|
||||
) as HTMLElement;
|
||||
|
||||
if (currentTextElement) {
|
||||
// Force the element to be clickable
|
||||
currentTextElement.style.cursor = 'pointer';
|
||||
currentTextElement.style.pointerEvents = 'auto';
|
||||
|
||||
// Add multiple event listeners to ensure we catch the click
|
||||
currentTextElement.addEventListener(
|
||||
'click',
|
||||
textClickHandler,
|
||||
false,
|
||||
);
|
||||
currentTextElement.addEventListener(
|
||||
'mousedown',
|
||||
(e) => {
|
||||
e.preventDefault();
|
||||
textClickHandler(e);
|
||||
},
|
||||
false,
|
||||
);
|
||||
|
||||
// Also add to the parent th element as a fallback
|
||||
thElement.addEventListener(
|
||||
'click',
|
||||
(e) => {
|
||||
if (e.target === currentTextElement) {
|
||||
textClickHandler();
|
||||
}
|
||||
},
|
||||
false,
|
||||
);
|
||||
}
|
||||
});
|
||||
textElement.addEventListener('click', textClickHandler);
|
||||
|
||||
// Store cleanup function for text click listener
|
||||
(self as ExtendedUPlot)._legendElementCleanup?.push(() => {
|
||||
|
||||
@@ -74,12 +74,6 @@ body {

    .u-marker {
        border-radius: 50%;

        // Clickable marker styles
        &.u-marker-clickable {
            cursor: pointer;
            pointer-events: auto;
        }
    }
}

@@ -1,21 +0,0 @@
export interface GetSpanPercentilesProps {
    start: number;
    end: number;
    spanDuration: number;
    serviceName: string;
    name: string;
    resourceAttributes: Record<string, string>;
}

export interface GetSpanPercentilesResponseDataProps {
    percentiles: Record<string, number>;
    position: {
        percentile: number;
        description: string;
    };
}

export interface GetSpanPercentilesResponsePayloadProps {
    status: string;
    data: GetSpanPercentilesResponseDataProps;
}
@@ -374,12 +374,18 @@ func (module *Module) GetOrCreateUser(ctx context.Context, user *types.User, opt
        return existingUser, nil
    }

    err = module.CreateUser(ctx, user, opts...)
    newUser, err := types.NewUser(user.DisplayName, user.Email, user.Role, user.OrgID)
    if err != nil {
        return nil, err
    }

    return user, nil
    err = module.CreateUser(ctx, newUser, opts...)
    if err != nil {
        return nil, err
    }

    return newUser, nil

}

func (m *Module) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error {

@@ -1860,7 +1860,6 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
        response.DefaultTTLDays = 15
        response.TTLConditions = []model.CustomRetentionRule{}
        response.Status = constants.StatusFailed
        response.ColdStorageTTLDays = -1
        return response, nil
    }

@@ -1895,7 +1894,6 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
    response.ExpectedLogsTime = ttlResult.ExpectedLogsTime
    response.ExpectedLogsMoveTime = ttlResult.ExpectedLogsMoveTime
    response.Status = ttlResult.Status
    response.ColdStorageTTLDays = -1
    if ttlResult.LogsTime > 0 {
        response.DefaultTTLDays = ttlResult.LogsTime / 24
    }

@@ -499,6 +499,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {

    router.HandleFunc("/api/v1/alerts", am.ViewAccess(aH.AlertmanagerAPI.GetAlerts)).Methods(http.MethodGet)

    router.HandleFunc("/api/v1/rules/keys", am.ViewAccess(aH.getRuleAttributeKeys)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/rules/values", am.ViewAccess(aH.getRuleAttributeValues)).Methods(http.MethodGet)

    router.HandleFunc("/api/v1/rules", am.ViewAccess(aH.listRules)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/rules/{id}", am.ViewAccess(aH.getRule)).Methods(http.MethodGet)
    router.HandleFunc("/api/v1/rules", am.EditAccess(aH.createRule)).Methods(http.MethodPost)
@@ -1152,6 +1155,63 @@ func (aH *APIHandler) getRuleStateHistoryTopContributors(w http.ResponseWriter,
    aH.Respond(w, res)
}

func (aH *APIHandler) getRuleAttributeKeys(w http.ResponseWriter, r *http.Request) {
    claims, err := authtypes.ClaimsFromContext(r.Context())
    if err != nil {
        render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get claims from context: %v", err))
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get orgId from claims: %v", err))
        return
    }

    searchText := r.URL.Query().Get("searchText")
    limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
    if err != nil || limit <= 0 {
        limit = 10
    }

    keys, err := aH.ruleManager.GetSearchKeys(r.Context(), searchText, limit, orgID)
    if err != nil {
        render.Error(w, err)
        return
    }
    render.Success(w, http.StatusOK, keys)
}

func (aH *APIHandler) getRuleAttributeValues(w http.ResponseWriter, r *http.Request) {
    claims, err := authtypes.ClaimsFromContext(r.Context())
    if err != nil {
        render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get claims from context: %v", err))
        return
    }

    orgID, err := valuer.NewUUID(claims.OrgID)
    if err != nil {
        render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get orgId from claims: %v", err))
        return
    }
    attributeKey := r.URL.Query().Get("attributeKey")
    if attributeKey == "" {
        render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInvalidInput, "attributeKey is required"))
        return
    }
    searchText := r.URL.Query().Get("searchText")
    limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
    if err != nil || limit <= 0 {
        limit = 10
    }
    keys, err := aH.ruleManager.GetSearchValues(r.Context(), searchText, limit, attributeKey, orgID)
    if err != nil {
        render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get rule search values: %v", err))
        return
    }
    render.Success(w, http.StatusOK, keys)
}

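A quick illustration of how these two new endpoints are meant to be consumed. The routes, the query parameters ("searchText", "limit", "attributeKey") and the ViewAccess requirement come from the handlers above; the base URL and the exact response envelope written by render.Success are assumptions in this sketch.

package main

import (
    "fmt"
    "io"
    "net/http"
    "net/url"
)

func main() {
    base := "http://localhost:8080" // assumed SigNoz API address

    // Suggest attribute keys matching "thresh", capped at 10 results.
    keysURL := base + "/api/v1/rules/keys?" + url.Values{
        "searchText": {"thresh"},
        "limit":      {"10"},
    }.Encode()

    // Suggest values for one attribute key, e.g. the rule state.
    valuesURL := base + "/api/v1/rules/values?" + url.Values{
        "attributeKey": {"state"},
        "searchText":   {"fir"},
        "limit":        {"10"},
    }.Encode()

    for _, u := range []string{keysURL, valuesURL} {
        resp, err := http.Get(u) // auth headers omitted; ViewAccess is required in practice
        if err != nil {
            panic(err)
        }
        body, _ := io.ReadAll(resp.Body)
        resp.Body.Close()
        fmt.Println(u, "->", string(body))
    }
}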
func (aH *APIHandler) listRules(w http.ResponseWriter, r *http.Request) {

    rules, err := aH.ruleManager.ListRuleStates(r.Context())

@@ -36,6 +36,16 @@ func (s AlertState) String() string {
    panic(errors.Errorf("unknown alert state: %d", s))
}

func GetAllRuleStates() []string {
    return []string{
        StateInactive.String(),
        StatePending.String(),
        StateFiring.String(),
        StateNoData.String(),
        StateDisabled.String(),
    }
}

func (s AlertState) MarshalJSON() ([]byte, error) {
    return json.Marshal(s.String())
}

@@ -5,6 +5,7 @@ import (
    "encoding/json"
    "fmt"
    "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "log/slog"
    "sort"
    "strings"
@@ -1083,3 +1084,60 @@ func (m *Manager) GetAlertDetailsForMetricNames(ctx context.Context, metricNames

    return result, nil
}

func (m *Manager) GetSearchKeys(ctx context.Context, searchText string, limit int, orgId valuer.UUID) ([]ruletypes.GetRuleAttributeKeys, error) {
    keys, err := m.ruleStore.GetRuleLabelKeys(ctx, searchText, limit, orgId.String())
    if err != nil {
        return nil, errors.NewInternalf(errors.CodeInternal, "failed to get rule label keys: %v", err)
    }

    result := make([]ruletypes.GetRuleAttributeKeys, len(ruletypes.FixedRuleAttributeKeys))
    copy(result, ruletypes.FixedRuleAttributeKeys)

    for _, key := range keys {
        result = append(result, ruletypes.GetRuleAttributeKeys{
            Key:      key,
            Type:     ruletypes.RuleAttributeTypeLabel,
            DataType: telemetrytypes.FieldDataTypeString,
        })
    }

    return result, nil
}

func (m *Manager) GetSearchValues(ctx context.Context, searchText string, limit int, key string, orgId valuer.UUID) ([]string, error) {
    switch key {
    case ruletypes.RuleAttributeKeyChannel:
        return m.ruleStore.GetChannel(ctx, searchText, limit, orgId.String())
    case ruletypes.RuleAttributeKeyThresholdName:
        return m.ruleStore.GetThresholdNames(ctx, searchText, limit, orgId.String())
    case ruletypes.RuleAttributeKeyCreatedBy:
        return m.ruleStore.GetCreatedBy(ctx, searchText, limit, orgId.String())
    case ruletypes.RuleAttributeKeyUpdatedBy:
        return m.ruleStore.GetUpdatedBy(ctx, searchText, limit, orgId.String())
    case ruletypes.RuleAttributeKeyName:
        return m.ruleStore.GetNames(ctx, searchText, limit, orgId.String())
    case ruletypes.RuleAttributeKeyState:
        allStates := model.GetAllRuleStates()
        if searchText == "" {
            if limit > 0 && limit < len(allStates) {
                return allStates[:limit], nil
            }
            return allStates, nil
        }

        filtered := make([]string, 0)
        searchLower := strings.ToLower(searchText)
        for _, state := range allStates {
            if strings.Contains(strings.ToLower(state), searchLower) {
                filtered = append(filtered, state)
                if limit > 0 && len(filtered) >= limit {
                    break
                }
            }
        }
        return filtered, nil
    default:
        return m.ruleStore.GetRuleLabelValues(ctx, searchText, limit, key, orgId.String())
    }
}

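The state branch above is the only one that filters in memory instead of delegating to the rule store. The sketch below pulls that filtering out into a standalone, hypothetical helper so the behaviour (case-insensitive substring match, capped at limit) is easy to see; the state names used in main are placeholders, the real values come from model.GetAllRuleStates().

package main

import (
    "fmt"
    "strings"
)

// filterStates mirrors the in-memory filtering GetSearchValues applies for
// the "state" attribute (hypothetical helper, not part of the PR).
func filterStates(allStates []string, searchText string, limit int) []string {
    if searchText == "" {
        if limit > 0 && limit < len(allStates) {
            return allStates[:limit]
        }
        return allStates
    }
    filtered := make([]string, 0)
    searchLower := strings.ToLower(searchText)
    for _, state := range allStates {
        if strings.Contains(strings.ToLower(state), searchLower) {
            filtered = append(filtered, state)
            if limit > 0 && len(filtered) >= limit {
                break
            }
        }
    }
    return filtered
}

func main() {
    // Placeholder state names; the real list comes from GetAllRuleStates().
    states := []string{"inactive", "pending", "firing", "nodata", "disabled"}
    fmt.Println(filterStates(states, "in", 10)) // [inactive pending firing]
}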
@@ -2,6 +2,9 @@ package sqlrulestore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/uptrace/bun"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
@@ -101,3 +104,205 @@ func (r *rule) GetStoredRule(ctx context.Context, id valuer.UUID) (*ruletypes.Ru
|
||||
}
|
||||
return rule, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetRuleLabelKeys(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
|
||||
labelKeys := make([]string, 0)
|
||||
searchText = strings.ToLower(searchText) + "%"
|
||||
fmter := r.sqlstore.Formatter()
|
||||
|
||||
elements, elementsAlias := fmter.JSONKeys("data", "$.labels", "keys")
|
||||
elementsAliasStr := string(fmter.LowerExpression(string(elementsAlias)))
|
||||
query := r.sqlstore.BunDB().
|
||||
NewSelect().
|
||||
Distinct().
|
||||
ColumnExpr("?", bun.SafeQuery(elementsAliasStr)).
|
||||
TableExpr("rule, ?", bun.SafeQuery(string(elements))).
|
||||
Where("? LIKE ?", bun.SafeQuery(elementsAliasStr), searchText).
|
||||
Where("org_id = ?", orgId).
|
||||
Limit(limit)
|
||||
err := query.Scan(ctx, &labelKeys)
|
||||
if err != nil {
|
||||
return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "search keys for rule with orgId %s not found", orgId)
|
||||
}
|
||||
|
||||
return labelKeys, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetThresholdNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
|
||||
names := make([]string, 0)
|
||||
searchText = strings.ToLower(searchText) + "%"
|
||||
fmter := r.sqlstore.Formatter()
|
||||
|
||||
// Query threshold spec names
|
||||
specQuery, specCol := fmter.JSONArrayElements("data", "$.condition.thresholds.spec", "spec")
|
||||
nameQuery := string(fmter.JSONExtractString(string(specCol), "$.name"))
|
||||
lowerNameQuery := string(fmter.LowerExpression(nameQuery))
|
||||
|
||||
query := r.sqlstore.BunDB().
|
||||
NewSelect().
|
||||
Distinct().
|
||||
ColumnExpr("?", bun.SafeQuery(nameQuery)).
|
||||
TableExpr("rule, ?", bun.SafeQuery(string(specQuery))).
|
||||
Where("? LIKE ?", bun.SafeQuery(lowerNameQuery), searchText).
|
||||
Where("org_id = ?", orgId).
|
||||
Limit(limit)
|
||||
|
||||
err := query.Scan(ctx, &names)
|
||||
if err != nil {
|
||||
return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "threshold names for rule with orgId %s not found", orgId)
|
||||
}
|
||||
|
||||
if len(names) >= limit {
|
||||
return names[:limit], nil
|
||||
}
|
||||
|
||||
severityQuery := string(fmter.JSONExtractString("data", "$.labels.severity"))
|
||||
lowerSeverityQuery := string(fmter.LowerExpression(severityQuery))
|
||||
|
||||
thresholds := make([]string, 0)
|
||||
query = r.sqlstore.BunDB().
|
||||
NewSelect().
|
||||
Distinct().
|
||||
ColumnExpr("?", bun.SafeQuery(severityQuery)).
|
||||
TableExpr("rule").
|
||||
Where("org_id = ?", orgId).
|
||||
Where("? LIKE ?", bun.SafeQuery(lowerSeverityQuery), searchText).
|
||||
Limit(limit - len(names))
|
||||
|
||||
err = query.Scan(ctx, &thresholds)
|
||||
if err != nil {
|
||||
return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "threshold names for rule with orgId %s not found", orgId)
|
||||
}
|
||||
|
||||
names = append(names, thresholds...)
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetChannel(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
|
||||
names := make([]string, 0)
|
||||
searchText = strings.ToLower(searchText) + "%"
|
||||
fmter := r.sqlstore.Formatter()
|
||||
|
||||
// Query v2 threshold channels
|
||||
specSQL, specCol := fmter.JSONArrayElements("data", "$.condition.thresholds.spec", "spec")
|
||||
channelSQL, channelCol := fmter.JSONArrayOfStrings(string(specCol), "$.channels", "channels")
|
||||
lowerChannelCol := string(fmter.LowerExpression(string(channelCol)))
|
||||
|
||||
query := r.sqlstore.BunDB().
|
||||
NewSelect().
|
||||
Distinct().
|
||||
ColumnExpr("?", bun.SafeQuery(string(channelCol))).
|
||||
TableExpr("rule, ?, ?",
|
||||
bun.SafeQuery(string(specSQL)),
|
||||
bun.SafeQuery(string(channelSQL))).
|
||||
Where("? LIKE ?", bun.SafeQuery(lowerChannelCol), searchText).
|
||||
Where("org_id = ?", orgId).
|
||||
Limit(limit)
|
||||
|
||||
err := query.Scan(ctx, &names)
|
||||
if err != nil {
|
||||
return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "channel for rule with orgId %s not found", orgId)
|
||||
}
|
||||
|
||||
if len(names) >= limit {
|
||||
return names[:limit], nil
|
||||
}
|
||||
|
||||
// Query v1 preferred channels
|
||||
channelsSQL, channelsCol := fmter.JSONArrayOfStrings("data", "$.preferredChannels", "channels")
|
||||
lowerChannelsCol := fmter.LowerExpression(string(channelsCol))
|
||||
|
||||
channels := make([]string, 0)
|
||||
query = r.sqlstore.BunDB().
|
||||
NewSelect().
|
||||
Distinct().
|
||||
ColumnExpr("?", bun.SafeQuery(string(channelsCol))).
|
||||
TableExpr("rule, ?", bun.SafeQuery(string(channelsSQL))).
|
||||
Where("? LIKE ?", bun.SafeQuery(string(lowerChannelsCol)), searchText).
|
||||
Where("org_id = ?", orgId).
|
||||
Limit(limit - len(names))
|
||||
|
||||
err = query.Scan(ctx, &channels)
|
||||
if err != nil {
|
||||
return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "channel for rule with orgId %s not found", orgId)
|
||||
}
|
||||
|
||||
names = append(names, channels...)
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
|
||||
names := make([]string, 0)
|
||||
searchText = strings.ToLower(searchText) + "%"
|
||||
fmter := r.sqlstore.Formatter()
|
||||
|
||||
namePath := fmter.JSONExtractString("data", "$.alert")
|
||||
lowerNamePath := fmter.LowerExpression(string(namePath))
|
||||
|
||||
query := r.sqlstore.BunDB().
|
||||
NewSelect().
|
||||
Distinct().
|
||||
ColumnExpr("?", bun.SafeQuery(string(namePath))).
|
||||
TableExpr("?", bun.SafeQuery("rule")).
|
||||
Where("? LIKE ?", bun.SafeQuery(string(lowerNamePath)), searchText).
|
||||
Where("org_id = ?", orgId).
|
||||
Limit(limit)
|
||||
|
||||
err := query.Scan(ctx, &names)
|
||||
if err != nil {
|
||||
return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "names for rule with orgId %s not found", orgId)
|
||||
}
|
||||
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetCreatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
|
||||
names := make([]string, 0)
|
||||
searchText = strings.ToLower(searchText) + "%"
|
||||
query := r.sqlstore.BunDB().NewSelect().
|
||||
Distinct().
|
||||
Column("created_by").
|
||||
TableExpr("?", bun.SafeQuery("rule")).
|
||||
Where("org_id = ?", orgId).
|
||||
Where("? LIKE ?", bun.SafeQuery(string(r.sqlstore.Formatter().LowerExpression("created_by"))), searchText).
|
||||
Limit(limit)
|
||||
err := query.Scan(ctx, &names)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetUpdatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
|
||||
names := make([]string, 0)
|
||||
searchText = strings.ToLower(searchText) + "%"
|
||||
query := r.sqlstore.BunDB().NewSelect().
|
||||
Distinct().
|
||||
Column("updated_by").
|
||||
TableExpr("?", bun.SafeQuery("rule")).
|
||||
Where("org_id = ?", orgId).
|
||||
Where("? LIKE ?", bun.SafeQuery(string(r.sqlstore.Formatter().LowerExpression("updated_by"))), searchText).
|
||||
Limit(limit)
|
||||
err := query.Scan(ctx, &names)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetRuleLabelValues(ctx context.Context, searchText string, limit int, labelKey string, orgId string) ([]string, error) {
|
||||
names := make([]string, 0)
|
||||
labelPath := r.sqlstore.Formatter().JSONExtractString("data", "$.labels."+labelKey)
|
||||
searchText = strings.ToLower(searchText) + "%"
|
||||
query := r.sqlstore.BunDB().NewSelect().
|
||||
Distinct().
|
||||
ColumnExpr("?", bun.SafeQuery(string(labelPath))).
|
||||
TableExpr("?", bun.SafeQuery("rule")).
|
||||
Where("org_id = ?", orgId).
|
||||
Where("? LIKE ?", bun.SafeQuery(string(r.sqlstore.Formatter().LowerExpression(string(labelPath)))), searchText).Limit(limit)
|
||||
err := query.Scan(ctx, &names)
|
||||
if err != nil {
|
||||
return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "search values for rule with orgId %s not found", orgId)
|
||||
}
|
||||
return names, nil
|
||||
}
|
||||
|
||||
pkg/sqlstore/sqlitesqlstore/formatter.go (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
package sqlitesqlstore
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun/schema"
|
||||
)
|
||||
|
||||
type formatter struct {
|
||||
bunf schema.Formatter
|
||||
}
|
||||
|
||||
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
|
||||
return &formatter{bunf: schema.NewFormatter(dialect)}
|
||||
}
|
||||
|
||||
func (f *formatter) JSONExtractString(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_extract("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "')"...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONType(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_type("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "')"...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONIsArray(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, f.JSONType(column, path)...)
|
||||
sql = append(sql, " = 'array'"...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_each("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
if path != "$" && path != "" {
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "'"...)
|
||||
}
|
||||
sql = append(sql, ") AS "...)
|
||||
sql = f.bunf.AppendIdent(sql, alias)
|
||||
|
||||
return sql, []byte(alias + ".value")
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
|
||||
return f.JSONArrayElements(column, path, alias)
|
||||
}
|
||||
|
||||
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_each("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
if path != "$" && path != "" {
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "'"...)
|
||||
}
|
||||
sql = append(sql, ") AS "...)
|
||||
sql = f.bunf.AppendIdent(sql, alias)
|
||||
|
||||
return sql, []byte(alias + ".key")
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayAgg(expression string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_group_array("...)
|
||||
sql = append(sql, expression...)
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
|
||||
if len(values) == 0 {
|
||||
return []byte("json_array()")
|
||||
}
|
||||
var sql []byte
|
||||
sql = append(sql, "json_array("...)
|
||||
for i, v := range values {
|
||||
if i > 0 {
|
||||
sql = append(sql, ", "...)
|
||||
}
|
||||
sql = append(sql, '\'')
|
||||
sql = append(sql, v...)
|
||||
sql = append(sql, '\'')
|
||||
}
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) TextToJsonColumn(column string) []byte {
|
||||
return f.bunf.AppendIdent([]byte{}, column)
|
||||
}
|
||||
|
||||
func (f *formatter) LowerExpression(expression string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "lower("...)
|
||||
sql = append(sql, expression...)
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
pkg/sqlstore/sqlitesqlstore/formatter_test.go (new file, 397 lines)
@@ -0,0 +1,397 @@
|
||||
package sqlitesqlstore
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/uptrace/bun/dialect/sqlitedialect"
|
||||
)
|
||||
|
||||
func TestJSONExtractString(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `json_extract("data", '$.field')`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.name",
|
||||
expected: `json_extract("metadata", '$.user.name')`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `json_extract("json_col", '$')`,
|
||||
},
|
||||
{
|
||||
name: "array index path",
|
||||
column: "items",
|
||||
path: "$.list[0]",
|
||||
expected: `json_extract("items", '$.list[0]')`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got := string(f.JSONExtractString(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONType(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `json_type("data", '$.field')`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.age",
|
||||
expected: `json_type("metadata", '$.user.age')`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `json_type("json_col", '$')`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got := string(f.JSONType(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONIsArray(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.items",
|
||||
expected: `json_type("data", '$.items') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.tags",
|
||||
expected: `json_type("metadata", '$.user.tags') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `json_type("json_col", '$') = 'array'`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got := string(f.JSONIsArray(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayElements(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "elem",
|
||||
expected: `json_each("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "elem",
|
||||
expected: `json_each("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.items",
|
||||
alias: "item",
|
||||
expected: `json_each("metadata", '$.items') AS "item"`,
|
||||
},
|
||||
{
|
||||
name: "deeply nested path",
|
||||
column: "json_col",
|
||||
path: "$.user.tags",
|
||||
alias: "tag",
|
||||
expected: `json_each("json_col", '$.user.tags') AS "tag"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayOfStrings(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "str",
|
||||
expected: `json_each("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "str",
|
||||
expected: `json_each("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.strings",
|
||||
alias: "s",
|
||||
expected: `json_each("metadata", '$.strings') AS "s"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONKeys(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "k",
|
||||
expected: `json_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "k",
|
||||
expected: `json_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.object",
|
||||
alias: "key",
|
||||
expected: `json_each("metadata", '$.object') AS "key"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayAgg(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column",
|
||||
expression: "id",
|
||||
expected: "json_group_array(id)",
|
||||
},
|
||||
{
|
||||
name: "expression with function",
|
||||
expression: "DISTINCT name",
|
||||
expected: "json_group_array(DISTINCT name)",
|
||||
},
|
||||
{
|
||||
name: "complex expression",
|
||||
expression: "json_extract(data, '$.field')",
|
||||
expected: "json_group_array(json_extract(data, '$.field'))",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got := string(f.JSONArrayAgg(tt.expression))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayLiteral(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
values []string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "empty array",
|
||||
values: []string{},
|
||||
expected: "json_array()",
|
||||
},
|
||||
{
|
||||
name: "single value",
|
||||
values: []string{"value1"},
|
||||
expected: "json_array('value1')",
|
||||
},
|
||||
{
|
||||
name: "multiple values",
|
||||
values: []string{"value1", "value2", "value3"},
|
||||
expected: "json_array('value1', 'value2', 'value3')",
|
||||
},
|
||||
{
|
||||
name: "values with special characters",
|
||||
values: []string{"test", "with space", "with-dash"},
|
||||
expected: "json_array('test', 'with space', 'with-dash')",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got := string(f.JSONArrayLiteral(tt.values...))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTextToJsonColumn(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column name",
|
||||
column: "data",
|
||||
expected: `"data"`,
|
||||
},
|
||||
{
|
||||
name: "column with underscore",
|
||||
column: "user_data",
|
||||
expected: `"user_data"`,
|
||||
},
|
||||
{
|
||||
name: "column with special characters",
|
||||
column: "json-col",
|
||||
expected: `"json-col"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got := string(f.TextToJsonColumn(tt.column))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLowerExpression(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expr string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "json_extract expression",
|
||||
expr: "json_extract(data, '$.field')",
|
||||
expected: "lower(json_extract(data, '$.field'))",
|
||||
},
|
||||
{
|
||||
name: "nested json_extract",
|
||||
expr: "json_extract(metadata, '$.user.name')",
|
||||
expected: "lower(json_extract(metadata, '$.user.name'))",
|
||||
},
|
||||
{
|
||||
name: "json_type expression",
|
||||
expr: "json_type(data, '$.field')",
|
||||
expected: "lower(json_type(data, '$.field'))",
|
||||
},
|
||||
{
|
||||
name: "string concatenation",
|
||||
expr: "first_name || ' ' || last_name",
|
||||
expected: "lower(first_name || ' ' || last_name)",
|
||||
},
|
||||
{
|
||||
name: "CAST expression",
|
||||
expr: "CAST(value AS TEXT)",
|
||||
expected: "lower(CAST(value AS TEXT))",
|
||||
},
|
||||
{
|
||||
name: "COALESCE expression",
|
||||
expr: "COALESCE(name, 'default')",
|
||||
expected: "lower(COALESCE(name, 'default'))",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(sqlitedialect.New())
|
||||
got := string(f.LowerExpression(tt.expr))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -17,10 +17,11 @@ import (
)

type provider struct {
    settings factory.ScopedProviderSettings
    sqldb    *sql.DB
    bundb    *sqlstore.BunDB
    dialect  *dialect
    settings  factory.ScopedProviderSettings
    sqldb     *sql.DB
    bundb     *sqlstore.BunDB
    dialect   *dialect
    formatter sqlstore.SQLFormatter
}

func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -54,11 +55,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
    settings.Logger().InfoContext(ctx, "connected to sqlite", "path", config.Sqlite.Path)
    sqldb.SetMaxOpenConns(config.Connection.MaxOpenConns)

    sqliteDialect := sqlitedialect.New()
    bunDB := sqlstore.NewBunDB(settings, sqldb, sqliteDialect, hooks)
    return &provider{
        settings: settings,
        sqldb:    sqldb,
        bundb:    sqlstore.NewBunDB(settings, sqldb, sqlitedialect.New(), hooks),
        dialect:  new(dialect),
        settings:  settings,
        sqldb:     sqldb,
        bundb:     bunDB,
        dialect:   new(dialect),
        formatter: newFormatter(bunDB.Dialect()),
    }, nil
}

@@ -74,6 +78,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
    return provider.dialect
}

func (provider *provider) Formatter() sqlstore.SQLFormatter {
    return provider.formatter
}

func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
    return provider.bundb.BunDBCtx(ctx)
}

@@ -20,6 +20,8 @@ type SQLStore interface {
    // Returns the dialect of the database.
    Dialect() SQLDialect

    Formatter() SQLFormatter

    // RunInTxCtx runs the given callback in a transaction. It creates and injects a new context with the transaction.
    // If a transaction is present in the context, it will be used.
    RunInTxCtx(ctx context.Context, opts *SQLStoreTxOptions, cb func(ctx context.Context) error) error
@@ -86,3 +88,35 @@ type SQLDialect interface {
    // as an argument.
    ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error
}

type SQLFormatter interface {
    // JSONExtractString takes path in sqlite format like "$.labels.severity"
    JSONExtractString(column, path string) []byte

    // JSONType used to determine the type of the value extracted from the path
    JSONType(column, path string) []byte

    // JSONIsArray used to check whether the value is array or not
    JSONIsArray(column, path string) []byte

    // JSONArrayElements returns query as well as columns alias to be used for select and where clause
    JSONArrayElements(column, path, alias string) ([]byte, []byte)

    // JSONArrayOfStrings returns query as well as columns alias to be used for select and where clause
    JSONArrayOfStrings(column, path, alias string) ([]byte, []byte)

    // JSONArrayAgg aggregates values into a JSON array
    JSONArrayAgg(expression string) []byte

    // JSONArrayLiteral creates a literal JSON array from the given string values
    JSONArrayLiteral(values ...string) []byte

    // JSONKeys return extracted key from json as well as alias to be used for select and where clause
    JSONKeys(column, path, alias string) ([]byte, []byte)

    // TextToJsonColumn converts a text column to JSON type
    TextToJsonColumn(column string) []byte

    // LowerExpression wraps any SQL expression with lower() function for case-insensitive operations
    LowerExpression(expression string) []byte
}

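A minimal sketch of how a store-backed caller composes these formatter helpers, mirroring the GetRuleLabelKeys query in the rule store earlier in this diff. It assumes a ready sqlstore.SQLStore (with BunDB() and Formatter() as used elsewhere in this change); the table name and JSON path are illustrative only.

package example

import (
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun"
)

// labelKeysQuery builds a DISTINCT, case-insensitive lookup of JSON label keys
// for one org, using only the SQLFormatter helpers defined above.
func labelKeysQuery(store sqlstore.SQLStore, orgID string, limit int) *bun.SelectQuery {
    fmter := store.Formatter()

    // json_each(...) over the rule labels, plus a lower(...)-wrapped alias for
    // case-insensitive matching in the WHERE clause.
    elements, elementsAlias := fmter.JSONKeys("data", "$.labels", "keys")
    lowerAlias := fmter.LowerExpression(string(elementsAlias))

    return store.BunDB().NewSelect().
        Distinct().
        ColumnExpr("?", bun.SafeQuery(string(lowerAlias))).
        TableExpr("rule, ?", bun.SafeQuery(string(elements))).
        Where("org_id = ?", orgID).
        Limit(limit)
}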
pkg/sqlstore/sqlstoretest/formatter.go (new file, 112 lines)
@@ -0,0 +1,112 @@
|
||||
package sqlstoretest
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun/schema"
|
||||
)
|
||||
|
||||
type formatter struct {
|
||||
bunf schema.Formatter
|
||||
}
|
||||
|
||||
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
|
||||
return &formatter{bunf: schema.NewFormatter(dialect)}
|
||||
}
|
||||
|
||||
func (f *formatter) JSONExtractString(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_extract("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "')"...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONType(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_type("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "')"...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONIsArray(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, f.JSONType(column, path)...)
|
||||
sql = append(sql, " = 'array'"...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_each("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
if path != "$" && path != "" {
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "'"...)
|
||||
}
|
||||
sql = append(sql, ") AS "...)
|
||||
sql = f.bunf.AppendIdent(sql, alias)
|
||||
|
||||
return sql, []byte(alias + ".value")
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
|
||||
return f.JSONArrayElements(column, path, alias)
|
||||
}
|
||||
|
||||
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_each("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
if path != "$" && path != "" {
|
||||
sql = append(sql, ", '"...)
|
||||
sql = append(sql, path...)
|
||||
sql = append(sql, "'"...)
|
||||
}
|
||||
sql = append(sql, ") AS "...)
|
||||
sql = f.bunf.AppendIdent(sql, alias)
|
||||
|
||||
return sql, []byte(alias + ".key")
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayAgg(expression string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "json_group_array("...)
|
||||
sql = append(sql, expression...)
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
|
||||
if len(values) == 0 {
|
||||
return []byte("json_array()")
|
||||
}
|
||||
var sql []byte
|
||||
sql = append(sql, "json_array("...)
|
||||
for i, v := range values {
|
||||
if i > 0 {
|
||||
sql = append(sql, ", "...)
|
||||
}
|
||||
sql = append(sql, '\'')
|
||||
sql = append(sql, v...)
|
||||
sql = append(sql, '\'')
|
||||
}
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) TextToJsonColumn(column string) []byte {
|
||||
return f.bunf.AppendIdent([]byte{}, column)
|
||||
}
|
||||
|
||||
func (f *formatter) LowerExpression(expression string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "lower("...)
|
||||
sql = append(sql, expression...)
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
@@ -15,10 +15,11 @@ import (
var _ sqlstore.SQLStore = (*Provider)(nil)

type Provider struct {
    db      *sql.DB
    mock    sqlmock.Sqlmock
    bunDB   *bun.DB
    dialect *dialect
    db        *sql.DB
    mock      sqlmock.Sqlmock
    bunDB     *bun.DB
    dialect   *dialect
    formatter sqlstore.SQLFormatter
}

func New(config sqlstore.Config, matcher sqlmock.QueryMatcher) *Provider {
@@ -38,10 +39,11 @@ func New(config sqlstore.Config, matcher sqlmock.QueryMatcher) *Provider {
    }

    return &Provider{
        db:      db,
        mock:    mock,
        bunDB:   bunDB,
        dialect: new(dialect),
        db:        db,
        mock:      mock,
        bunDB:     bunDB,
        dialect:   new(dialect),
        formatter: newFormatter(bunDB.Dialect()),
    }
}

@@ -61,6 +63,8 @@ func (provider *Provider) Dialect() sqlstore.SQLDialect {
    return provider.dialect
}

func (provider *Provider) Formatter() sqlstore.SQLFormatter { return provider.formatter }

func (provider *Provider) BunDBCtx(ctx context.Context) bun.IDB {
    return provider.bunDB
}

@@ -4,6 +4,8 @@ import (
    "context"
    "encoding/json"
    "fmt"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "slices"
    "time"
    "unicode/utf8"
@@ -452,3 +454,18 @@ func (g *GettableRule) MarshalJSON() ([]byte, error) {
        return json.Marshal(aux)
    }
}

type RuleAttributeKeyType struct {
    valuer.String
}

var (
    RuleAttributeTypeFixed = RuleAttributeKeyType{valuer.NewString("fixed")}
    RuleAttributeTypeLabel = RuleAttributeKeyType{valuer.NewString("label")}
)

type GetRuleAttributeKeys struct {
    Key      string                       `json:"key"`
    DataType telemetrytypes.FieldDataType `json:"dataType"`
    Type     RuleAttributeKeyType         `json:"type"`
}

@@ -1,5 +1,31 @@
package ruletypes

const CriticalThresholdName = "CRITICAL"
const LabelThresholdName = "threshold.name"
const LabelRuleId = "ruleId"
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"

const (
    CriticalThresholdName = "CRITICAL"
    LabelThresholdName    = "threshold.name"
    LabelRuleId           = "ruleId"

    // Rule attribute key constants for search and filtering
    RuleAttributeKeyCreatedBy     = "created_by"
    RuleAttributeKeyUpdatedBy     = "updated_by"
    RuleAttributeKeyName          = "name"
    RuleAttributeKeyThresholdName = "threshold.name"
    RuleAttributeKeyPolicy        = "policy"
    RuleAttributeKeyChannel       = "channel"
    RuleAttributeKeyState         = "state"
    //RuleAttributeKeyRuleType = "type"
)

var (
    FixedRuleAttributeKeys = []GetRuleAttributeKeys{
        {Key: RuleAttributeKeyCreatedBy, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
        {Key: RuleAttributeKeyUpdatedBy, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
        {Key: RuleAttributeKeyName, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
        {Key: RuleAttributeKeyThresholdName, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
        {Key: RuleAttributeKeyChannel, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
        {Key: RuleAttributeKeyPolicy, DataType: telemetrytypes.FieldDataTypeBool, Type: RuleAttributeTypeFixed},
        {Key: RuleAttributeKeyState, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
    }
)

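For reference, a hedged sketch of what one of these fixed-key entries serialises to when the keys endpoint returns them. The JSON field names come from the GetRuleAttributeKeys tags; the rendering of the dataType and type values depends on how telemetrytypes.FieldDataType and valuer.String marshal, so the commented output is an assumption.

package main

import (
    "encoding/json"
    "fmt"

    ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
)

func main() {
    out, err := json.Marshal(ruletypes.FixedRuleAttributeKeys[:2])
    if err != nil {
        panic(err)
    }
    // Expected to resemble (exact strings depend on the value types):
    // [{"key":"created_by","dataType":"string","type":"fixed"},
    //  {"key":"updated_by","dataType":"string","type":"fixed"}]
    fmt.Println(string(out))
}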
@@ -53,4 +53,11 @@ type RuleStore interface {
    DeleteRule(context.Context, valuer.UUID, func(context.Context) error) error
    GetStoredRules(context.Context, string) ([]*Rule, error)
    GetStoredRule(context.Context, valuer.UUID) (*Rule, error)
    GetRuleLabelKeys(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
    GetThresholdNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
    GetChannel(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
    GetNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
    GetCreatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
    GetUpdatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
    GetRuleLabelValues(ctx context.Context, searchText string, limit int, labelKey string, orgId string) ([]string, error)
}

@@ -103,7 +103,7 @@ func (r BasicRuleThresholds) ShouldAlert(series v3.Series, unit string) (Vector,
    for _, threshold := range thresholds {
        smpl, shouldAlert := threshold.shouldAlert(series, unit)
        if shouldAlert {
            smpl.Target = *threshold.TargetValue
            smpl.Target = threshold.target(unit)
            smpl.TargetUnit = threshold.TargetUnit
            resultVector = append(resultVector, smpl)
        }

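The one-line change above swaps the raw *threshold.TargetValue for threshold.target(unit), i.e. the target is converted into the rule's unit before being attached to the sample. The snippet below illustrates that idea only, with a hypothetical conversion table; it is not the SigNoz implementation.

package main

import "fmt"

// toSeconds maps a small, assumed subset of time units to seconds.
var toSeconds = map[string]float64{"ms": 1e-3, "s": 1, "m": 60}

// convertTarget converts a threshold target from targetUnit into ruleUnit.
func convertTarget(value float64, targetUnit, ruleUnit string) float64 {
    if targetUnit == "" || ruleUnit == "" || targetUnit == ruleUnit {
        return value // nothing to convert
    }
    return value * toSeconds[targetUnit] / toSeconds[ruleUnit]
}

func main() {
    // A 100 ms target compared against a query reporting seconds becomes 0.1,
    // so a 0.15 s (150 ms) sample breaches it — matching the deleted test below.
    fmt.Println(convertTarget(100, "ms", "s")) // 0.1
}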
@@ -1,294 +0,0 @@
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
)
|
||||
|
||||
func TestBasicRuleThresholdShouldAlert_UnitConversion(t *testing.T) {
|
||||
target := 100.0
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
threshold BasicRuleThreshold
|
||||
series v3.Series
|
||||
ruleUnit string
|
||||
shouldAlert bool
|
||||
}{
|
||||
{
|
||||
name: "milliseconds to seconds conversion - should alert",
|
||||
threshold: BasicRuleThreshold{
|
||||
Name: "test",
|
||||
TargetValue: &target, // 100ms
|
||||
TargetUnit: "ms",
|
||||
MatchType: AtleastOnce,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.15, Timestamp: 1000}, // 150ms in seconds
|
||||
},
|
||||
},
|
||||
ruleUnit: "s",
|
||||
shouldAlert: true,
|
||||
},
|
||||
{
|
||||
name: "milliseconds to seconds conversion - should not alert",
|
||||
threshold: BasicRuleThreshold{
|
||||
Name: "test",
|
||||
TargetValue: &target, // 100ms
|
||||
TargetUnit: "ms",
|
||||
MatchType: AtleastOnce,
|
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
				},
			},
			ruleUnit: "s",
			shouldAlert: false,
		},
		{
			name: "seconds to milliseconds conversion - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100s
				TargetUnit: "s",
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 150000, Timestamp: 1000}, // 150000ms = 150s
				},
			},
			ruleUnit: "ms",
			shouldAlert: true,
		},
		// Binary byte conversions
		{
			name: "bytes to kibibytes conversion - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100 bytes
				TargetUnit: "bytes",
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.15, Timestamp: 1000}, // 0.15KiB ≈ 153.6 bytes
				},
			},
			ruleUnit: "kbytes",
			shouldAlert: true,
		},
		{
			name: "kibibytes to mebibytes conversion - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100KiB
				TargetUnit: "kbytes",
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.15, Timestamp: 1000},
				},
			},
			ruleUnit: "mbytes",
			shouldAlert: true,
		},
		// ValueIsBelow with unit conversion
		{
			name: "milliseconds to seconds with ValueIsBelow - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100ms
				TargetUnit: "ms",
				MatchType: AtleastOnce,
				CompareOp: ValueIsBelow,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
				},
			},
			ruleUnit: "s",
			shouldAlert: true,
		},
		{
			name: "milliseconds to seconds with OnAverage - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100ms
				TargetUnit: "ms",
				MatchType: OnAverage,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.08, Timestamp: 1000}, // 80ms
					{Value: 0.12, Timestamp: 2000}, // 120ms
					{Value: 0.15, Timestamp: 3000}, // 150ms
				},
			},
			ruleUnit: "s",
			shouldAlert: true,
		},
		{
			name: "decimal megabytes to gigabytes with InTotal - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100MB
				TargetUnit: "decmbytes",
				MatchType: InTotal,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.04, Timestamp: 1000}, // 40MB
					{Value: 0.05, Timestamp: 2000}, // 50MB
					{Value: 0.03, Timestamp: 3000}, // 30MB
				},
			},
			ruleUnit: "decgbytes",
			shouldAlert: true,
		},
		{
			name: "milliseconds to seconds with AllTheTimes - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100ms
				TargetUnit: "ms",
				MatchType: AllTheTimes,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.11, Timestamp: 1000}, // 110ms
					{Value: 0.12, Timestamp: 2000}, // 120ms
					{Value: 0.15, Timestamp: 3000}, // 150ms
				},
			},
			ruleUnit: "s",
			shouldAlert: true,
		},
		{
			name: "kilobytes to megabytes with Last - should not alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100kB
				TargetUnit: "deckbytes",
				MatchType: Last,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.15, Timestamp: 1000}, // 150kB
					{Value: 0.05, Timestamp: 2000}, // 50kB (last value)
				},
			},
			ruleUnit: "decmbytes",
			shouldAlert: false,
		},
		// Mixed units - bytes/second rate conversions
		{
			name: "bytes per second to kilobytes per second - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100 bytes/s
				TargetUnit: "Bps",
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.15, Timestamp: 1000},
				},
			},
			ruleUnit: "KBs",
			shouldAlert: true,
		},
		// Same unit (no conversion needed)
		{
			name: "same unit - no conversion needed - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100ms
				TargetUnit: "ms",
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 150, Timestamp: 1000}, // 150ms
				},
			},
			ruleUnit: "ms",
			shouldAlert: true,
		},
		// Empty unit (unitless) - no conversion
		{
			name: "empty unit - no conversion - should alert",
			threshold: BasicRuleThreshold{
				Name: "test",
				TargetValue: &target, // 100 (unitless)
				TargetUnit: "",
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 150, Timestamp: 1000}, // 150 (unitless)
				},
			},
			ruleUnit: "",
			shouldAlert: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			thresholds := BasicRuleThresholds{tt.threshold}
			vector, err := thresholds.ShouldAlert(tt.series, tt.ruleUnit)
			assert.NoError(t, err)

			alert := len(vector) > 0
			assert.Equal(t, tt.shouldAlert, alert)

			if tt.shouldAlert && alert {
				sample := vector[0]

				hasThresholdLabel := false
				for _, label := range sample.Metric {
					if label.Name == LabelThresholdName && label.Value == "test" {
						hasThresholdLabel = true
						break
					}
				}
				assert.True(t, hasThresholdLabel)
				assert.Equal(t, *tt.threshold.TargetValue, sample.Target)
				assert.Equal(t, tt.threshold.TargetUnit, sample.TargetUnit)
			}
		})
	}
}
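The conversion cases above all reduce to the same arithmetic: the threshold's TargetValue (expressed in TargetUnit) and the series points (expressed in ruleUnit) must be brought to a common unit before CompareOp is applied. The sketch below (in Python, for brevity) works through two of the cases with an illustrative factor table; the `normalize` helper and its factors are assumptions for this example, not the converter the rule code uses.

```python
# Minimal sketch of the unit normalization the test cases above rely on.
# The factor table and helper are illustrative assumptions, not SigNoz code.
FACTORS_TO_BASE = {
    "ms": 0.001, "s": 1.0,                # durations -> seconds
    "bytes": 1.0, "kbytes": 1024.0,       # binary sizes -> bytes
    "decmbytes": 1e6, "decgbytes": 1e9,   # decimal sizes -> bytes
}


def normalize(value: float, unit: str) -> float:
    """Convert a value into its base unit (seconds or bytes)."""
    return value * FACTORS_TO_BASE.get(unit, 1.0)


# Milliseconds-to-seconds cases: target 100ms, a point of 0.15s = 150ms -> above threshold.
assert normalize(0.15, "s") > normalize(100, "ms")

# "bytes to kibibytes" case: target 100 bytes, point 0.15KiB ≈ 153.6 bytes -> above threshold.
assert normalize(0.15, "kbytes") > normalize(100, "bytes")
```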
@@ -74,63 +74,6 @@ def zeus(
    )


@pytest.fixture(name="gateway", scope="package")
def gateway(
    network: Network,
    request: pytest.FixtureRequest,
    pytestconfig: pytest.Config,
) -> types.TestContainerDocker:
    """
    Package-scoped fixture for running gateway
    """

    def create() -> types.TestContainerDocker:
        container = WireMockContainer(image="wiremock/wiremock:2.35.1-1", secure=False)
        container.with_exposed_ports(8080)
        container.with_network(network)
        container.start()

        return types.TestContainerDocker(
            id=container.get_wrapped_container().id,
            host_configs={
                "8080": types.TestContainerUrlConfig(
                    "http",
                    container.get_container_host_ip(),
                    container.get_exposed_port(8080),
                )
            },
            container_configs={
                "8080": types.TestContainerUrlConfig(
                    "http", container.get_wrapped_container().name, 8080
                )
            },
        )

    def delete(container: types.TestContainerDocker):
        client = docker.from_env()
        try:
            client.containers.get(container_id=container.id).stop()
            client.containers.get(container_id=container.id).remove(v=True)
        except docker.errors.NotFound:
            logger.info(
                "Skipping removal of Gateway, Gateway(%s) not found. Maybe it was manually removed?",
                {"id": container.id},
            )

    def restore(cache: dict) -> types.TestContainerDocker:
        return types.TestContainerDocker.from_cache(cache)

    return dev.wrap(
        request,
        pytestconfig,
        "gateway",
        lambda: types.TestContainerDocker(id="", host_configs={}, container_configs={}),
        create,
        delete,
        restore,
    )


@pytest.fixture(name="make_http_mocks", scope="function")
def make_http_mocks() -> Callable[[types.TestContainerDocker, List[Mapping]], None]:
    def _make_http_mocks(

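For reference on how the `gateway` and `make_http_mocks` fixtures above fit together: `gateway` returns a `types.TestContainerDocker` whose host and container configs expose the WireMock port, and `make_http_mocks` registers a list of `Mapping` objects against such a container. A minimal sketch of a test built on the pair might look like this; the `/v1/health` endpoint and the exact fixture wiring are assumptions for illustration only.

```python
from http import HTTPStatus

import requests
from wiremock.client import HttpMethods, Mapping, MappingRequest, MappingResponse

from fixtures import types


def test_gateway_stub(
    gateway: types.TestContainerDocker,
    make_http_mocks,  # callable fixture: (container, [Mapping, ...]) -> None
) -> None:
    # Stub a hypothetical endpoint on the WireMock-backed gateway container.
    make_http_mocks(
        gateway,
        [
            Mapping(
                request=MappingRequest(method=HttpMethods.GET, url="/v1/health"),
                response=MappingResponse(status=200, json_body={"status": "ok"}),
                persistent=False,
            )
        ],
    )

    # Call the stub through the host-facing URL exposed by the fixture.
    response = requests.get(
        gateway.host_configs["8080"].get("/v1/health"),
        timeout=10,
    )
    assert response.status_code == HTTPStatus.OK
```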
@@ -20,7 +20,6 @@ logger = setup_logger(__name__)
def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
    network: Network,
    zeus: types.TestContainerDocker,
    gateway: types.TestContainerDocker,
    sqlstore: types.TestContainerSQL,
    clickhouse: types.TestContainerClickhouse,
    request: pytest.FixtureRequest,
@@ -57,7 +56,6 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
            "SIGNOZ_WEB_DIRECTORY": "/root/web",
            "SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
            "SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
            "SIGNOZ_GATEWAY_URL": gateway.container_configs["8080"].base(),
        }
        | sqlstore.env
        | clickhouse.env
@@ -123,7 +121,6 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
            sqlstore=sqlstore,
            telemetrystore=clickhouse,
            zeus=zeus,
            gateway=gateway,
        )

    def delete(container: types.SigNoz) -> None:
@@ -144,7 +141,6 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
            sqlstore=sqlstore,
            telemetrystore=clickhouse,
            zeus=zeus,
            gateway=gateway,
        )

    return dev.wrap(
@@ -160,7 +156,6 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
            sqlstore=sqlstore,
            telemetrystore=clickhouse,
            zeus=zeus,
            gateway=gateway,
        ),
        create=create,
        delete=delete,

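A note on the second hunk above: the lines beginning with `|` are Python 3.9+ dict-union continuations, not diff markers. The container environment is the fixture's own settings merged with the sqlstore and clickhouse environment maps, and later operands win on key conflicts. A small illustration with made-up keys:

```python
# Dict-union merge as used for the container environment above.
# The sqlstore/clickhouse keys here are made-up placeholders.
base_env = {"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug"}
sqlstore_env = {"EXAMPLE_SQLSTORE_DSN": "sqlite:///signoz.db"}
clickhouse_env = {"EXAMPLE_CLICKHOUSE_DSN": "tcp://clickhouse:9000"}

env = base_env | sqlstore_env | clickhouse_env  # later maps override earlier keys
assert "EXAMPLE_SQLSTORE_DSN" in env and "EXAMPLE_CLICKHOUSE_DSN" in env
```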
@@ -127,13 +127,12 @@ class SigNoz:
    sqlstore: TestContainerSQL
    telemetrystore: TestContainerClickhouse
    zeus: TestContainerDocker
    gateway: TestContainerDocker

    def __cache__(self) -> dict:
        return self.self.__cache__()

    def __log__(self) -> str:
        return f"SigNoz(self={self.self.__log__()}, sqlstore={self.sqlstore.__log__()}, telemetrystore={self.telemetrystore.__log__()}, zeus={self.zeus.__log__()}, gateway={self.gateway.__log__()})"
        return f"SigNoz(self={self.self.__log__()}, sqlstore={self.sqlstore.__log__()}, telemetrystore={self.telemetrystore.__log__()}, zeus={self.zeus.__log__()})"


@dataclass

@@ -1,161 +0,0 @@
from http import HTTPStatus
from typing import Callable

import requests
from wiremock.client import (
    HttpMethods,
    Mapping,
    MappingRequest,
    MappingResponse,
    WireMockMatchers,
)

from fixtures import types
from fixtures.auth import add_license
from fixtures.logger import setup_logger

logger = setup_logger(__name__)


def test_generate_connection_params(
    signoz: types.SigNoz,
    create_user_admin: types.Operation, # pylint: disable=unused-argument
    make_http_mocks: Callable[[types.TestContainerDocker, list], None],
    get_token: Callable[[str, str], str],
) -> None:
    """Test to generate connection parameters for AWS SigNoz cloud integration."""
    # Get authentication token for admin user
    admin_token = get_token("admin@integration.test", "password123Z$")

    add_license(signoz, make_http_mocks, get_token)

    cloud_provider = "aws"

    # Mock the deployment info query and ingestion key operations
    make_http_mocks(
        signoz.zeus,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.GET,
                    url="/v2/deployments/me",
                    headers={
                        "X-Signoz-Cloud-Api-Key": {
                            WireMockMatchers.EQUAL_TO: "secret-key"
                        }
                    },
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "status": "success",
                        "data": {
                            "name": "test-deployment",
                            "cluster": {"region": {"dns": "test.signoz.cloud"}},
                        },
                    },
                ),
                persistent=False,
            )
        ],
    )

    make_http_mocks(
        signoz.gateway,
        [
            # Mock the ingestion keys search endpoint
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.GET,
                    url="/v1/workspaces/me/keys/search?name=aws-integration",
                ),
                response=MappingResponse(
                    status=200,
                    json_body={"status": "success", "data": []},
                ),
                persistent=False,
            ),
            # Mock the ingestion key creation endpoint
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST,
                    url="/v1/workspaces/me/keys",
                    json_body={
                        "name": "aws-integration",
                        "tags": ["integration", "aws"],
                    },
                    headers={
                        "X-Signoz-Cloud-Api-Key": {
                            WireMockMatchers.EQUAL_TO: "secret-key"
                        },
                        "X-Consumer-Username": {
                            WireMockMatchers.EQUAL_TO: "lid:00000000-0000-0000-0000-000000000000"
                        },
                        "X-Consumer-Groups": {WireMockMatchers.EQUAL_TO: "ns:default"},
                    },
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "status": "success",
                        "data": {
                            "name": "aws-integration",
                            "value": "test-ingestion-key-123456",
                        },
                        "error": "",
                    },
                ),
                persistent=False,
            ),
        ],
    )

    endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-params"

    response = requests.get(
        signoz.self.host_configs["8080"].get(endpoint),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=10,
    )

    # Assert successful response
    assert (
        response.status_code == HTTPStatus.OK
    ), f"Expected 200, got {response.status_code}: {response.text}"

    # Parse response JSON
    response_data = response.json()

    # Assert response structure contains expected data
    assert "data" in response_data, "Response should contain 'data' field"

    # Assert required fields in the response data
    expected_fields = [
        "ingestion_url",
        "ingestion_key",
        "signoz_api_url",
        "signoz_api_key",
    ]

    for field in expected_fields:
        assert (
            field in response_data["data"]
        ), f"Response data should contain '{field}' field"

    # Assert values for the returned fields
    data = response_data["data"]

    # ingestion_key is created by the mocked gateway and should match
    assert data["ingestion_key"] == "test-ingestion-key-123456", (
        "ingestion_key should match the mocked ingestion key"
    )

    # ingestion_url should be https://ingest.test.signoz.cloud based on the mocked deployment DNS
    assert data["ingestion_url"] == "https://ingest.test.signoz.cloud", (
        "ingestion_url should be https://ingest.test.signoz.cloud"
    )

    # signoz_api_url should be https://test-deployment.test.signoz.cloud based on the mocked deployment name and DNS
    assert data["signoz_api_url"] == "https://test-deployment.test.signoz.cloud", (
        "signoz_api_url should be https://test-deployment.test.signoz.cloud"
    )
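The last two assertions encode the URL-derivation rule this test expects from the connection-params endpoint: the ingestion URL is built from the region DNS of the mocked deployment, and the API URL from the deployment name plus that DNS. A worked sketch of that mapping follows; the helper name and string templates are illustrative, not the server-side implementation.

```python
# Sketch of the URL derivation the assertions above encode; the helper and
# its string templates are illustrative, not the backend implementation.
def connection_urls(deployment_name: str, region_dns: str) -> tuple[str, str]:
    ingestion_url = f"https://ingest.{region_dns}"
    signoz_api_url = f"https://{deployment_name}.{region_dns}"
    return ingestion_url, signoz_api_url


# With the mocked deployment ("test-deployment", "test.signoz.cloud"):
assert connection_urls("test-deployment", "test.signoz.cloud") == (
    "https://ingest.test.signoz.cloud",
    "https://test-deployment.test.signoz.cloud",
)
```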