Compare commits: SIG-5270...fix/pass_t (13 commits)

| Author | SHA1 | Date |
|---|---|---|
| | b5165d1665 | |
| | bf92c92204 | |
| | bd63633be7 | |
| | 1158e1199b | |
| | 0a60c49314 | |
| | c25e3beb81 | |
| | c9e0f2b9ca | |
| | 6d831849c1 | |
| | 83eeb46f99 | |
| | 287558dc9d | |
| | 83aad793c2 | |
| | 3eff689c85 | |
| | f5bcd65e2e | |
```diff
@@ -42,7 +42,7 @@ services:
       timeout: 5s
       retries: 3
   schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.129.7
+    image: signoz/signoz-schema-migrator:v0.129.8
     container_name: schema-migrator-sync
     command:
       - sync
@@ -55,7 +55,7 @@ services:
         condition: service_healthy
     restart: on-failure
  schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.129.7
+    image: signoz/signoz-schema-migrator:v0.129.8
     container_name: schema-migrator-async
     command:
       - async
@@ -176,7 +176,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.99.0
+    image: signoz/signoz:v0.100.1
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -209,7 +209,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.7
+    image: signoz/signoz-otel-collector:v0.129.8
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.7
+    image: signoz/signoz-schema-migrator:v0.129.8
     deploy:
       restart_policy:
         condition: on-failure
@@ -117,7 +117,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.99.0
+    image: signoz/signoz:v0.100.1
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -150,7 +150,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.7
+    image: signoz/signoz-otel-collector:v0.129.8
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.7
+    image: signoz/signoz-schema-migrator:v0.129.8
     deploy:
       restart_policy:
         condition: on-failure
@@ -179,7 +179,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.99.0}
+    image: signoz/signoz:${VERSION:-v0.100.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -250,7 +250,7 @@ services:
         condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
     container_name: schema-migrator-async
     command:
       - async
@@ -111,7 +111,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.99.0}
+    image: signoz/signoz:${VERSION:-v0.100.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.8}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
        condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +178,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.8}
     container_name: schema-migrator-async
     command:
       - async
```
ee/sqlstore/postgressqlstore/formatter.go (new file, 153 lines)

```go
package postgressqlstore

import (
	"strings"

	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun/schema"
)

type formatter struct {
	bunf schema.Formatter
}

func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
	return &formatter{bunf: schema.NewFormatter(dialect)}
}

func (f *formatter) JSONExtractString(column, path string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgres(path)...)
	return sql
}

func (f *formatter) JSONType(column, path string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_typeof("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONIsArray(column, path string) []byte {
	var sql []byte
	sql = append(sql, f.JSONType(column, path)...)
	sql = append(sql, " = "...)
	sql = schema.Append(f.bunf, sql, "array")
	return sql
}

func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, []byte(alias)
}

func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements_text("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), "::text"...)
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_each("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), ".key"...)
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_agg("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_build_array("...)
	for idx, value := range values {
		if idx > 0 {
			sql = append(sql, ", "...)
		}
		sql = schema.Append(f.bunf, sql, value)
	}
	sql = append(sql, ')')
	return sql
}

func (f *formatter) TextToJsonColumn(column string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, "::jsonb"...)
	return sql
}

func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
	return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}

func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
	path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")

	if path == "" {
		return nil
	}

	parts := strings.Split(path, ".")

	var validParts []string
	for _, part := range parts {
		if part != "" {
			validParts = append(validParts, part)
		}
	}

	if len(validParts) == 0 {
		return nil
	}

	var result []byte

	for idx, part := range validParts {
		if idx == len(validParts)-1 {
			if asText {
				result = append(result, "->>"...)
			} else {
				result = append(result, "->"...)
			}
			result = schema.Append(f.bunf, result, part)
			return result
		}

		result = append(result, "->"...)
		result = schema.Append(f.bunf, result, part)
	}

	return result
}

func (f *formatter) LowerExpression(expression string) []byte {
	var sql []byte
	sql = append(sql, "lower("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}
```
ee/sqlstore/postgressqlstore/formatter_test.go (new file, 500 lines)

```go
package postgressqlstore

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/uptrace/bun/dialect/pgdialect"
)

func TestJSONExtractString(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.field",
			expected: `"data"->>'field'`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.name",
			expected: `"metadata"->'user'->>'name'`,
		},
		{
			name:     "deeply nested path",
			column:   "json_col",
			path:     "$.level1.level2.level3",
			expected: `"json_col"->'level1'->'level2'->>'level3'`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `"json_col"`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `"data"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONExtractString(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONType(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.field",
			expected: `jsonb_typeof("data"->'field')`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.age",
			expected: `jsonb_typeof("metadata"->'user'->'age')`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `jsonb_typeof("json_col")`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `jsonb_typeof("data")`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONType(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONIsArray(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.items",
			expected: `jsonb_typeof("data"->'items') = 'array'`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.tags",
			expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `jsonb_typeof("json_col") = 'array'`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `jsonb_typeof("data") = 'array'`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONIsArray(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONArrayElements(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "elem",
			expected: `jsonb_array_elements("data") AS "elem"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "elem",
			expected: `jsonb_array_elements("data") AS "elem"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.items",
			alias:    "item",
			expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
		},
		{
			name:     "deeply nested path",
			column:   "json_col",
			path:     "$.user.tags",
			alias:    "tag",
			expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONArrayOfStrings(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "str",
			expected: `jsonb_array_elements_text("data") AS "str"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "str",
			expected: `jsonb_array_elements_text("data") AS "str"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.strings",
			alias:    "s",
			expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONKeys(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "k",
			expected: `jsonb_each("data") AS "k"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "k",
			expected: `jsonb_each("data") AS "k"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.object",
			alias:    "key",
			expected: `jsonb_each("metadata"->'object') AS "key"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONArrayAgg(t *testing.T) {
	tests := []struct {
		name       string
		expression string
		expected   string
	}{
		{
			name:       "simple column",
			expression: "id",
			expected:   "jsonb_agg(id)",
		},
		{
			name:       "expression with function",
			expression: "DISTINCT name",
			expected:   "jsonb_agg(DISTINCT name)",
		},
		{
			name:       "complex expression",
			expression: "data->>'field'",
			expected:   "jsonb_agg(data->>'field')",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONArrayAgg(tt.expression))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONArrayLiteral(t *testing.T) {
	tests := []struct {
		name     string
		values   []string
		expected string
	}{
		{
			name:     "empty array",
			values:   []string{},
			expected: "jsonb_build_array()",
		},
		{
			name:     "single value",
			values:   []string{"value1"},
			expected: "jsonb_build_array('value1')",
		},
		{
			name:     "multiple values",
			values:   []string{"value1", "value2", "value3"},
			expected: "jsonb_build_array('value1', 'value2', 'value3')",
		},
		{
			name:     "values with special characters",
			values:   []string{"test", "with space", "with-dash"},
			expected: "jsonb_build_array('test', 'with space', 'with-dash')",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONArrayLiteral(tt.values...))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
	tests := []struct {
		name     string
		jsonPath string
		asText   bool
		expected string
	}{
		{
			name:     "simple path as text",
			jsonPath: "$.field",
			asText:   true,
			expected: "->>'field'",
		},
		{
			name:     "simple path as json",
			jsonPath: "$.field",
			asText:   false,
			expected: "->'field'",
		},
		{
			name:     "nested path as text",
			jsonPath: "$.user.name",
			asText:   true,
			expected: "->'user'->>'name'",
		},
		{
			name:     "nested path as json",
			jsonPath: "$.user.name",
			asText:   false,
			expected: "->'user'->'name'",
		},
		{
			name:     "deeply nested as text",
			jsonPath: "$.a.b.c.d",
			asText:   true,
			expected: "->'a'->'b'->'c'->>'d'",
		},
		{
			name:     "root path",
			jsonPath: "$",
			asText:   true,
			expected: "",
		},
		{
			name:     "empty path",
			jsonPath: "",
			asText:   true,
			expected: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New()).(*formatter)
			got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestTextToJsonColumn(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		expected string
	}{
		{
			name:     "simple column name",
			column:   "data",
			expected: `"data"::jsonb`,
		},
		{
			name:     "column with underscore",
			column:   "user_data",
			expected: `"user_data"::jsonb`,
		},
		{
			name:     "column with special characters",
			column:   "json-col",
			expected: `"json-col"::jsonb`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.TextToJsonColumn(tt.column))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestLowerExpression(t *testing.T) {
	tests := []struct {
		name     string
		expr     string
		expected string
	}{
		{
			name:     "simple column name",
			expr:     "name",
			expected: "lower(name)",
		},
		{
			name:     "quoted column identifier",
			expr:     `"column_name"`,
			expected: `lower("column_name")`,
		},
		{
			name:     "jsonb text extraction",
			expr:     "data->>'field'",
			expected: "lower(data->>'field')",
		},
		{
			name:     "nested jsonb extraction",
			expr:     "metadata->'user'->>'name'",
			expected: "lower(metadata->'user'->>'name')",
		},
		{
			name:     "jsonb_typeof expression",
			expr:     "jsonb_typeof(data->'field')",
			expected: "lower(jsonb_typeof(data->'field'))",
		},
		{
			name:     "string concatenation",
			expr:     "first_name || ' ' || last_name",
			expected: "lower(first_name || ' ' || last_name)",
		},
		{
			name:     "CAST expression",
			expr:     "CAST(value AS TEXT)",
			expected: "lower(CAST(value AS TEXT))",
		},
		{
			name:     "COALESCE expression",
			expr:     "COALESCE(name, 'default')",
			expected: "lower(COALESCE(name, 'default'))",
		},
		{
			name:     "subquery column",
			expr:     "users.email",
			expected: "lower(users.email)",
		},
		{
			name:     "quoted identifier with special chars",
			expr:     `"user-name"`,
			expected: `lower("user-name")`,
		},
		{
			name:     "jsonb to text cast",
			expr:     "data::text",
			expected: "lower(data::text)",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.LowerExpression(tt.expr))
			assert.Equal(t, tt.expected, got)
		})
	}
}
```
```diff
@@ -15,10 +15,11 @@ import (
 )
 
 type provider struct {
-	settings factory.ScopedProviderSettings
-	sqldb    *sql.DB
-	bundb    *sqlstore.BunDB
-	dialect  *dialect
+	settings  factory.ScopedProviderSettings
+	sqldb     *sql.DB
+	bundb     *sqlstore.BunDB
+	dialect   *dialect
+	formatter sqlstore.SQLFormatter
 }
 
 func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -55,11 +56,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
 
 	sqldb := stdlib.OpenDBFromPool(pool)
 
+	pgDialect := pgdialect.New()
+	bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
 	return &provider{
-		settings: settings,
-		sqldb:    sqldb,
-		bundb:    sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
-		dialect:  new(dialect),
+		settings:  settings,
+		sqldb:     sqldb,
+		bundb:     bunDB,
+		dialect:   new(dialect),
+		formatter: newFormatter(bunDB.Dialect()),
 	}, nil
 }
 
@@ -75,6 +79,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
 	return provider.dialect
 }
 
+func (provider *provider) Formatter() sqlstore.SQLFormatter {
+	return provider.formatter
+}
+
 func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
 	return provider.bundb.BunDBCtx(ctx)
 }
```
||||
@@ -11,7 +11,7 @@ import {
|
||||
export const getQueryRangeV5 = async (
|
||||
props: QueryRangePayloadV5,
|
||||
version: string,
|
||||
signal: AbortSignal,
|
||||
signal?: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
|
||||
try {
|
||||
|
||||
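Making `signal` optional means callers with no abort semantics compile unchanged, while react-query callers can still forward the cancellation signal they receive. A minimal sketch of both call shapes, reusing the import paths shown elsewhere in this PR; `payload` is a stand-in value, not something defined here:

```typescript
import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { QueryFunctionContext } from 'react-query';
import { QueryRangePayloadV5 } from 'types/api/v5/queryRange';

// 1) Plain call: the third argument can now be omitted entirely.
async function fetchOnce(payload: QueryRangePayloadV5) {
	return getQueryRangeV5(payload, ENTITY_VERSION_V5);
}

// 2) Inside a react-query queryFn: forward the AbortSignal that
//    react-query provides, so unmounts and query-key changes cancel
//    the in-flight request.
const makeQueryFn = (payload: QueryRangePayloadV5) => ({
	signal,
}: QueryFunctionContext) => getQueryRangeV5(payload, ENTITY_VERSION_V5, signal);
```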
```diff
@@ -1,8 +1,10 @@
 import { LoadingOutlined } from '@ant-design/icons';
 import { Spin, Switch, Table, Tooltip, Typography } from 'antd';
+import { getQueryRangeV5 } from 'api/v5/queryRange/getQueryRange';
+import { MetricRangePayloadV5, ScalarData } from 'api/v5/v5';
 import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
 import { withErrorBoundary } from 'components/ErrorBoundaryHOC';
-import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V4 } from 'constants/app';
+import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import {
 	END_POINT_DETAILS_QUERY_KEYS_ARRAY,
@@ -11,13 +13,12 @@ import {
 	getTopErrorsColumnsConfig,
 	getTopErrorsCoRelationQueryFilters,
 	getTopErrorsQueryPayload,
 	TopErrorsResponseRow,
 } from 'container/ApiMonitoring/utils';
-import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
 import { Info } from 'lucide-react';
 import { useMemo, useState } from 'react';
-import { useQueries } from 'react-query';
-import { SuccessResponse } from 'types/api';
+import { QueryFunctionContext, useQueries, useQuery } from 'react-query';
+import { SuccessResponse, SuccessResponseV2 } from 'types/api';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
 import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -46,7 +47,7 @@ function TopErrors({
 		true,
 	);
 
-	const queryPayloads = useMemo(
+	const queryPayload = useMemo(
 		() =>
 			getTopErrorsQueryPayload(
 				domainName,
@@ -82,37 +83,34 @@ function TopErrors({
 		],
 	);
 
-	const topErrorsDataQueries = useQueries(
-		queryPayloads.map((payload) => ({
-			queryKey: [
-				REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
-				payload,
-				DEFAULT_ENTITY_VERSION,
-				showStatusCodeErrors,
-			],
-			queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
-				GetMetricQueryRange(payload, DEFAULT_ENTITY_VERSION),
-			enabled: !!payload,
-			staleTime: 0,
-			cacheTime: 0,
-		})),
-	);
-
-	const topErrorsDataQuery = topErrorsDataQueries[0];
 	const {
 		data: topErrorsData,
 		isLoading,
 		isRefetching,
 		isError,
 		refetch,
-	} = topErrorsDataQuery;
+	} = useQuery({
+		queryKey: [
+			REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
+			queryPayload,
+			ENTITY_VERSION_V5,
+			showStatusCodeErrors,
+		],
+		queryFn: ({
+			signal,
+		}: QueryFunctionContext): Promise<SuccessResponseV2<MetricRangePayloadV5>> =>
+			getQueryRangeV5(queryPayload, ENTITY_VERSION_V5, signal),
+		enabled: !!queryPayload,
+		staleTime: 0,
+		cacheTime: 0,
+	});
 
 	const topErrorsColumnsConfig = useMemo(() => getTopErrorsColumnsConfig(), []);
 
 	const formattedTopErrorsData = useMemo(
 		() =>
 			formatTopErrorsDataForTable(
-				topErrorsData?.payload?.data?.result as TopErrorsResponseRow[],
+				topErrorsData?.data?.data?.data?.results[0] as ScalarData,
 			),
 		[topErrorsData],
 	);
```
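The refactor collapses a one-element `useQueries` array into a single `useQuery`, and keeps `showStatusCodeErrors` in the query key. Since react-query caches per distinct key array, flipping the toggle re-keys the query and triggers a fresh fetch. A stripped-down sketch of the pattern, with a hypothetical `fetchTopErrors` standing in for the real payload-plus-`getQueryRangeV5` pair:

```typescript
import { QueryFunctionContext, useQuery } from 'react-query';

// Hypothetical stand-in for getQueryRangeV5 called with the built payload.
declare function fetchTopErrors(
	showStatusCodeErrors: boolean,
	signal?: AbortSignal,
): Promise<unknown>;

function useTopErrors(showStatusCodeErrors: boolean) {
	return useQuery({
		// Each distinct key array is a separate cache entry, so the
		// boolean doubles as a cache-busting input.
		queryKey: ['GET_TOP_ERRORS_BY_DOMAIN', showStatusCodeErrors],
		queryFn: ({ signal }: QueryFunctionContext) =>
			fetchTopErrors(showStatusCodeErrors, signal),
		staleTime: 0,
		cacheTime: 0,
	});
}
```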
```diff
@@ -8,7 +8,6 @@ import {
 	endPointStatusCodeColumns,
 	extractPortAndEndpoint,
 	formatDataForTable,
-	formatTopErrorsDataForTable,
 	getAllEndpointsWidgetData,
 	getCustomFiltersForBarChart,
 	getEndPointDetailsQueryPayload,
@@ -23,8 +22,6 @@ import {
 	getStatusCodeBarChartWidgetData,
 	getTopErrorsColumnsConfig,
 	getTopErrorsCoRelationQueryFilters,
-	getTopErrorsQueryPayload,
-	TopErrorsResponseRow,
 } from '../utils';
 
 import { APIMonitoringColumnsMock } from './mock';
@@ -344,49 +341,6 @@ describe('API Monitoring Utils', () => {
 		});
 	});
 
-	describe('formatTopErrorsDataForTable', () => {
-		it('should format top errors data correctly', () => {
-			// Arrange
-			const inputData = [
-				{
-					metric: {
-						[SPAN_ATTRIBUTES.URL_PATH]: '/api/test',
-						[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]: '500',
-						status_message: 'Internal Server Error',
-					},
-					values: [[1000000100, '10']],
-					queryName: 'A',
-					legend: 'Test Legend',
-				},
-			];
-
-			// Act
-			const result = formatTopErrorsDataForTable(
-				inputData as TopErrorsResponseRow[],
-			);
-
-			// Assert
-			expect(result).toBeDefined();
-			expect(result.length).toBe(1);
-
-			// Check first item is formatted correctly
-			expect(result[0].endpointName).toBe('/api/test');
-			expect(result[0].statusCode).toBe('500');
-			expect(result[0].statusMessage).toBe('Internal Server Error');
-			expect(result[0].count).toBe('10');
-			expect(result[0].key).toBeDefined();
-		});
-
-		it('should handle empty input', () => {
-			// Act
-			const result = formatTopErrorsDataForTable(undefined);
-
-			// Assert
-			expect(result).toBeDefined();
-			expect(result).toEqual([]);
-		});
-	});
-
 	describe('getTopErrorsColumnsConfig', () => {
 		it('should return column configuration with expected fields', () => {
 			// Act
@@ -453,72 +407,6 @@ describe('API Monitoring Utils', () => {
 		});
 	});
 
-	describe('getTopErrorsQueryPayload', () => {
-		it('should create correct query payload with filters', () => {
-			// Arrange
-			const domainName = 'test-domain';
-			const start = 1000000000;
-			const end = 1000010000;
-			const filters = {
-				items: [
-					{
-						id: 'test-filter',
-						key: {
-							dataType: DataTypes.String,
-							key: 'test-key',
-							type: '',
-						},
-						op: '=',
-						value: 'test-value',
-					},
-				],
-				op: 'AND',
-			};
-
-			// Act
-			const result = getTopErrorsQueryPayload(
-				domainName,
-				start,
-				end,
-				filters as IBuilderQuery['filters'],
-			);
-
-			// Assert
-			expect(result).toBeDefined();
-			expect(result.length).toBeGreaterThan(0);
-
-			// Verify query params
-			expect(result[0].start).toBe(start);
-			expect(result[0].end).toBe(end);
-
-			// Verify correct structure
-			expect(result[0].graphType).toBeDefined();
-			expect(result[0].query).toBeDefined();
-			expect(result[0].query.builder).toBeDefined();
-			expect(result[0].query.builder.queryData).toBeDefined();
-
-			// Verify domain filter is included
-			const queryData = result[0].query.builder.queryData[0];
-			expect(queryData.filters).toBeDefined();
-
-			// Check for domain filter
-			const domainFilter = queryData.filters?.items?.find(
-				// eslint-disable-next-line sonarjs/no-identical-functions
-				(item) =>
-					item.key &&
-					item.key.key === SPAN_ATTRIBUTES.SERVER_NAME &&
-					item.value === domainName,
-			);
-			expect(domainFilter).toBeDefined();
-
-			// Check that custom filters were included
-			const testFilter = queryData.filters?.items?.find(
-				(item) => item.id === 'test-filter',
-			);
-			expect(testFilter).toBeDefined();
-		});
-	});
-
 	// Add new tests for EndPointDetails utility functions
 	describe('extractPortAndEndpoint', () => {
 		it('should extract port and endpoint from a valid URL', () => {
```
```diff
@@ -1,14 +1,6 @@
-import { fireEvent, render, screen, within } from '@testing-library/react';
 import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer';
-import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
-import {
-	formatTopErrorsDataForTable,
-	getEndPointDetailsQueryPayload,
-	getTopErrorsColumnsConfig,
-	getTopErrorsCoRelationQueryFilters,
-	getTopErrorsQueryPayload,
-} from 'container/ApiMonitoring/utils';
-import { useQueries } from 'react-query';
+import { rest, server } from 'mocks-server/server';
+import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
+import { DataSource } from 'types/common/queryBuilder';
 
 import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
@@ -35,28 +27,15 @@ jest.mock(
 	}),
 );
 
-// Mock dependencies
-jest.mock('react-query', () => ({
-	...jest.requireActual('react-query'),
-	useQueries: jest.fn(),
-}));
-
 jest.mock('components/CeleryTask/useNavigateToExplorer', () => ({
 	useNavigateToExplorer: jest.fn(),
 }));
 
-jest.mock('container/ApiMonitoring/utils', () => ({
-	END_POINT_DETAILS_QUERY_KEYS_ARRAY: ['key1', 'key2', 'key3', 'key4', 'key5'],
-	formatTopErrorsDataForTable: jest.fn(),
-	getEndPointDetailsQueryPayload: jest.fn(),
-	getTopErrorsColumnsConfig: jest.fn(),
-	getTopErrorsCoRelationQueryFilters: jest.fn(),
-	getTopErrorsQueryPayload: jest.fn(),
-}));
-
 describe('TopErrors', () => {
+	const TABLE_BODY_SELECTOR = '.ant-table-tbody';
+	const V5_QUERY_RANGE_API_PATH = '*/api/v5/query_range';
+
 	const mockProps = {
 		// eslint-disable-next-line sonarjs/no-duplicate-string
 		domainName: 'test-domain',
 		timeRange: {
 			startTime: 1000000000,
@@ -68,75 +47,72 @@ describe('TopErrors', () => {
 		},
 	};
 
-	// Setup basic mocks
+	// Helper function to wait for table data to load
+	const waitForTableDataToLoad = async (
+		container: HTMLElement,
+	): Promise<void> => {
+		await waitFor(() => {
+			const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
+			expect(tableBody).not.toBeNull();
+			if (tableBody) {
+				expect(
+					within(tableBody as HTMLElement).queryByText('/api/test'),
+				).toBeInTheDocument();
+			}
+		});
+	};
+
 	beforeEach(() => {
 		jest.clearAllMocks();
 
-		// Mock getTopErrorsColumnsConfig
-		(getTopErrorsColumnsConfig as jest.Mock).mockReturnValue([
-			{
-				title: 'Endpoint',
-				dataIndex: 'endpointName',
-				key: 'endpointName',
-			},
-			{
-				title: 'Status Code',
-				dataIndex: 'statusCode',
-				key: 'statusCode',
-			},
-			{
-				title: 'Status Message',
-				dataIndex: 'statusMessage',
-				key: 'statusMessage',
-			},
-			{
-				title: 'Count',
-				dataIndex: 'count',
-				key: 'count',
-			},
-		]);
-
-		// Mock useQueries
-		(useQueries as jest.Mock).mockImplementation((queryConfigs) => {
-			// For topErrorsDataQueries
-			if (
-				queryConfigs.length === 1 &&
-				queryConfigs[0].queryKey &&
-				queryConfigs[0].queryKey[0] === REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN
-			) {
-				return [
-					{
-						data: {
-							payload: {
-								data: {
-									result: [
-										{
-											metric: {
-												'http.url': '/api/test',
-												status_code: '500',
-												// eslint-disable-next-line sonarjs/no-duplicate-string
-												status_message: 'Internal Server Error',
-											},
-											values: [[1000000100, '10']],
-											queryName: 'A',
-											legend: 'Test Legend',
-										},
-									],
-								},
-							},
-						},
-						isLoading: false,
-						isRefetching: false,
-						isError: false,
-						refetch: jest.fn(),
-					},
-				];
-			}
+		// Mock useNavigateToExplorer
+		(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());
 
-			// For endPointDropDownDataQueries
-			return [
-				{
-					data: {
+		// Mock V5 API endpoint for top errors
+		server.use(
+			rest.post(V5_QUERY_RANGE_API_PATH, (_req, res, ctx) =>
+				res(
+					ctx.status(200),
+					ctx.json({
+						data: {
+							data: {
+								results: [
+									{
+										columns: [
+											{
+												name: 'http.url',
+												fieldDataType: 'string',
+												fieldContext: 'attribute',
+											},
+											{
+												name: 'response_status_code',
+												fieldDataType: 'string',
+												fieldContext: 'span',
+											},
+											{
+												name: 'status_message',
+												fieldDataType: 'string',
+												fieldContext: 'span',
+											},
+											{ name: 'count()', fieldDataType: 'int64', fieldContext: '' },
+										],
+										// eslint-disable-next-line sonarjs/no-duplicate-string
+										data: [['/api/test', '500', 'Internal Server Error', 10]],
+									},
+								],
+							},
+						},
+					}),
+				),
+			),
+		);
 
+		// Mock V4 API endpoint for dropdown data
+		server.use(
+			rest.post('*/api/v1/query_range', (_req, res, ctx) =>
+				res(
+					ctx.status(200),
+					ctx.json({
+						payload: {
+							data: {
+								result: [
@@ -153,62 +129,13 @@ describe('TopErrors', () => {
 								],
 							},
 						},
-					},
-					isLoading: false,
-					isRefetching: false,
-					isError: false,
-				},
-			];
-		});
-
-		// Mock formatTopErrorsDataForTable
-		(formatTopErrorsDataForTable as jest.Mock).mockReturnValue([
-			{
-				key: '1',
-				endpointName: '/api/test',
-				statusCode: '500',
-				statusMessage: 'Internal Server Error',
-				count: 10,
-			},
-		]);
-
-		// Mock getTopErrorsQueryPayload
-		(getTopErrorsQueryPayload as jest.Mock).mockReturnValue([
-			{
-				queryName: 'TopErrorsQuery',
-				start: mockProps.timeRange.startTime,
-				end: mockProps.timeRange.endTime,
-				step: 60,
-			},
-		]);
-
-		// Mock getEndPointDetailsQueryPayload
-		(getEndPointDetailsQueryPayload as jest.Mock).mockReturnValue([
-			{},
-			{},
-			{
-				queryName: 'EndpointDropdownQuery',
-				start: mockProps.timeRange.startTime,
-				end: mockProps.timeRange.endTime,
-				step: 60,
-			},
-		]);
-
-		// Mock useNavigateToExplorer
-		(useNavigateToExplorer as jest.Mock).mockReturnValue(jest.fn());
-
-		// Mock getTopErrorsCoRelationQueryFilters
-		(getTopErrorsCoRelationQueryFilters as jest.Mock).mockReturnValue({
-			items: [
-				{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
-				{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
-				{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
-			],
-			op: 'AND',
-		});
+					}),
+				),
+			),
+		);
 	});
 
-	it('renders component correctly', () => {
+	it('renders component correctly', async () => {
 		// eslint-disable-next-line react/jsx-props-no-spreading
 		const { container } = render(<TopErrors {...mockProps} />);
 
@@ -216,10 +143,11 @@ describe('TopErrors', () => {
 		expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
 		expect(screen.getByText('Status Message Exists')).toBeInTheDocument();
 
-		// Find the table row and verify content
-		const tableBody = container.querySelector('.ant-table-tbody');
-		expect(tableBody).not.toBeNull();
+		// Wait for data to load
+		await waitForTableDataToLoad(container);
+
+		// Find the table row and verify content
+		const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
 		if (tableBody) {
 			const row = within(tableBody as HTMLElement).getByRole('row');
 			expect(within(row).getByText('/api/test')).toBeInTheDocument();
@@ -228,35 +156,40 @@ describe('TopErrors', () => {
 		}
 	});
 
-	it('renders error state when isError is true', () => {
-		// Mock useQueries to return isError: true
-		(useQueries as jest.Mock).mockImplementationOnce(() => [
-			{
-				isError: true,
-				refetch: jest.fn(),
-			},
-		]);
+	it('renders error state when API fails', async () => {
+		// Mock API to return error
+		server.use(
+			rest.post(V5_QUERY_RANGE_API_PATH, (_req, res, ctx) =>
+				res(ctx.status(500), ctx.json({ error: 'Internal Server Error' })),
+			),
+		);
 
 		// eslint-disable-next-line react/jsx-props-no-spreading
 		render(<TopErrors {...mockProps} />);
 
-		// Error state should be shown with the actual text displayed in the UI
-		expect(
-			screen.getByText('Uh-oh :/ We ran into an error.'),
-		).toBeInTheDocument();
+		// Wait for error state
+		await waitFor(() => {
+			expect(
+				screen.getByText('Uh-oh :/ We ran into an error.'),
+			).toBeInTheDocument();
		});
 
 		expect(screen.getByText('Please refresh this panel.')).toBeInTheDocument();
 		expect(screen.getByText('Refresh this panel')).toBeInTheDocument();
 	});
 
-	it('handles row click correctly', () => {
+	it('handles row click correctly', async () => {
 		const navigateMock = jest.fn();
 		(useNavigateToExplorer as jest.Mock).mockReturnValue(navigateMock);
 
 		// eslint-disable-next-line react/jsx-props-no-spreading
 		const { container } = render(<TopErrors {...mockProps} />);
 
+		// Wait for data to load
+		await waitForTableDataToLoad(container);
+
 		// Find and click on the table cell containing the endpoint
-		const tableBody = container.querySelector('.ant-table-tbody');
+		const tableBody = container.querySelector(TABLE_BODY_SELECTOR);
 		expect(tableBody).not.toBeNull();
 
 		if (tableBody) {
@@ -267,11 +200,28 @@ describe('TopErrors', () => {
 
 			// Check if navigateToExplorer was called with correct params
 			expect(navigateMock).toHaveBeenCalledWith({
-				filters: [
-					{ id: 'test1', key: { key: 'domain' }, op: '=', value: 'test-domain' },
-					{ id: 'test2', key: { key: 'endpoint' }, op: '=', value: '/api/test' },
-					{ id: 'test3', key: { key: 'status' }, op: '=', value: '500' },
-				],
+				filters: expect.arrayContaining([
+					expect.objectContaining({
+						key: expect.objectContaining({ key: 'http.url' }),
+						op: '=',
+						value: '/api/test',
+					}),
+					expect.objectContaining({
+						key: expect.objectContaining({ key: 'has_error' }),
+						op: '=',
+						value: 'true',
+					}),
+					expect.objectContaining({
+						key: expect.objectContaining({ key: 'net.peer.name' }),
+						op: '=',
+						value: 'test-domain',
+					}),
+					expect.objectContaining({
+						key: expect.objectContaining({ key: 'response_status_code' }),
+						op: '=',
+						value: '500',
+					}),
+				]),
 				dataSource: DataSource.TRACES,
 				startTime: mockProps.timeRange.startTime,
 				endTime: mockProps.timeRange.endTime,
@@ -279,24 +229,34 @@ describe('TopErrors', () => {
 		});
 	});
 
-	it('updates endpoint filter when dropdown value changes', () => {
+	it('updates endpoint filter when dropdown value changes', async () => {
 		// eslint-disable-next-line react/jsx-props-no-spreading
 		render(<TopErrors {...mockProps} />);
 
+		// Wait for initial load
+		await waitFor(() => {
+			expect(screen.getByRole('combobox')).toBeInTheDocument();
+		});
+
 		// Find the dropdown
 		const dropdown = screen.getByRole('combobox');
 
 		// Mock the change
 		fireEvent.change(dropdown, { target: { value: '/api/new-endpoint' } });
 
-		// Check if getTopErrorsQueryPayload was called with updated parameters
-		expect(getTopErrorsQueryPayload).toHaveBeenCalled();
+		// Component should re-render with new filter
+		expect(dropdown).toBeInTheDocument();
 	});
 
-	it('handles status message toggle correctly', () => {
+	it('handles status message toggle correctly', async () => {
 		// eslint-disable-next-line react/jsx-props-no-spreading
 		render(<TopErrors {...mockProps} />);
 
+		// Wait for initial load
+		await waitFor(() => {
+			expect(screen.getByRole('switch')).toBeInTheDocument();
+		});
+
 		// Find the toggle switch
 		const toggle = screen.getByRole('switch');
 		expect(toggle).toBeInTheDocument();
@@ -307,69 +267,104 @@ describe('TopErrors', () => {
 		// Click the toggle to turn it off
 		fireEvent.click(toggle);
 
-		// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=false
-		expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
-			mockProps.domainName,
-			mockProps.timeRange.startTime,
-			mockProps.timeRange.endTime,
-			expect.any(Object),
-			false,
-		);
-
 		// Title should change
-		expect(screen.getByText('All Errors')).toBeInTheDocument();
+		await waitFor(() => {
+			expect(screen.getByText('All Errors')).toBeInTheDocument();
+		});
 
 		// Click the toggle to turn it back on
 		fireEvent.click(toggle);
 
-		// Check if getTopErrorsQueryPayload was called with showStatusCodeErrors=true
-		expect(getTopErrorsQueryPayload).toHaveBeenCalledWith(
-			mockProps.domainName,
-			mockProps.timeRange.startTime,
-			mockProps.timeRange.endTime,
-			expect.any(Object),
-			true,
-		);
-
 		// Title should change back
-		expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
+		await waitFor(() => {
+			expect(screen.getByText('Errors with Status Message')).toBeInTheDocument();
+		});
 	});
 
-	it('includes toggle state in query key for cache busting', () => {
+	it('includes toggle state in query key for cache busting', async () => {
 		// eslint-disable-next-line react/jsx-props-no-spreading
 		render(<TopErrors {...mockProps} />);
 
-		const toggle = screen.getByRole('switch');
+		// Wait for initial load
+		await waitFor(() => {
+			expect(screen.getByRole('switch')).toBeInTheDocument();
+		});
 
-		// Initial query should include showStatusCodeErrors=true
-		expect(useQueries).toHaveBeenCalledWith(
-			expect.arrayContaining([
-				expect.objectContaining({
-					queryKey: expect.arrayContaining([
-						REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
-						expect.any(Object),
-						expect.any(String),
-						true,
-					]),
-				}),
-			]),
-		);
+		const toggle = screen.getByRole('switch');
 
 		// Click toggle
 		fireEvent.click(toggle);
 
-		// Query should be called with showStatusCodeErrors=false in key
-		expect(useQueries).toHaveBeenCalledWith(
-			expect.arrayContaining([
-				expect.objectContaining({
-					queryKey: expect.arrayContaining([
-						REACT_QUERY_KEY.GET_TOP_ERRORS_BY_DOMAIN,
-						expect.any(Object),
-						expect.any(String),
-						false,
-					]),
-				}),
-			]),
-		);
+		// Wait for title to change, indicating query was refetched with new key
+		await waitFor(() => {
+			expect(screen.getByText('All Errors')).toBeInTheDocument();
+		});
+
+		// The fact that data refetches when toggle changes proves the query key includes the toggle state
+		expect(toggle).toBeInTheDocument();
 	});
+
+	it('sends query_range v5 API call with required filters including has_error', async () => {
+		let capturedRequest: any;
+
+		// Override the v5 API mock to capture the request
+		server.use(
+			rest.post(V5_QUERY_RANGE_API_PATH, async (req, res, ctx) => {
+				capturedRequest = await req.json();
+				return res(
+					ctx.status(200),
+					ctx.json({
+						data: {
+							data: {
+								results: [
+									{
+										columns: [
+											{
+												name: 'http.url',
+												fieldDataType: 'string',
+												fieldContext: 'attribute',
+											},
+											{
+												name: 'response_status_code',
+												fieldDataType: 'string',
+												fieldContext: 'span',
+											},
+											{
+												name: 'status_message',
+												fieldDataType: 'string',
+												fieldContext: 'span',
+											},
+											{ name: 'count()', fieldDataType: 'int64', fieldContext: '' },
+										],
+										data: [['/api/test', '500', 'Internal Server Error', 10]],
+									},
+								],
+							},
+						},
+					}),
+				);
+			}),
+		);
+
+		// eslint-disable-next-line react/jsx-props-no-spreading
+		render(<TopErrors {...mockProps} />);
+
+		// Wait for the API call to be made
+		await waitFor(() => {
+			expect(capturedRequest).toBeDefined();
+		});
+
+		// Extract the filter expression from the captured request
+		const filterExpression =
+			capturedRequest.compositeQuery.queries[0].spec.filter.expression;
+
+		// Verify all required filters are present
+		expect(filterExpression).toContain(`kind_string = 'Client'`);
+		expect(filterExpression).toContain(`(http.url EXISTS OR url.full EXISTS)`);
+		expect(filterExpression).toContain(
+			`(net.peer.name = 'test-domain' OR server.address = 'test-domain')`,
+		);
+		expect(filterExpression).toContain(`has_error = true`);
+		expect(filterExpression).toContain(`status_message EXISTS`); // toggle is on by default
+	});
 });
```
```diff
@@ -2,6 +2,7 @@
 import { Color } from '@signozhq/design-tokens';
 import { Progress, Tag, Tooltip } from 'antd';
 import { ColumnType } from 'antd/es/table';
+import { convertFiltersToExpressionWithExistingQuery } from 'components/QueryBuilderV2/utils';
 import {
 	FiltersType,
 	IQuickFiltersConfig,
@@ -27,6 +28,11 @@ import {
 	OrderByPayload,
 	TagFilterItem,
 } from 'types/api/queryBuilder/queryBuilderData';
+import {
+	ColumnDescriptor,
+	QueryRangePayloadV5,
+	ScalarData,
+} from 'types/api/v5/queryRange';
 import { QueryData } from 'types/api/widgets/getQuery';
 import { EQueryType } from 'types/common/dashboard';
 import { DataSource } from 'types/common/queryBuilder';
@@ -40,6 +46,9 @@ import {
 	EndPointsResponseRow,
 } from './types';
 
+export const isEmptyFilterValue = (value: unknown): boolean =>
+	value === '' || value === null || value === undefined || value === 'n/a';
+
 export const ApiMonitoringQuickFiltersConfig: IQuickFiltersConfig[] = [
 	{
 		type: FiltersType.CHECKBOX,
```
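`isEmptyFilterValue` treats the `'n/a'` placeholder string the same as genuinely missing values. The behavior follows directly from the one-liner above:

```typescript
isEmptyFilterValue('');        // true
isEmptyFilterValue(null);      // true
isEmptyFilterValue(undefined); // true
isEmptyFilterValue('n/a');     // true
isEmptyFilterValue('500');     // false
isEmptyFilterValue(0);         // false: only the four empty cases above match
```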
```diff
@@ -816,153 +825,100 @@ export const getEndPointsQueryPayload = (
 	];
 };
 
+// eslint-disable-next-line sonarjs/cognitive-complexity
+function buildFilterExpression(
+	domainName: string,
+	filters: IBuilderQuery['filters'],
+	showStatusCodeErrors: boolean,
+): string {
+	const baseFilterParts = [
+		`kind_string = 'Client'`,
+		`(http.url EXISTS OR url.full EXISTS)`,
+		`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
+		`has_error = true`,
+	];
+	if (showStatusCodeErrors) {
+		baseFilterParts.push('status_message EXISTS');
+	}
+	const filterExpression = baseFilterParts.join(' AND ');
+	if (!filters) {
+		return filterExpression;
+	}
+	const { filter } = convertFiltersToExpressionWithExistingQuery(
+		filters,
+		filterExpression,
+	);
+	return filter.expression;
+}
+
 export const getTopErrorsQueryPayload = (
 	domainName: string,
 	start: number,
 	end: number,
 	filters: IBuilderQuery['filters'],
 	showStatusCodeErrors = true,
-): GetQueryResultsProps[] => [
-	{
-		selectedTime: 'GLOBAL_TIME',
-		graphType: PANEL_TYPES.TABLE,
-		query: {
-			builder: {
-				queryData: [
-					{
-						dataSource: DataSource.TRACES,
-						queryName: 'A',
-						aggregateOperator: 'count',
-						aggregateAttribute: {
-							id: '------false',
-							dataType: DataTypes.String,
-							key: '',
-							type: '',
-						},
-						timeAggregation: 'rate',
-						spaceAggregation: 'sum',
-						functions: [],
-						filters: {
-							op: 'AND',
-							items: [
-								{
-									id: '04da97bd',
-									key: {
-										key: 'kind_string',
-										dataType: DataTypes.String,
-										type: '',
-									},
-									op: '=',
-									value: 'Client',
-								},
-								{
-									id: 'b1af6bdb',
-									key: {
-										key: SPAN_ATTRIBUTES.URL_PATH,
-										dataType: DataTypes.String,
-										type: 'tag',
-									},
-									op: 'exists',
-									value: '',
-								},
-								...(showStatusCodeErrors
-									? [
-											{
-												id: '75d65388',
-												key: {
-													key: 'status_message',
-													dataType: DataTypes.String,
-													type: '',
-												},
-												op: 'exists',
-												value: '',
-											},
-									  ]
-									: []),
-								{
-									id: '4872bf91',
-									key: {
-										key: SPAN_ATTRIBUTES.SERVER_NAME,
-										dataType: DataTypes.String,
-										type: 'tag',
-									},
-									op: '=',
-									value: domainName,
-								},
-								{
-									id: 'ab4c885d',
-									key: {
-										key: 'has_error',
-										dataType: DataTypes.bool,
-										type: '',
-									},
-									op: '=',
-									value: true,
-								},
-								...(filters?.items || []),
-							],
-						},
-						expression: 'A',
-						disabled: false,
-						stepInterval: 60,
-						having: [],
-						limit: 10,
-						orderBy: [
-							{
-								columnName: 'timestamp',
-								order: 'desc',
-							},
-						],
-						groupBy: [
-							{
-								key: SPAN_ATTRIBUTES.URL_PATH,
-								dataType: DataTypes.String,
-								type: 'tag',
-							},
-							{
-								dataType: DataTypes.String,
-								key: 'response_status_code',
-								type: '',
-								id: 'response_status_code--string----true',
-							},
-							{
-								key: 'status_message',
-								dataType: DataTypes.String,
-								type: '',
-							},
-						],
-						legend: '',
-						reduceTo: 'avg',
-					},
-				],
-				queryFormulas: [],
-				queryTraceOperator: [],
-			},
-			clickhouse_sql: [
-				{
-					disabled: false,
-					legend: '',
-					name: 'A',
-					query: '',
-				},
-			],
-			id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
-			promql: [
-				{
-					disabled: false,
-					legend: '',
-					name: 'A',
-					query: '',
-				},
-			],
-			queryType: EQueryType.QUERY_BUILDER,
-		},
-		variables: {},
-	},
-];
+): QueryRangePayloadV5 => {
+	const filterExpression = buildFilterExpression(
+		domainName,
+		filters,
+		showStatusCodeErrors,
+	);
+
+	return {
+		schemaVersion: 'v1',
+		start,
+		end,
+		step: 240,
+		requestType: 'scalar',
+		compositeQuery: {
+			queries: [
+				{
+					type: 'builder_query',
+					spec: {
+						name: 'A',
+						signal: 'traces',
+						stepInterval: 60,
+						disabled: false,
+						aggregations: [{ expression: 'count()' }],
+						filter: { expression: filterExpression },
+						groupBy: [
+							{
+								name: 'http.url',
+								fieldDataType: 'string',
+								fieldContext: 'attribute',
+							},
+							{
+								name: 'url.full',
+								fieldDataType: 'string',
+								fieldContext: 'attribute',
+							},
+							{
+								name: 'response_status_code',
+								fieldDataType: 'string',
+								fieldContext: 'span',
+							},
+							{
+								name: 'status_message',
+								fieldDataType: 'string',
+								fieldContext: 'span',
+							},
+						],
+						limit: 10,
+						order: [
+							{
+								key: {
+									name: 'count()',
+								},
+								direction: 'desc',
+							},
+						],
+					},
+				},
+			],
+		},
+		formatOptions: { formatTableResultForUI: true, fillGaps: false },
+		variables: {},
+	};
+};
 
 export interface EndPointsTableRowData {
 	key: string;
```
@@ -1242,63 +1198,55 @@ export const formatEndPointsDataForTable = (
|
||||
return formattedData;
|
||||
};
|
||||
|
||||
export interface TopErrorsResponseRow {
|
||||
metric: {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: string;
|
||||
[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]: string;
|
||||
status_message: string;
|
||||
};
|
||||
values: [number, string][];
|
||||
queryName: string;
|
||||
legend: string;
|
||||
}
|
||||
export type TopErrorsResponseRow = ScalarData;
|
||||
|
||||
export interface TopErrorsTableRowData {
|
||||
key: string;
|
||||
endpointName: string;
|
||||
statusCode: string;
|
||||
statusMessage: string;
|
||||
count: number | string;
|
||||
count: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns '-' if value is empty, otherwise returns value as string
|
||||
*/
|
||||
export function getDisplayValue(value: unknown): string {
|
||||
return isEmptyFilterValue(value) ? '-' : String(value);
|
||||
}
|
||||
|
||||
export const formatTopErrorsDataForTable = (
|
||||
data: TopErrorsResponseRow[] | undefined,
|
||||
scalarResult: TopErrorsResponseRow | undefined,
|
||||
): TopErrorsTableRowData[] => {
|
||||
if (!data) return [];
|
||||
if (!scalarResult?.data) return [];
|
||||
|
||||
return data.map((row) => ({
|
||||
key: v4(),
|
||||
endpointName:
|
||||
row.metric[SPAN_ATTRIBUTES.URL_PATH] === 'n/a' ||
|
||||
row.metric[SPAN_ATTRIBUTES.URL_PATH] === undefined
|
||||
? '-'
|
||||
: row.metric[SPAN_ATTRIBUTES.URL_PATH],
|
||||
statusCode:
|
||||
row.metric[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE] === 'n/a' ||
|
||||
row.metric[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE] === undefined
|
||||
? '-'
|
||||
: row.metric[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE],
|
||||
statusMessage:
|
||||
row.metric.status_message === 'n/a' ||
|
||||
row.metric.status_message === undefined
|
||||
? '-'
|
||||
: row.metric.status_message,
|
||||
count:
|
||||
row.values &&
|
||||
row.values[0] &&
|
||||
row.values[0][1] !== undefined &&
|
||||
row.values[0][1] !== 'n/a'
|
||||
? row.values[0][1]
|
||||
: '-',
|
||||
}));
|
||||
const columns = scalarResult.columns || [];
|
||||
const rows = scalarResult.data || [];
|
||||
|
||||
return rows.map((rowData: unknown[]) => {
|
||||
const rowObj: Record<string, unknown> = {};
|
||||
columns.forEach((col: ColumnDescriptor, index: number) => {
|
||||
rowObj[col.name] = rowData[index];
|
||||
});
|
||||
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: getDisplayValue(
|
||||
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
|
||||
),
|
||||
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
|
||||
statusMessage: getDisplayValue(rowObj.status_message),
|
||||
count: getDisplayValue(rowObj.__result_0),
|
||||
};
|
||||
});
|
||||
};
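The rewritten formatter above zips the scalar response's column descriptors with each positional data row into a keyed object before the display helpers run. A minimal, hedged sketch of that zip step, with ColumnDescriptor reduced to the one field the loop reads (the '__result_0' aggregation column name comes from the diff above; the sample values are illustrative):

// Hypothetical, self-contained reduction of the columns+rows zip used above.
interface MinimalColumnDescriptor {
  name: string;
}

function zipScalarRow(
  columns: MinimalColumnDescriptor[],
  row: unknown[],
): Record<string, unknown> {
  const rowObj: Record<string, unknown> = {};
  columns.forEach((col, index) => {
    rowObj[col.name] = row[index]; // column position i names row value i
  });
  return rowObj;
}

// Example: [{ name: 'http.url' }, { name: '__result_0' }] and ['/api/users', 42]
// yield { 'http.url': '/api/users', __result_0: 42 }.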

export const getTopErrorsCoRelationQueryFilters = (
domainName: string,
endPointName: string,
statusCode: string,
): IBuilderQuery['filters'] => ({
items: [
): IBuilderQuery['filters'] => {
const items: TagFilterItem[] = [
{
id: 'ea16470b',
key: {
@@ -1330,7 +1278,10 @@ export const getTopErrorsCoRelationQueryFilters = (
op: '=',
value: domainName,
},
{
];

if (statusCode !== '-') {
items.push({
id: 'f6891e27',
key: {
key: 'response_status_code',
@@ -1340,10 +1291,14 @@ export const getTopErrorsCoRelationQueryFilters = (
},
op: '=',
value: statusCode,
},
],
op: 'AND',
});
});
}

return {
items,
op: 'AND',
};
};
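With this change the status-code condition is appended only when the row actually carries one, so the '-' placeholder the table renders for a missing code can no longer leak into a correlation query. A hedged usage sketch (argument values are illustrative; the signature is the one shown above):

// Includes a response_status_code = '500' filter item:
const withCode = getTopErrorsCoRelationQueryFilters('api.example.com', '/checkout', '500');

// '-' is the table's empty placeholder, so the status-code item is skipped:
const withoutCode = getTopErrorsCoRelationQueryFilters('api.example.com', '/checkout', '-');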

export const getTopErrorsColumnsConfig = (): ColumnType<TopErrorsTableRowData>[] => [
{

@@ -137,8 +137,7 @@ function GeneralSettings({
if (logsCurrentTTLValues) {
setLogsTotalRetentionPeriod(logsCurrentTTLValues.default_ttl_days * 24);
setLogsS3RetentionPeriod(
logsCurrentTTLValues.cold_storage_ttl_days &&
logsCurrentTTLValues.cold_storage_ttl_days > 0
logsCurrentTTLValues.cold_storage_ttl_days
? logsCurrentTTLValues.cold_storage_ttl_days * 24
: null,
);

@@ -94,6 +94,9 @@ const mockDisksWithoutS3: IDiskType[] = [
];

describe('GeneralSettings - S3 Logs Retention', () => {
const BUTTON_SELECTOR = 'button[type="button"]';
const PRIMARY_BUTTON_CLASS = 'ant-btn-primary';

beforeEach(() => {
jest.clearAllMocks();
(setRetentionApiV2 as jest.Mock).mockResolvedValue({
@@ -155,10 +158,10 @@ describe('GeneralSettings - S3 Logs Retention', () => {
await user.type(s3Input, '5');

// Find the save button in the Logs card
const buttons = logsCard?.querySelectorAll('button[type="button"]');
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
// The primary button should be the save button
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes('ant-btn-primary'),
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;

expect(saveButton).toBeInTheDocument();
@@ -262,9 +265,9 @@ describe('GeneralSettings - S3 Logs Retention', () => {
await user.type(totalInput, '60');

// Find the save button
const buttons = logsCard?.querySelectorAll('button[type="button"]');
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes('ant-btn-primary'),
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;

expect(saveButton).toBeInTheDocument();
@@ -329,4 +332,59 @@ describe('GeneralSettings - S3 Logs Retention', () => {
expect(dropdowns?.[1]).toHaveTextContent('Days');
});
});

describe('Test 4: Save Button State with S3 Disabled', () => {
it('should disable save button when cold_storage_ttl_days is -1 and no changes made', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });

render(
<GeneralSettings
metricsTtlValuesPayload={mockMetricsRetention}
tracesTtlValuesPayload={mockTracesRetention}
logsTtlValuesPayload={mockLogsRetentionWithoutS3}
getAvailableDiskPayload={mockDisksWithS3}
metricsTtlValuesRefetch={jest.fn()}
tracesTtlValuesRefetch={jest.fn()}
logsTtlValuesRefetch={jest.fn()}
/>,
);

// Find the Logs card
const logsCard = screen.getByText('Logs').closest('.ant-card');
expect(logsCard).toBeInTheDocument();

// Find the save button
const buttons = logsCard?.querySelectorAll(BUTTON_SELECTOR);
const saveButton = Array.from(buttons || []).find((btn) =>
btn.className.includes(PRIMARY_BUTTON_CLASS),
) as HTMLButtonElement;

expect(saveButton).toBeInTheDocument();

// Verify save button is disabled on initial load (no changes, S3 disabled with -1)
expect(saveButton).toBeDisabled();

// Find the total retention input
const inputs = logsCard?.querySelectorAll('input[type="text"]');
const totalInput = inputs?.[0] as HTMLInputElement;

// Change total retention value to trigger button enable
await user.clear(totalInput);
await user.type(totalInput, '60');

// Button should now be enabled after change
await waitFor(() => {
expect(saveButton).not.toBeDisabled();
});

// Revert to original value (30 days displays as 1 Month)
await user.clear(totalInput);
await user.type(totalInput, '1');

// Button should be disabled again (back to original state)
await waitFor(() => {
expect(saveButton).toBeDisabled();
});
});
});
});

@@ -46,8 +46,7 @@ export const convertHoursValueToRelevantUnit = (
availableUnits?: ITimeUnit[],
): ITimeUnitConversion => {
const unitsToConsider = availableUnits?.length ? availableUnits : TimeUnits;

if (value) {
if (value >= 0) {
for (let idx = unitsToConsider.length - 1; idx >= 0; idx -= 1) {
const timeUnit = unitsToConsider[idx];
const convertedValue = timeUnit.multiplier * value;
@@ -62,7 +61,7 @@ export const convertHoursValueToRelevantUnit = (
}

// Fallback to the first available unit
return { value, timeUnitValue: unitsToConsider[0].value };
return { value: -1, timeUnitValue: unitsToConsider[0].value };
};
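Taken together with the retention changes above, the conversion now treats any non-negative hour value (including 0) as convertible, while a negative sentinel such as the -1 that marks disabled S3 cold storage falls through to the explicit -1 fallback. A hedged behavior sketch; the unit names and multipliers here are assumptions for illustration, not the real TimeUnits table:

// Hypothetical reduction of the post-change control flow.
interface UnitConversion {
  value: number;
  timeUnitValue: string;
}

function convertHours(value: number): UnitConversion {
  // The real loop tries the configured units largest-first; 'days' stands in here.
  if (value >= 0 && value % 24 === 0) {
    return { value: value / 24, timeUnitValue: 'days' };
  }
  if (value >= 0) {
    return { value, timeUnitValue: 'hr' };
  }
  // Negative sentinel (e.g. cold_storage_ttl_days === -1) -> explicit -1 fallback.
  return { value: -1, timeUnitValue: 'hr' };
}

convertHours(48); // { value: 2, timeUnitValue: 'days' }
convertHours(-1); // { value: -1, timeUnitValue: 'hr' }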

export const convertHoursValueToRelevantUnitString = (

@@ -88,17 +88,13 @@ function GridTableComponent({
const newValue = { ...val };
Object.keys(val).forEach((k) => {
const unit = getColumnUnit(k, columnUnits);
// Apply formatting if:
// 1. Column has a unit defined, OR
// 2. decimalPrecision is specified (format all values)
const shouldFormat = unit || decimalPrecision !== undefined;

if (shouldFormat) {
if (unit) {
// the check below takes care of not adding units for rows that have n/a or null values
if (val[k] !== 'n/a' && val[k] !== null) {
newValue[k] = getYAxisFormattedValue(
String(val[k]),
unit || 'none',
unit,
decimalPrecision,
);
} else if (val[k] === null) {

@@ -423,6 +423,7 @@
display: flex;
flex-direction: row;
gap: 14px;
align-items: flex-start;

.section-icon {
display: flex;
@@ -461,7 +462,6 @@
flex-direction: column;
gap: 14px;

width: 150px;
justify-content: flex-end;

.ant-btn {

@@ -1,3 +1,4 @@
/* eslint-disable sonarjs/no-identical-functions */
import { getNonIntegrationDashboardById } from 'mocks-server/__mockdata__/dashboards';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
@@ -7,6 +8,16 @@ import { fireEvent, render, screen, waitFor } from 'tests/test-utils';

import DashboardDescription from '..';

interface MockSafeNavigateReturn {
safeNavigate: jest.MockedFunction<(url: string) => void>;
}

const DASHBOARD_TEST_ID = 'dashboard-title';
const DASHBOARD_TITLE_TEXT = 'thor';
const DASHBOARD_PATH = '/dashboard/4';

const mockSafeNavigate = jest.fn();

jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: jest.fn(),
@@ -26,20 +37,24 @@ jest.mock(
);

jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: jest.fn(),
useSafeNavigate: (): MockSafeNavigateReturn => ({
safeNavigate: mockSafeNavigate,
}),
}));

describe('Dashboard landing page actions header tests', () => {
beforeEach(() => {
mockSafeNavigate.mockClear();
});

it('unlock dashboard should be disabled for integrations created dashboards', async () => {
const mockLocation = {
pathname: `${process.env.FRONTEND_API_ENDPOINT}/dashboard/4`,
pathname: `${process.env.FRONTEND_API_ENDPOINT}${DASHBOARD_PATH}`,
search: '',
};
(useLocation as jest.Mock).mockReturnValue(mockLocation);
const { getByTestId } = render(
<MemoryRouter initialEntries={['/dashboard/4']}>
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
<DashboardProvider>
<DashboardDescription
handle={{
@@ -54,7 +69,9 @@ describe('Dashboard landing page actions header tests', () => {
);

await waitFor(() =>
expect(getByTestId('dashboard-title')).toHaveTextContent('thor'),
expect(getByTestId(DASHBOARD_TEST_ID)).toHaveTextContent(
DASHBOARD_TITLE_TEXT,
),
);

const dashboardSettingsTrigger = getByTestId('options');
@@ -65,9 +82,10 @@ describe('Dashboard landing page actions header tests', () => {

await waitFor(() => expect(lockUnlockButton).toBeDisabled());
});

it('unlock dashboard should not be disabled for non integration created dashboards', async () => {
const mockLocation = {
pathname: `${process.env.FRONTEND_API_ENDPOINT}/dashboard/4`,
pathname: `${process.env.FRONTEND_API_ENDPOINT}${DASHBOARD_PATH}`,
search: '',
};
(useLocation as jest.Mock).mockReturnValue(mockLocation);
@@ -77,7 +95,7 @@ describe('Dashboard landing page actions header tests', () => {
),
);
const { getByTestId } = render(
<MemoryRouter initialEntries={['/dashboard/4']}>
<MemoryRouter initialEntries={[DASHBOARD_PATH]}>
<DashboardProvider>
<DashboardDescription
handle={{
@@ -92,7 +110,9 @@ describe('Dashboard landing page actions header tests', () => {
);

await waitFor(() =>
expect(getByTestId('dashboard-title')).toHaveTextContent('thor'),
expect(getByTestId(DASHBOARD_TEST_ID)).toHaveTextContent(
DASHBOARD_TITLE_TEXT,
),
);

const dashboardSettingsTrigger = getByTestId('options');
@@ -103,4 +123,58 @@ describe('Dashboard landing page actions header tests', () => {

await waitFor(() => expect(lockUnlockButton).not.toBeDisabled());
});

it('should navigate to dashboard list with correct params and exclude variables', async () => {
const dashboardUrlWithVariables = `${DASHBOARD_PATH}?variables=%7B%22var1%22%3A%22value1%22%7D&otherParam=test`;
const mockLocation = {
pathname: DASHBOARD_PATH,
search: '?variables=%7B%22var1%22%3A%22value1%22%7D&otherParam=test',
};

(useLocation as jest.Mock).mockReturnValue(mockLocation);

const { getByText } = render(
<MemoryRouter initialEntries={[dashboardUrlWithVariables]}>
<DashboardProvider>
<DashboardDescription
handle={{
active: false,
enter: (): Promise<void> => Promise.resolve(),
exit: (): Promise<void> => Promise.resolve(),
node: { current: null },
}}
/>
</DashboardProvider>
</MemoryRouter>,
);

await waitFor(() =>
expect(screen.getByTestId(DASHBOARD_TEST_ID)).toHaveTextContent(
DASHBOARD_TITLE_TEXT,
),
);

// Click the dashboard breadcrumb to navigate back to list
const dashboardButton = getByText('Dashboard /');
fireEvent.click(dashboardButton);

// Verify navigation was called with correct URL
expect(mockSafeNavigate).toHaveBeenCalledWith(
'/dashboard?columnKey=updatedAt&order=descend&page=1&search=',
);

// Ensure the URL contains only essential dashboard list params
const calledUrl = mockSafeNavigate.mock.calls[0][0] as string;
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);

// Should have essential dashboard list params
expect(urlParams.get('columnKey')).toBe('updatedAt');
expect(urlParams.get('order')).toBe('descend');
expect(urlParams.get('page')).toBe('1');
expect(urlParams.get('search')).toBe('');

// Should NOT have variables or other dashboard-specific params
expect(urlParams.has('variables')).toBeFalsy();
expect(urlParams.has('relativeTime')).toBeFalsy();
});
});

@@ -13,7 +13,6 @@ import {
} from 'antd';
import logEvent from 'api/common/logEvent';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
import { QueryParams } from 'constants/query';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
@@ -22,7 +21,6 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { isEmpty } from 'lodash-es';
import {
Check,
@@ -116,8 +114,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {

const updateDashboardMutation = useUpdateDashboard();

const urlQuery = useUrlQuery();

const { user } = useAppContext();
const [editDashboard] = useComponentPermission(['edit_dashboard'], user.role);
const [isDashboardSettingsOpen, setIsDashbordSettingsOpen] = useState<boolean>(
@@ -291,13 +287,13 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
}

function goToListPage(): void {
urlQuery.set('columnKey', listSortOrder.columnKey as string);
urlQuery.set('order', listSortOrder.order as string);
urlQuery.set('page', listSortOrder.pagination as string);
urlQuery.set('search', listSortOrder.search as string);
urlQuery.delete(QueryParams.relativeTime);
const urlParams = new URLSearchParams();
urlParams.set('columnKey', listSortOrder.columnKey as string);
urlParams.set('order', listSortOrder.order as string);
urlParams.set('page', listSortOrder.pagination as string);
urlParams.set('search', listSortOrder.search as string);

const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlQuery.toString()}`;
const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
safeNavigate(generatedUrl);
}
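The rewritten goToListPage builds the list URL from a fresh URLSearchParams instead of mutating the current page's query string, which is what keeps dashboard-only params such as variables and relativeTime out of the generated link. A minimal, standalone sketch of the pattern (route and values are illustrative):

// Starting from an empty URLSearchParams guarantees only whitelisted keys survive.
const params = new URLSearchParams();
params.set('columnKey', 'updatedAt');
params.set('order', 'descend');
params.set('page', '1');
params.set('search', '');

const url = `/dashboard?${params.toString()}`;
// '/dashboard?columnKey=updatedAt&order=descend&page=1&search=' - no 'variables' key,
// regardless of what the current location's query string contains.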

@@ -121,6 +121,7 @@ export const tablePanelWidgetQuery = {
stackedBarChart: false,
bucketWidth: 0,
mergeAllActiveQueries: false,
decimalPrecision: 2,
};

export const tablePanelQueryResponse = {

@@ -68,23 +68,39 @@
display: inline-flex;
}

.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
min-width: 48px;
padding: 4px 8px;
.span-percentile-value-container {
.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
font-variant-numeric: lining-nums tabular-nums stacked-fractions
slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;

border-left: 1px solid var(--bg-slate-400);
border-radius: 0 50px 50px 0;

cursor: pointer;
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
min-width: 48px;
padding-left: 8px;
padding-right: 8px;

border-left: 1px solid var(--bg-slate-400);

cursor: pointer;

display: inline-flex;
align-items: center;
word-break: normal;
gap: 6px;
}

&.span-percentile-value-container-open {
.span-percentile-value {
border: 1px solid var(--bg-slate-400);
background: var(--bg-slate-400);
}
}
}
}

@@ -605,6 +621,15 @@
.description {
.item {
.span-name-wrapper {
.span-percentile-value-container {
&.span-percentile-value-container-open {
.span-percentile-value {
border: 1px solid var(--bg-vanilla-200);
background: var(--bg-vanilla-200);
}
}
}

.span-percentile-value {
color: var(--text-sakura-400, #f56c87);
border-left: 1px solid var(--bg-slate-300);

@@ -31,6 +31,7 @@ import {
Bookmark,
Check,
ChevronDown,
ChevronUp,
Link2,
Loader2,
PanelRight,
@@ -387,17 +388,24 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
setShouldUpdateUserPreference(false);
}
},
keepPreviousData: false,
cacheTime: 0, // no cache
});

// Prod Req - Wait for 2 seconds before fetching span percentile data on initial load
useEffect(() => {
setSpanPercentileData(null);
setIsSpanPercentilesOpen(false);
setInitialWaitCompleted(false);

const timer = setTimeout(() => {
setInitialWaitCompleted(true);
}, 2000); // 2-second delay

return (): void => clearTimeout(timer); // Cleanup on re-run or unmount
return (): void => {
// clean the old state around span percentile data
clearTimeout(timer); // Cleanup on re-run or unmount
};
}, [selectedSpan?.spanId]);
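The effect above resets the percentile state whenever the selected span changes and only flips initialWaitCompleted after a 2-second timer, with the cleanup cancelling a still-pending timer. A hedged, framework-free sketch of that timer-plus-cleanup contract (the names here are hypothetical):

// If the returned cancel function runs before the delay elapses,
// onReady never fires - mirroring the effect cleanup on span change/unmount.
function deferReady(onReady: () => void, delayMs = 2000): () => void {
  const timer = setTimeout(onReady, delayMs);
  return (): void => clearTimeout(timer);
}

const cancel = deferReady(() => {
  // in the component, this is where setInitialWaitCompleted(true) runs
});
cancel(); // selecting another span within 2s cancels the pending fetch gate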

useEffect(() => {
@@ -538,6 +546,11 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
initialWaitCompleted,
]);

const loadingSpanPercentilesData =
isLoadingSpanPercentilesData || isFetchingSpanPercentilesData;

const spanPercentileValue = Math.floor(spanPercentileData?.percentile || 0);

return (
<>
<section className="header">
@@ -566,25 +579,43 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
</Typography.Text>
</Tooltip>

{isLoadingSpanPercentilesData && (
{loadingSpanPercentilesData && (
<div className="loading-spinner-container">
<Loader2 size={16} className="animate-spin" />
</div>
)}

{!isLoadingSpanPercentilesData && spanPercentileData && (
{!loadingSpanPercentilesData && spanPercentileData && (
<Tooltip
title={isSpanPercentilesOpen ? '' : spanPercentileTooltipText}
placement="bottomRight"
overlayClassName="span-percentile-tooltip"
arrow={false}
>
<Typography.Text
className="span-percentile-value"
onClick={(): void => setIsSpanPercentilesOpen((prev) => !prev)}
<div
className={`span-percentile-value-container ${
isSpanPercentilesOpen
? 'span-percentile-value-container-open'
: 'span-percentile-value-container-closed'
}`}
>
p{Math.floor(spanPercentileData?.percentile || 0)}
</Typography.Text>
<Typography.Text
className="span-percentile-value"
onClick={(): void => setIsSpanPercentilesOpen((prev) => !prev)}
disabled={loadingSpanPercentilesData}
>
<span className="span-percentile-value-text">
p{spanPercentileValue}
</span>

{!isSpanPercentilesOpen && (
<ChevronDown size={16} className="span-percentile-value-icon" />
)}
{isSpanPercentilesOpen && (
<ChevronUp size={16} className="span-percentile-value-icon" />
)}
</Typography.Text>
</div>
</Tooltip>
)}
</div>

@@ -18,7 +18,6 @@ interface UseVariablesFromUrlReturn {
name: string,
selectedValue: IDashboardVariable['selectedValue'],
) => void;
clearUrlVariables: () => void;
}

const useVariablesFromUrl = (): UseVariablesFromUrlReturn => {
@@ -68,15 +67,6 @@ const useVariablesFromUrl = (): UseVariablesFromUrlReturn => {
[history, urlQuery],
);

const clearUrlVariables = useCallback((): void => {
const params = new URLSearchParams(urlQuery.toString());
params.delete(QueryParams.variables);

history.replace({
search: params.toString(),
});
}, [history, urlQuery]);

const updateUrlVariable = useCallback(
(name: string, selectedValue: IDashboardVariable['selectedValue']): void => {
const currentVariables = getUrlVariables();
@@ -95,7 +85,6 @@ const useVariablesFromUrl = (): UseVariablesFromUrlReturn => {
getUrlVariables,
setUrlVariables,
updateUrlVariable,
clearUrlVariables,
};
};

@@ -662,21 +662,23 @@ const generateTableColumns = (
*
* @param columnKey - The column identifier (could be queryName.expression or queryName)
* @param columnUnits - The column units mapping
* @returns The unit string or undefined if not found
* @returns The unit string (none if the unit is set to empty string) or undefined if not found
*/
export const getColumnUnit = (
columnKey: string,
columnUnits: Record<string, string>,
): string | undefined => {
// First try the exact match (new syntax: queryName.expression)
if (columnUnits[columnKey]) {
return columnUnits[columnKey];
if (columnUnits[columnKey] !== undefined) {
return columnUnits[columnKey] || 'none';
}

// Fallback to old syntax: extract queryName from queryName.expression
if (columnKey.includes('.')) {
const queryName = columnKey.split('.')[0];
return columnUnits[queryName];
if (columnUnits[queryName] !== undefined) {
return columnUnits[queryName] || 'none';
}
}

return undefined;
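The updated lookup distinguishes a unit explicitly set to the empty string (normalized to 'none') from no unit configured at all (undefined), trying the exact queryName.expression key first and the legacy queryName key second. A short usage sketch derived from the logic above (the sample mapping is illustrative):

const columnUnits = { 'A.count()': '', B: 'ms' };

getColumnUnit('A.count()', columnUnits); // '' is defined, so it normalizes to 'none'
getColumnUnit('B.sum(x)', columnUnits); // exact key misses, falls back to 'B' -> 'ms'
getColumnUnit('C.avg(y)', columnUnits); // no entry under either syntax -> undefined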

@@ -285,10 +285,11 @@ export const getUPlotChartOptions = ({
cursor: {
lock: false,
focus: {
prox: 1e6,
prox: 25,
bias: 1,
},
points: {
one: true,
size: (u, seriesIdx): number => u.series[seriesIdx].points.size * 3,
width: (u, seriesIdx, size): number => size / 4,
stroke: (u, seriesIdx): string =>
@@ -394,14 +395,25 @@ export const getUPlotChartOptions = ({
hooks: {
draw: [
(u): void => {
if (isAnomalyRule) {
if (isAnomalyRule || !thresholds?.length) {
return;
}

thresholds?.forEach((threshold) => {
const { ctx } = u;
const { left: plotLeft, width: plotWidth } = u.bbox;
const plotRight = plotLeft + plotWidth;
const canvasHeight = ctx.canvas.height;
const threshold90Percent = canvasHeight * 0.9;

// Single save/restore for all thresholds
ctx.save();
ctx.lineWidth = 2;
ctx.setLineDash([10, 5]);

for (let i = 0; i < thresholds.length; i++) {
const threshold = thresholds[i];
if (threshold.thresholdValue !== undefined) {
const { ctx } = u;
ctx.save();
const color = threshold.thresholdColor || 'red';
const yPos = u.valToPos(
convertValue(
threshold.thresholdValue,
@@ -411,35 +423,28 @@ export const getUPlotChartOptions = ({
'y',
true,
);
ctx.strokeStyle = threshold.thresholdColor || 'red';
ctx.lineWidth = 2;
ctx.setLineDash([10, 5]);

// Draw threshold line
ctx.strokeStyle = color;
ctx.beginPath();
const plotLeft = u.bbox.left; // left edge of the plot area
const plotRight = plotLeft + u.bbox.width; // right edge of the plot area
ctx.moveTo(plotLeft, yPos);
ctx.lineTo(plotRight, yPos);
ctx.stroke();
// Text configuration

// Draw threshold label if present
if (threshold.thresholdLabel) {
const text = threshold.thresholdLabel;
const textX = plotRight - ctx.measureText(text).width - 20;
const canvasHeight = ctx.canvas.height;
const textWidth = ctx.measureText(threshold.thresholdLabel).width;
const textX = plotRight - textWidth - 20;
const yposHeight = canvasHeight - yPos;
const isHeightGreaterThan90Percent = canvasHeight * 0.9 < yposHeight;
// Adjust textY based on the condition
let textY;
if (isHeightGreaterThan90Percent) {
textY = yPos + 15; // Below the threshold line
} else {
textY = yPos - 15; // Above the threshold line
}
ctx.fillStyle = threshold.thresholdColor || 'red';
ctx.fillText(text, textX, textY);
const textY = yposHeight > threshold90Percent ? yPos + 15 : yPos - 15;

ctx.fillStyle = color;
ctx.fillText(threshold.thresholdLabel, textX, textY);
}
ctx.restore();
}
});
}

ctx.restore();
},
],
setSelect: [
@@ -555,19 +560,22 @@ export const getUPlotChartOptions = ({
// Get the current text content
const legendText = seriesLabels[index];

// Clear the th content and rebuild it
thElement.innerHTML = '';
// Use DocumentFragment to batch DOM operations
const fragment = document.createDocumentFragment();

// Add back the marker
if (markerClone) {
thElement.appendChild(markerClone);
fragment.appendChild(markerClone);
}

// Create text wrapper
const textSpan = document.createElement('span');
textSpan.className = 'legend-text';
textSpan.textContent = legendText;
thElement.appendChild(textSpan);
fragment.appendChild(textSpan);

// Replace the children in a single operation
thElement.replaceChildren(fragment);

// Setup tooltip functionality - check truncation on hover
let tooltipElement: HTMLElement | null = null;

@@ -38,6 +38,31 @@ function getTooltipBaseValue(
return data[index][idx];
}

function sortTooltipContentBasedOnValue(
tooltipDataObj: Record<string, UplotTooltipDataProps>,
): Record<string, UplotTooltipDataProps> {
const entries = Object.entries(tooltipDataObj);

// Separate focused and non-focused entries in a single pass
const focusedEntries: [string, UplotTooltipDataProps][] = [];
const nonFocusedEntries: [string, UplotTooltipDataProps][] = [];

for (let i = 0; i < entries.length; i++) {
const entry = entries[i];
if (entry[1].focus) {
focusedEntries.push(entry);
} else {
nonFocusedEntries.push(entry);
}
}

// Sort non-focused entries by value (descending)
nonFocusedEntries.sort((a, b) => b[1].value - a[1].value);

// Combine with focused entries on top
return Object.fromEntries(focusedEntries.concat(nonFocusedEntries));
}
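The hoisted sort keeps any focused (hovered) series at the top of the tooltip and orders the rest by value descending, in a single partition pass. A hedged, self-contained reduction with the props narrowed to the two fields the sort actually reads:

type MinimalTooltipProps = { value: number; focus: boolean };

function sortByFocusThenValue(
  obj: Record<string, MinimalTooltipProps>,
): Record<string, MinimalTooltipProps> {
  const entries = Object.entries(obj);
  const focused = entries.filter(([, v]) => v.focus);
  const rest = entries
    .filter(([, v]) => !v.focus)
    .sort((a, b) => b[1].value - a[1].value); // descending by value
  return Object.fromEntries(focused.concat(rest));
}

sortByFocusThenValue({
  a: { value: 10, focus: false },
  b: { value: 99, focus: false },
  c: { value: 1, focus: true },
});
// Key order in the result: c (focused), then b (99), then a (10).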

const generateTooltipContent = (
seriesList: any[],
data: any[],
@@ -57,23 +82,31 @@ const generateTooltipContent = (
): HTMLElement => {
const container = document.createElement('div');
container.classList.add('tooltip-container');
const overlay = document.getElementById('overlay');
let tooltipCount = 0;

let tooltipTitle = '';
const formattedData: Record<string, UplotTooltipDataProps> = {};
const duplicatedLegendLabels: Record<string, true> = {};

function sortTooltipContentBasedOnValue(
tooltipDataObj: Record<string, UplotTooltipDataProps>,
): Record<string, UplotTooltipDataProps> {
const entries = Object.entries(tooltipDataObj);
entries.sort((a, b) => b[1].value - a[1].value);
return Object.fromEntries(entries);
// Pre-build a label-to-series map for O(1) lookup instead of O(n) search
let seriesColorMap: Map<string, string> | null = null;
if (isBillingUsageGraphs && series) {
seriesColorMap = new Map();
for (let i = 0; i < series.length; i++) {
const item = series[i];
if (item.label) {
const fillColor = get(item, '_fill');
if (fillColor) {
seriesColorMap.set(item.label, fillColor);
}
}
}
}

if (Array.isArray(series) && series.length > 0) {
series.forEach((item, index) => {
for (let index = 0; index < series.length; index++) {
const item = series[index];

if (index === 0) {
if (isBillingUsageGraphs) {
tooltipTitle = dayjs(data[0][idx] * 1000)
@@ -114,15 +147,12 @@ const generateTooltipContent = (
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
);

// in case of billing graph pick colors from the series options
if (isBillingUsageGraphs) {
let clr;
series.forEach((item) => {
if (item.label === label) {
clr = get(item, '_fill');
}
});
color = clr ?? color;
// O(1) lookup instead of O(n) search for billing graph colors
if (isBillingUsageGraphs && seriesColorMap) {
const billingColor = seriesColorMap.get(label);
if (billingColor) {
color = billingColor;
}
}

let tooltipItemLabel = label;
@@ -130,10 +160,7 @@ const generateTooltipContent = (
if (Number.isFinite(value)) {
const tooltipValue = getToolTipValue(value, yAxisUnit, decimalPrecision);
const dataIngestedFormated = getToolTipValue(dataIngested);
if (
duplicatedLegendLabels[label] ||
Object.prototype.hasOwnProperty.call(formattedData, label)
) {
if (duplicatedLegendLabels[label] || label in formattedData) {
duplicatedLegendLabels[label] = true;
const tempDataObj = formattedData[label];

@@ -170,15 +197,11 @@ const generateTooltipContent = (
formattedData[tooltipItemLabel] = dataObj;
}
}
});
}
}

// Show tooltip only if at least one series has a value at the hovered timestamp
// Early return if no valid data points - avoids unnecessary DOM manipulation
if (tooltipCount <= 0) {
if (overlay && overlay.style.display === 'block') {
overlay.style.display = 'none';
}

return container;
}

@@ -187,48 +210,42 @@ const generateTooltipContent = (
UplotTooltipDataProps
> = sortTooltipContentBasedOnValue(formattedData);

const div = document.createElement('div');
div.classList.add('tooltip-content-row');
div.textContent = isHistogramGraphs ? '' : tooltipTitle;
div.classList.add('tooltip-content-header');
container.appendChild(div);
const headerDiv = document.createElement('div');
headerDiv.classList.add('tooltip-content-row', 'tooltip-content-header');
headerDiv.textContent = isHistogramGraphs ? '' : tooltipTitle;
container.appendChild(headerDiv);

const sortedKeys = Object.keys(sortedData);
// Use DocumentFragment for better performance when adding multiple elements
const fragment = document.createDocumentFragment();

if (Array.isArray(sortedKeys) && sortedKeys.length > 0) {
sortedKeys.forEach((key) => {
if (sortedData[key]) {
const { textContent, color, focus } = sortedData[key];
const div = document.createElement('div');
div.classList.add('tooltip-content-row');
div.classList.add('tooltip-content');
const squareBox = document.createElement('div');
squareBox.classList.add('pointSquare');
const sortedValues = Object.values(sortedData);

squareBox.style.borderColor = color;
for (let i = 0; i < sortedValues.length; i++) {
const { textContent, color, focus } = sortedValues[i];

const text = document.createElement('div');
text.classList.add('tooltip-data-point');
const div = document.createElement('div');
div.classList.add('tooltip-content-row', 'tooltip-content');

text.textContent = textContent;
text.style.color = color;
const squareBox = document.createElement('div');
squareBox.classList.add('pointSquare');
squareBox.style.borderColor = color;

if (focus) {
text.classList.add('focus');
} else {
text.classList.remove('focus');
}
const text = document.createElement('div');
text.classList.add('tooltip-data-point');
text.textContent = textContent;
text.style.color = color;

div.appendChild(squareBox);
div.appendChild(text);
if (focus) {
text.classList.add('focus');
}

container.appendChild(div);
}
});
div.appendChild(squareBox);
div.appendChild(text);
fragment.appendChild(div);
}

if (overlay && overlay.style.display === 'none') {
overlay.style.display = 'block';
if (fragment.hasChildNodes()) {
container.appendChild(fragment);
}

return container;
@@ -266,80 +283,148 @@ const tooltipPlugin = ({
ToolTipPluginProps): any => {
let over: HTMLElement;
let bound: HTMLElement;
let bLeft: any;
let bTop: any;
// Cache bounding box to avoid recalculating on every cursor move
let cachedBBox: DOMRect | null = null;
let isActive = false;
let overlay: HTMLElement | null = null;

// Pre-compute apiResult once
const apiResult = apiResponse?.data?.result || [];

// Sync bounds and cache the result
const syncBounds = (): void => {
const bbox = over.getBoundingClientRect();
bLeft = bbox.left;
bTop = bbox.top;
if (over) {
cachedBBox = over.getBoundingClientRect();
}
};

let overlay = document.getElementById('overlay');
// Create overlay once and reuse it
const initOverlay = (): void => {
if (!overlay) {
overlay = document.getElementById('overlay');
if (!overlay) {
overlay = document.createElement('div');
overlay.id = 'overlay';
overlay.style.cssText = 'display: none; position: absolute;';
document.body.appendChild(overlay);
}
}
};

if (!overlay) {
overlay = document.createElement('div');
overlay.id = 'overlay';
overlay.style.display = 'none';
overlay.style.position = 'absolute';
document.body.appendChild(overlay);
}
const showOverlay = (): void => {
if (overlay && overlay.style.display === 'none') {
overlay.style.display = 'block';
}
};

const apiResult = apiResponse?.data?.result || [];
const hideOverlay = (): void => {
if (overlay && overlay.style.display === 'block') {
overlay.style.display = 'none';
}
};

const plotEnter = (): void => {
isActive = true;
showOverlay();
};

const plotLeave = (): void => {
isActive = false;
hideOverlay();
};

// Cleanup function to remove event listeners
const cleanup = (): void => {
if (over) {
over.removeEventListener('mouseenter', plotEnter);
over.removeEventListener('mouseleave', plotLeave);
}
};

return {
hooks: {
init: (u: any): void => {
over = u?.over;
bound = over;
over.onmouseenter = (): void => {
if (overlay) {
overlay.style.display = 'block';
}
};
over.onmouseleave = (): void => {
if (overlay) {
overlay.style.display = 'none';
}
};

// Initialize overlay once during init
initOverlay();

// Initial bounds sync
syncBounds();

over.addEventListener('mouseenter', plotEnter);
over.addEventListener('mouseleave', plotLeave);
},
setSize: (): void => {
// Re-sync bounds when size changes
syncBounds();
},
// Cache bounding box on syncRect for better performance
syncRect: (u: any, rect: DOMRect): void => {
cachedBBox = rect;
},
setCursor: (u: {
cursor: { left: any; top: any; idx: any };
data: any[];
series: uPlot.Options['series'];
}): void => {
if (overlay) {
overlay.textContent = '';
const { left, top, idx } = u.cursor;

if (Number.isInteger(idx)) {
const anchor = { left: left + bLeft, top: top + bTop };
const content = generateTooltipContent(
apiResult,
u.data,
idx,
isDarkMode,
yAxisUnit,
decimalPrecision,
u.series,
isBillingUsageGraphs,
isHistogramGraphs,
isMergedSeries,
stackBarChart,
timezone,
colorMapping,
query,
);
if (customTooltipElement) {
content.appendChild(customTooltipElement);
}
overlay.appendChild(content);
placement(overlay, anchor, 'right', 'start', { bound });
}
if (!overlay) {
return;
}

const { left, top, idx } = u.cursor;

// Early return if not active or no valid index
if (!isActive || !Number.isInteger(idx)) {
if (isActive) {
// Clear tooltip content efficiently using replaceChildren
overlay.replaceChildren();
}
return;
}

// Use cached bounding box if available
const bbox = cachedBBox || over.getBoundingClientRect();
const anchor = {
left: left + bbox.left,
top: top + bbox.top,
};

const content = generateTooltipContent(
apiResult,
u.data,
idx,
isDarkMode,
yAxisUnit,
decimalPrecision,
u.series,
isBillingUsageGraphs,
isHistogramGraphs,
isMergedSeries,
stackBarChart,
timezone,
colorMapping,
query,
);

// Only show tooltip if there's actual content
if (content.children.length > 1) {
if (customTooltipElement) {
content.appendChild(customTooltipElement);
}
// Clear and set new content in one operation
overlay.replaceChildren(content);
placement(overlay, anchor, 'right', 'start', { bound });
showOverlay();
} else {
hideOverlay();
}
},
destroy: (): void => {
// Cleanup on destroy
cleanup();
hideOverlay();
},
},
};

@@ -202,11 +202,7 @@ export function DashboardProvider({
updateLocalStorageDashboardVariables,
} = useDashboardVariablesFromLocalStorage(dashboardId);

const {
getUrlVariables,
updateUrlVariable,
clearUrlVariables,
} = useVariablesFromUrl();
const { getUrlVariables, updateUrlVariable } = useVariablesFromUrl();

const updatedTimeRef = useRef<Dayjs | null>(null); // Using ref to store the updated time
const modalRef = useRef<any>(null);
@@ -218,14 +214,6 @@ export function DashboardProvider({

const [isDashboardFetching, setIsDashboardFetching] = useState<boolean>(false);

// Clear variable configs when not on dashboard pages
useEffect(() => {
const isOnDashboardPage = !!isDashboardPage || !!isDashboardWidgetPage;
if (!isOnDashboardPage) {
clearUrlVariables();
}
}, [isDashboardPage, isDashboardWidgetPage, clearUrlVariables]);

const mergeDBWithLocalStorage = (
data: Dashboard,
localStorageVariables: any,

@@ -121,7 +121,6 @@ function renderWithDashboardProvider(
// Mock URL variables hook
const mockGetUrlVariables = jest.fn();
const mockUpdateUrlVariable = jest.fn();
const mockClearUrlVariables = jest.fn();
const mockSetUrlVariables = jest.fn();

jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
@@ -129,7 +128,6 @@ jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
default: jest.fn(() => ({
getUrlVariables: mockGetUrlVariables,
updateUrlVariable: mockUpdateUrlVariable,
clearUrlVariables: mockClearUrlVariables,
setUrlVariables: mockSetUrlVariables,
})),
}));
@@ -524,16 +522,6 @@ describe('Dashboard Provider - URL Variables Integration', () => {
expect(parsedVariables.environment.allSelected).toBe(false);
});
});

it('should clear URL variables when not on dashboard page', async () => {
mockUseRouteMatch.mockReturnValue(null); // Not on dashboard page

renderWithDashboardProvider('/other-page', null);

await waitFor(() => {
expect(mockClearUrlVariables).toHaveBeenCalled();
});
});
});

describe('Variable Value Normalization', () => {

@@ -99,9 +99,9 @@ func transformToSpanPercentileResponse(queryResult *qbtypes.QueryRangeResponse)
return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, "no spans found matching the specified criteria")
}

description := fmt.Sprintf("faster than %.1f%% of spans", position)
description := fmt.Sprintf("slower than %.1f%% of spans", position)
if position < 50 {
description = fmt.Sprintf("slower than %.1f%% of spans", 100-position)
description = fmt.Sprintf("faster than %.1f%% of spans", 100-position)
}

return &spanpercentiletypes.SpanPercentileResponse{

@@ -54,6 +54,8 @@ func (r *aggExprRewriter) Rewrite(
expr string,
rateInterval uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
startNs uint64,
endNs uint64,
) (string, []any, error) {

wrapped := fmt.Sprintf("SELECT %s", expr)
@@ -83,6 +85,8 @@ func (r *aggExprRewriter) Rewrite(
r.conditionBuilder,
r.jsonBodyPrefix,
r.jsonKeyToKey,
startNs,
endNs,
)
// Rewrite the first select item (our expression)
if err := sel.SelectItems[0].Accept(visitor); err != nil {
@@ -101,12 +105,14 @@ func (r *aggExprRewriter) RewriteMulti(
exprs []string,
rateInterval uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
startNs uint64,
endNs uint64,
) ([]string, [][]any, error) {
out := make([]string, len(exprs))
var errs []error
var chArgsList [][]any
for i, e := range exprs {
w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys, startNs, endNs)
if err != nil {
errs = append(errs, err)
out[i] = e
@@ -134,6 +140,8 @@ type exprVisitor struct {
Modified bool
chArgs []any
isRate bool
startNs uint64
endNs uint64
}

func newExprVisitor(
@@ -144,6 +152,8 @@ func newExprVisitor(
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
startNs uint64,
endNs uint64,
) *exprVisitor {
return &exprVisitor{
logger: logger,
@@ -153,6 +163,8 @@ func newExprVisitor(
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
startNs: startNs,
endNs: endNs,
}
}

@@ -200,7 +212,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
FullTextColumn: v.fullTextColumn,
JsonBodyPrefix: v.jsonBodyPrefix,
JsonKeyToKey: v.jsonKeyToKey,
},
}, v.startNs, v.endNs,
)
if err != nil {
return err

@@ -45,7 +45,7 @@ func CollisionHandledFinalExpr(

addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
if err != nil {
return err
}

@@ -48,6 +48,8 @@ func (b *defaultConditionBuilder) ConditionFor(
op qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {

if key.FieldContext != telemetrytypes.FieldContextResource {

@@ -206,7 +206,7 @@ func TestConditionBuilder(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb, 0, 0)
sb.Where(cond)

if tc.expectedErr != nil {

@@ -169,7 +169,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// there is no need for "key" not found error for resource filtering
IgnoreNotFoundKeys: true,
Variables: variables,
})
}, start, end)

if err != nil {
return err

@@ -42,6 +42,8 @@ type filterExpressionVisitor struct {
variables map[string]qbtypes.VariableItem

keysWithWarnings map[string]bool
startNs uint64
endNs uint64
}

type FilterExprVisitorOpts struct {
@@ -58,6 +60,8 @@ type FilterExprVisitorOpts struct {
SkipFunctionCalls bool
IgnoreNotFoundKeys bool
Variables map[string]qbtypes.VariableItem
StartNs uint64
EndNs uint64
}

// newFilterExpressionVisitor creates a new filterExpressionVisitor
@@ -77,6 +81,8 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
ignoreNotFoundKeys: opts.IgnoreNotFoundKeys,
variables: opts.Variables,
keysWithWarnings: make(map[string]bool),
startNs: opts.StartNs,
endNs: opts.EndNs,
}
}

@@ -87,7 +93,7 @@ type PreparedWhereClause struct {
}

// PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWhereClause, error) {
func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64, endNs uint64) (*PreparedWhereClause, error) {
// Setup the ANTLR parsing pipeline
input := antlr.NewInputStream(query)
lexer := grammar.NewFilterQueryLexer(input)
@@ -120,6 +126,8 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWher
}
tokens.Reset()

opts.StartNs = startNs
opts.EndNs = endNs
visitor := newFilterExpressionVisitor(opts)

// Handle syntax errors
@@ -311,7 +319,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
// create a full text search condition on the body field

keyText := keyCtx.GetText()
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder)
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
@@ -331,7 +339,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
v.errors = append(v.errors, fmt.Sprintf("unsupported value type: %s", valCtx.GetText()))
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
@@ -375,7 +383,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
if err != nil {
return ""
}
@@ -443,7 +451,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, values, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, values, v.builder, v.startNs, v.endNs)
if err != nil {
return ""
}
@@ -475,7 +483,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext

var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
if err != nil {
return ""
}
@@ -556,7 +564,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext

var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
return ""
@@ -635,7 +643,7 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
v.errors = append(v.errors, "full text search is not supported")
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""

pkg/sqlstore/sqlitesqlstore/formatter.go (new file, 107 lines)
@@ -0,0 +1,107 @@
package sqlitesqlstore

import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun/schema"
)

type formatter struct {
bunf schema.Formatter
}

func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
return &formatter{bunf: schema.NewFormatter(dialect)}
}

func (f *formatter) JSONExtractString(column, path string) []byte {
var sql []byte
sql = append(sql, "json_extract("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, ", "...)
sql = schema.Append(f.bunf, sql, path)
sql = append(sql, ")"...)
return sql
}

func (f *formatter) JSONType(column, path string) []byte {
var sql []byte
sql = append(sql, "json_type("...)
sql = f.bunf.AppendIdent(sql, column)
sql = append(sql, ", "...)
sql = schema.Append(f.bunf, sql, path)
sql = append(sql, ")"...)
return sql
}

func (f *formatter) JSONIsArray(column, path string) []byte {
var sql []byte
sql = append(sql, f.JSONType(column, path)...)
sql = append(sql, " = "...)
sql = schema.Append(f.bunf, sql, "array")
return sql
}

func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "json_each("...)
sql = f.bunf.AppendIdent(sql, column)
if path != "$" && path != "" {
sql = append(sql, ", "...)
sql = schema.Append(f.bunf, sql, path)
}
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)

return sql, append([]byte(alias), ".value"...)
}

func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
return f.JSONArrayElements(column, path, alias)
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
var sql []byte
sql = append(sql, "json_each("...)
sql = f.bunf.AppendIdent(sql, column)
if path != "$" && path != "" {
sql = append(sql, ", "...)
sql = schema.Append(f.bunf, sql, path)
}
sql = append(sql, ") AS "...)
sql = f.bunf.AppendIdent(sql, alias)

return sql, append([]byte(alias), ".key"...)
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
var sql []byte
sql = append(sql, "json_group_array("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
var sql []byte
sql = append(sql, "json_array("...)
for idx, value := range values {
if idx > 0 {
sql = append(sql, ", "...)
}
sql = schema.Append(f.bunf, sql, value)
}
sql = append(sql, ')')
return sql
}

func (f *formatter) TextToJsonColumn(column string) []byte {
return f.bunf.AppendIdent([]byte{}, column)
}

func (f *formatter) LowerExpression(expression string) []byte {
var sql []byte
sql = append(sql, "lower("...)
sql = append(sql, expression...)
sql = append(sql, ')')
return sql
}
|
||||
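As an aside (not part of the diff): a minimal sketch of how the two return values of JSONArrayElements are meant to be combined, in the spirit of the tests below. The rules table and the severity filter are illustrative only; newFormatter and the method come from the file above, and the snippet assumes "fmt" and the sqlitedialect import from the test file.

    // Sketch; would live in package sqlitesqlstore next to the tests.
    func ExampleJSONArrayElements_sketch() {
        f := newFormatter(sqlitedialect.New())
        // fromExpr goes into the FROM clause, valueRef into SELECT/WHERE.
        fromExpr, valueRef := f.JSONArrayElements("labels", "$.tags", "tag")
        fmt.Printf("SELECT id FROM rules, %s WHERE %s = 'critical'\n", fromExpr, valueRef)
        // prints: SELECT id FROM rules, json_each("labels", '$.tags') AS "tag" WHERE tag.value = 'critical'
    }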
422 pkg/sqlstore/sqlitesqlstore/formatter_test.go Normal file
@@ -0,0 +1,422 @@
package sqlitesqlstore

import (
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/uptrace/bun/dialect/sqlitedialect"
)

func TestJSONExtractString(t *testing.T) {
    tests := []struct {
        name     string
        column   string
        path     string
        expected string
    }{
        {
            name:     "simple path",
            column:   "data",
            path:     "$.field",
            expected: `json_extract("data", '$.field')`,
        },
        {
            name:     "nested path",
            column:   "metadata",
            path:     "$.user.name",
            expected: `json_extract("metadata", '$.user.name')`,
        },
        {
            name:     "root path",
            column:   "json_col",
            path:     "$",
            expected: `json_extract("json_col", '$')`,
        },
        {
            name:     "empty path",
            column:   "json_col",
            path:     "",
            expected: `json_extract("json_col", '')`,
        },
        {
            name:     "array index path",
            column:   "items",
            path:     "$.list[0]",
            expected: `json_extract("items", '$.list[0]')`,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got := string(f.JSONExtractString(tt.column, tt.path))
            assert.Equal(t, tt.expected, got)
        })
    }
}

func TestJSONType(t *testing.T) {
    tests := []struct {
        name     string
        column   string
        path     string
        expected string
    }{
        {
            name:     "simple path",
            column:   "data",
            path:     "$.field",
            expected: `json_type("data", '$.field')`,
        },
        {
            name:     "nested path",
            column:   "metadata",
            path:     "$.user.age",
            expected: `json_type("metadata", '$.user.age')`,
        },
        {
            name:     "root path",
            column:   "json_col",
            path:     "$",
            expected: `json_type("json_col", '$')`,
        },
        {
            name:     "empty path",
            column:   "json_col",
            path:     "",
            expected: `json_type("json_col", '')`,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got := string(f.JSONType(tt.column, tt.path))
            assert.Equal(t, tt.expected, got)
        })
    }
}

func TestJSONIsArray(t *testing.T) {
    tests := []struct {
        name     string
        column   string
        path     string
        expected string
    }{
        {
            name:     "simple path",
            column:   "data",
            path:     "$.items",
            expected: `json_type("data", '$.items') = 'array'`,
        },
        {
            name:     "nested path",
            column:   "metadata",
            path:     "$.user.tags",
            expected: `json_type("metadata", '$.user.tags') = 'array'`,
        },
        {
            name:     "root path",
            column:   "json_col",
            path:     "$",
            expected: `json_type("json_col", '$') = 'array'`,
        },
        {
            name:     "empty path",
            column:   "json_col",
            path:     "",
            expected: `json_type("json_col", '') = 'array'`,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got := string(f.JSONIsArray(tt.column, tt.path))
            assert.Equal(t, tt.expected, got)
        })
    }
}

func TestJSONArrayElements(t *testing.T) {
    tests := []struct {
        name     string
        column   string
        path     string
        alias    string
        expected string
    }{
        {
            name:     "root path with dollar sign",
            column:   "data",
            path:     "$",
            alias:    "elem",
            expected: `json_each("data") AS "elem"`,
        },
        {
            name:     "root path empty",
            column:   "data",
            path:     "",
            alias:    "elem",
            expected: `json_each("data") AS "elem"`,
        },
        {
            name:     "nested path",
            column:   "metadata",
            path:     "$.items",
            alias:    "item",
            expected: `json_each("metadata", '$.items') AS "item"`,
        },
        {
            name:     "deeply nested path",
            column:   "json_col",
            path:     "$.user.tags",
            alias:    "tag",
            expected: `json_each("json_col", '$.user.tags') AS "tag"`,
        },
        {
            name:     "empty path falls back to root",
            column:   "metadata",
            path:     "",
            alias:    "item",
            expected: `json_each("metadata") AS "item"`,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
            assert.Equal(t, tt.expected, string(got))
        })
    }
}

func TestJSONArrayOfStrings(t *testing.T) {
    tests := []struct {
        name     string
        column   string
        path     string
        alias    string
        expected string
    }{
        {
            name:     "root path with dollar sign",
            column:   "data",
            path:     "$",
            alias:    "str",
            expected: `json_each("data") AS "str"`,
        },
        {
            name:     "root path empty",
            column:   "data",
            path:     "",
            alias:    "str",
            expected: `json_each("data") AS "str"`,
        },
        {
            name:     "nested path",
            column:   "metadata",
            path:     "$.strings",
            alias:    "s",
            expected: `json_each("metadata", '$.strings') AS "s"`,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
            assert.Equal(t, tt.expected, string(got))
        })
    }
}

func TestJSONKeys(t *testing.T) {
    tests := []struct {
        name     string
        column   string
        path     string
        alias    string
        expected string
    }{
        {
            name:     "root path with dollar sign",
            column:   "data",
            path:     "$",
            alias:    "k",
            expected: `json_each("data") AS "k"`,
        },
        {
            name:     "root path empty",
            column:   "data",
            path:     "",
            alias:    "k",
            expected: `json_each("data") AS "k"`,
        },
        {
            name:     "nested path",
            column:   "metadata",
            path:     "$.object",
            alias:    "key",
            expected: `json_each("metadata", '$.object') AS "key"`,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
            assert.Equal(t, tt.expected, string(got))
        })
    }
}

func TestJSONArrayAgg(t *testing.T) {
    tests := []struct {
        name       string
        expression string
        expected   string
    }{
        {
            name:       "simple column",
            expression: "id",
            expected:   "json_group_array(id)",
        },
        {
            name:       "expression with DISTINCT",
            expression: "DISTINCT name",
            expected:   "json_group_array(DISTINCT name)",
        },
        {
            name:       "complex expression",
            expression: "json_extract(data, '$.field')",
            expected:   "json_group_array(json_extract(data, '$.field'))",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got := string(f.JSONArrayAgg(tt.expression))
            assert.Equal(t, tt.expected, got)
        })
    }
}

func TestJSONArrayLiteral(t *testing.T) {
    tests := []struct {
        name     string
        values   []string
        expected string
    }{
        {
            name:     "empty array",
            values:   []string{},
            expected: "json_array()",
        },
        {
            name:     "single value",
            values:   []string{"value1"},
            expected: "json_array('value1')",
        },
        {
            name:     "multiple values",
            values:   []string{"value1", "value2", "value3"},
            expected: "json_array('value1', 'value2', 'value3')",
        },
        {
            name:     "values with special characters",
            values:   []string{"test", "with space", "with-dash"},
            expected: "json_array('test', 'with space', 'with-dash')",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got := string(f.JSONArrayLiteral(tt.values...))
            assert.Equal(t, tt.expected, got)
        })
    }
}

func TestTextToJsonColumn(t *testing.T) {
    tests := []struct {
        name     string
        column   string
        expected string
    }{
        {
            name:     "simple column name",
            column:   "data",
            expected: `"data"`,
        },
        {
            name:     "column with underscore",
            column:   "user_data",
            expected: `"user_data"`,
        },
        {
            name:     "column with special characters",
            column:   "json-col",
            expected: `"json-col"`,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got := string(f.TextToJsonColumn(tt.column))
            assert.Equal(t, tt.expected, got)
        })
    }
}

func TestLowerExpression(t *testing.T) {
    tests := []struct {
        name     string
        expr     string
        expected string
    }{
        {
            name:     "json_extract expression",
            expr:     "json_extract(data, '$.field')",
            expected: "lower(json_extract(data, '$.field'))",
        },
        {
            name:     "nested json_extract",
            expr:     "json_extract(metadata, '$.user.name')",
            expected: "lower(json_extract(metadata, '$.user.name'))",
        },
        {
            name:     "json_type expression",
            expr:     "json_type(data, '$.field')",
            expected: "lower(json_type(data, '$.field'))",
        },
        {
            name:     "string concatenation",
            expr:     "first_name || ' ' || last_name",
            expected: "lower(first_name || ' ' || last_name)",
        },
        {
            name:     "CAST expression",
            expr:     "CAST(value AS TEXT)",
            expected: "lower(CAST(value AS TEXT))",
        },
        {
            name:     "COALESCE expression",
            expr:     "COALESCE(name, 'default')",
            expected: "lower(COALESCE(name, 'default'))",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            f := newFormatter(sqlitedialect.New())
            got := string(f.LowerExpression(tt.expr))
            assert.Equal(t, tt.expected, got)
        })
    }
}
@@ -17,10 +17,11 @@ import (
)

type provider struct {
    settings factory.ScopedProviderSettings
    sqldb    *sql.DB
    bundb    *sqlstore.BunDB
    dialect  *dialect
    settings  factory.ScopedProviderSettings
    sqldb     *sql.DB
    bundb     *sqlstore.BunDB
    dialect   *dialect
    formatter sqlstore.SQLFormatter
}

func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -54,11 +55,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
    settings.Logger().InfoContext(ctx, "connected to sqlite", "path", config.Sqlite.Path)
    sqldb.SetMaxOpenConns(config.Connection.MaxOpenConns)

    sqliteDialect := sqlitedialect.New()
    bunDB := sqlstore.NewBunDB(settings, sqldb, sqliteDialect, hooks)
    return &provider{
        settings: settings,
        sqldb:    sqldb,
        bundb:    sqlstore.NewBunDB(settings, sqldb, sqlitedialect.New(), hooks),
        dialect:  new(dialect),
        settings:  settings,
        sqldb:     sqldb,
        bundb:     bunDB,
        dialect:   new(dialect),
        formatter: newFormatter(bunDB.Dialect()),
    }, nil
}

@@ -74,6 +78,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
    return provider.dialect
}

func (provider *provider) Formatter() sqlstore.SQLFormatter {
    return provider.formatter
}

func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
    return provider.bundb.BunDBCtx(ctx)
}

@@ -20,6 +20,8 @@ type SQLStore interface {
    // Returns the dialect of the database.
    Dialect() SQLDialect

    Formatter() SQLFormatter

    // RunInTxCtx runs the given callback in a transaction. It creates and injects a new context with the transaction.
    // If a transaction is present in the context, it will be used.
    RunInTxCtx(ctx context.Context, opts *SQLStoreTxOptions, cb func(ctx context.Context) error) error
@@ -86,3 +88,35 @@ type SQLDialect interface {
    // as an argument.
    ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error
}

type SQLFormatter interface {
    // JSONExtractString extracts the value at the given JSON path (e.g. "$.labels.severity") from the column.
    JSONExtractString(column, path string) []byte

    // JSONType determines the type of the value extracted from the path.
    JSONType(column, path string) []byte

    // JSONIsArray checks whether the value at the path is an array.
    JSONIsArray(column, path string) []byte

    // JSONArrayElements returns the table expression as well as the column alias to be used in SELECT and WHERE clauses.
    JSONArrayElements(column, path, alias string) ([]byte, []byte)

    // JSONArrayOfStrings returns the table expression as well as the column alias to be used in SELECT and WHERE clauses.
    JSONArrayOfStrings(column, path, alias string) ([]byte, []byte)

    // JSONArrayAgg aggregates values into a JSON array.
    JSONArrayAgg(expression string) []byte

    // JSONArrayLiteral creates a literal JSON array from the given string values.
    JSONArrayLiteral(values ...string) []byte

    // JSONKeys returns the key-extraction expression as well as the alias to be used in SELECT and WHERE clauses.
    JSONKeys(column, path, alias string) ([]byte, []byte)

    // TextToJsonColumn converts a text column to JSON type.
    TextToJsonColumn(column string) []byte

    // LowerExpression wraps any SQL expression with lower() for case-insensitive operations.
    LowerExpression(expression string) []byte
}

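For orientation, a hedged sketch of a dialect-agnostic caller of this interface; severityPredicate and its column argument are hypothetical, only Formatter, JSONExtractString, and LowerExpression come from the definitions above.

    // severityPredicate builds a case-insensitive severity filter that works on
    // any SQLStore, because the dialect-specific SQL comes from the formatter.
    // Assumes: import "github.com/SigNoz/signoz/pkg/sqlstore"
    func severityPredicate(store sqlstore.SQLStore, column string) string {
        f := store.Formatter()
        expr := f.LowerExpression(string(f.JSONExtractString(column, "$.labels.severity")))
        return string(expr) + " = 'critical'"
    }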
107 pkg/sqlstore/sqlstoretest/formatter.go Normal file
@@ -0,0 +1,107 @@
package sqlstoretest

import (
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun/schema"
)

type formatter struct {
    bunf schema.Formatter
}

func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
    return &formatter{bunf: schema.NewFormatter(dialect)}
}

func (f *formatter) JSONExtractString(column, path string) []byte {
    var sql []byte
    sql = append(sql, "json_extract("...)
    sql = f.bunf.AppendIdent(sql, column)
    sql = append(sql, ", "...)
    sql = schema.Append(f.bunf, sql, path)
    sql = append(sql, ")"...)
    return sql
}

func (f *formatter) JSONType(column, path string) []byte {
    var sql []byte
    sql = append(sql, "json_type("...)
    sql = f.bunf.AppendIdent(sql, column)
    sql = append(sql, ", "...)
    sql = schema.Append(f.bunf, sql, path)
    sql = append(sql, ")"...)
    return sql
}

func (f *formatter) JSONIsArray(column, path string) []byte {
    var sql []byte
    sql = append(sql, f.JSONType(column, path)...)
    sql = append(sql, " = "...)
    sql = schema.Append(f.bunf, sql, "array")
    return sql
}

func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
    var sql []byte
    sql = append(sql, "json_each("...)
    sql = f.bunf.AppendIdent(sql, column)
    if path != "$" && path != "" {
        sql = append(sql, ", "...)
        sql = schema.Append(f.bunf, sql, path)
    }
    sql = append(sql, ") AS "...)
    sql = f.bunf.AppendIdent(sql, alias)

    return sql, append([]byte(alias), ".value"...)
}

func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
    return f.JSONArrayElements(column, path, alias)
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
    var sql []byte
    sql = append(sql, "json_each("...)
    sql = f.bunf.AppendIdent(sql, column)
    if path != "$" && path != "" {
        sql = append(sql, ", "...)
        sql = schema.Append(f.bunf, sql, path)
    }
    sql = append(sql, ") AS "...)
    sql = f.bunf.AppendIdent(sql, alias)

    return sql, append([]byte(alias), ".key"...)
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
    var sql []byte
    sql = append(sql, "json_group_array("...)
    sql = append(sql, expression...)
    sql = append(sql, ')')
    return sql
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
    var sql []byte
    sql = append(sql, "json_array("...)
    for idx, value := range values {
        if idx > 0 {
            sql = append(sql, ", "...)
        }
        sql = schema.Append(f.bunf, sql, value)
    }
    sql = append(sql, ')')
    return sql
}

func (f *formatter) TextToJsonColumn(column string) []byte {
    return f.bunf.AppendIdent([]byte{}, column)
}

func (f *formatter) LowerExpression(expression string) []byte {
    var sql []byte
    sql = append(sql, "lower("...)
    sql = append(sql, expression...)
    sql = append(sql, ')')
    return sql
}
@@ -15,10 +15,11 @@ import (
var _ sqlstore.SQLStore = (*Provider)(nil)

type Provider struct {
    db      *sql.DB
    mock    sqlmock.Sqlmock
    bunDB   *bun.DB
    dialect *dialect
    db        *sql.DB
    mock      sqlmock.Sqlmock
    bunDB     *bun.DB
    dialect   *dialect
    formatter sqlstore.SQLFormatter
}

func New(config sqlstore.Config, matcher sqlmock.QueryMatcher) *Provider {
@@ -38,10 +39,11 @@ func New(config sqlstore.Config, matcher sqlmock.QueryMatcher) *Provider {
    }

    return &Provider{
        db:      db,
        mock:    mock,
        bunDB:   bunDB,
        dialect: new(dialect),
        db:        db,
        mock:      mock,
        bunDB:     bunDB,
        dialect:   new(dialect),
        formatter: newFormatter(bunDB.Dialect()),
    }
}

@@ -61,6 +63,8 @@ func (provider *Provider) Dialect() sqlstore.SQLDialect {
    return provider.dialect
}

func (provider *Provider) Formatter() sqlstore.SQLFormatter { return provider.formatter }

func (provider *Provider) BunDBCtx(ctx context.Context) bun.IDB {
    return provider.bunDB
}

@@ -218,6 +218,8 @@ func (c *conditionBuilder) ConditionFor(
    operator qbtypes.FilterOperator,
    value any,
    sb *sqlbuilder.SelectBuilder,
    _ uint64,
    _ uint64,
) (string, error) {
    condition, err := c.conditionFor(ctx, key, operator, value, sb)
    if err != nil {

@@ -276,7 +276,7 @@ func TestConditionFor(t *testing.T) {
    for _, tc := range testCases {
        sb := sqlbuilder.NewSelectBuilder()
        t.Run(tc.name, func(t *testing.T) {
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
            sb.Where(cond)

            if tc.expectedError != nil {
@@ -331,7 +331,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
        t.Run(tc.name, func(t *testing.T) {
            var err error
            for _, key := range tc.keys {
                cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb)
                cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
                sb.Where(cond)
                if err != nil {
                    t.Fatalf("Error getting condition for key %s: %v", key.Name, err)
@@ -528,7 +528,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
    for _, tc := range testCases {
        sb := sqlbuilder.NewSelectBuilder()
        t.Run(tc.name, func(t *testing.T) {
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
            sb.Where(cond)

            if tc.expectedError != nil {

@@ -34,7 +34,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

    for _, expr := range tests {
        t.Run(expr, func(t *testing.T) {
            clause, err := querybuilder.PrepareWhereClause(expr, opts)
            clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
            require.NoError(t, err)
            require.NotNil(t, clause)

@@ -71,7 +71,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {

    for _, expr := range tests {
        t.Run(expr, func(t *testing.T) {
            clause, err := querybuilder.PrepareWhereClause(expr, opts)
            clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
            require.NoError(t, err)
            require.NotNil(t, clause)

@@ -163,7 +163,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
    for _, tc := range testCases {
        t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

            clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
            clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

            if tc.shouldPass {
                if err != nil {
@@ -2387,7 +2387,7 @@ func TestFilterExprLogs(t *testing.T) {
    for _, tc := range testCases {
        t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

            clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
            clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

            if tc.shouldPass {
                if err != nil {
@@ -2506,7 +2506,7 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
    for _, tc := range testCases {
        t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

            clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
            clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

            if tc.shouldPass {
                if err != nil {

@@ -350,6 +350,8 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
        ctx, agg.Expression,
        uint64(query.StepInterval.Seconds()),
        keys,
        start,
        end,
    )
    if err != nil {
        return nil, err
@@ -499,6 +501,8 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
        ctx, aggExpr.Expression,
        rateInterval,
        keys,
        start,
        end,
    )
    if err != nil {
        return nil, err
@@ -592,7 +596,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
            JsonBodyPrefix:   b.jsonBodyPrefix,
            JsonKeyToKey:     b.jsonKeyToKey,
            Variables:        variables,
        })
    }, start, end)

    if err != nil {
        return nil, err

@@ -25,6 +25,8 @@ func (c *conditionBuilder) ConditionFor(
    operator qbtypes.FilterOperator,
    value any,
    sb *sqlbuilder.SelectBuilder,
    _ uint64,
    _ uint64,
) (string, error) {

    switch operator {

@@ -53,7 +53,7 @@ func TestConditionFor(t *testing.T) {
    for _, tc := range testCases {
        sb := sqlbuilder.NewSelectBuilder()
        t.Run(tc.name, func(t *testing.T) {
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
            sb.Where(cond)

            if tc.expectedError != nil {

@@ -978,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
            FieldMapper:      t.fm,
            ConditionBuilder: t.conditionBuilder,
            FieldKeys:        keys,
        })
    }, 0, 0)
    if err == nil {
        sb.AddWhereClause(whereClause.WhereClause)
    } else {
@@ -1002,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel

        // search on attributes
        key.FieldContext = telemetrytypes.FieldContextAttribute
        cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
        cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
        if err == nil {
            conds = append(conds, cond)
        }

        // search on resource
        key.FieldContext = telemetrytypes.FieldContextResource
        cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
        cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
        if err == nil {
            conds = append(conds, cond)
        }
        key.FieldContext = origContext
    } else {
        cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
        cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
        if err == nil {
            conds = append(conds, cond)
        }

@@ -148,7 +148,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
            FieldKeys:      keys,
            FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
            Variables:      variables,
        })
    }, start, end)
    if err != nil {
        return "", []any{}, err
    }
@@ -231,7 +231,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
            FieldKeys:      keys,
            FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
            Variables:      variables,
        })
    }, start, end)
    if err != nil {
        return "", nil, err
    }
@@ -295,7 +295,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
            FieldKeys:      keys,
            FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
            Variables:      variables,
        })
    }, start, end)
    if err != nil {
        return "", nil, err
    }

@@ -136,6 +136,8 @@ func (c *conditionBuilder) ConditionFor(
    operator qbtypes.FilterOperator,
    value any,
    sb *sqlbuilder.SelectBuilder,
    _ uint64,
    _ uint64,
) (string, error) {
    condition, err := c.conditionFor(ctx, key, operator, value, sb)
    if err != nil {

@@ -234,7 +234,7 @@ func TestConditionFor(t *testing.T) {
    for _, tc := range testCases {
        sb := sqlbuilder.NewSelectBuilder()
        t.Run(tc.name, func(t *testing.T) {
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
            sb.Where(cond)

            if tc.expectedError != nil {
@@ -289,7 +289,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
        t.Run(tc.name, func(t *testing.T) {
            var err error
            for _, key := range tc.keys {
                cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb)
                cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
                sb.Where(cond)
                if err != nil {
                    t.Fatalf("Error getting condition for key %s: %v", key.Name, err)

@@ -348,7 +348,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
            FieldKeys:      keys,
            FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
            Variables:      variables,
        })
    }, start, end)
    if err != nil {
        return "", nil, err
    }

@@ -223,9 +223,11 @@ func (c *conditionBuilder) ConditionFor(
    operator qbtypes.FilterOperator,
    value any,
    sb *sqlbuilder.SelectBuilder,
    startNs uint64,
    _ uint64,
) (string, error) {
    if c.isSpanScopeField(key.Name) {
        return c.buildSpanScopeCondition(key, operator, value)
        return c.buildSpanScopeCondition(key, operator, value, startNs)
    }

    condition, err := c.conditionFor(ctx, key, operator, value, sb)
@@ -257,7 +259,7 @@ func (c *conditionBuilder) isSpanScopeField(name string) bool {
    return keyName == SpanSearchScopeRoot || keyName == SpanSearchScopeEntryPoint
}

func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, error) {
func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any, startNs uint64) (string, error) {
    if operator != qbtypes.FilterOperatorEqual {
        return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "span scope field %s only supports '=' operator", key.Name)
    }
@@ -281,6 +283,11 @@ func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.Telemetry
    case SpanSearchScopeRoot:
        return "parent_span_id = ''", nil
    case SpanSearchScopeEntryPoint:
        if startNs > 0 { // only add the time filter when the start is a valid time
            startS := int64(startNs / 1_000_000_000)
            return fmt.Sprintf("((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s WHERE time >= toDateTime(%d))) AND parent_span_id != ''",
                DBName, TopLevelOperationsTableName, startS), nil
        }
        return fmt.Sprintf("((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''",
            DBName, TopLevelOperationsTableName), nil
    default:

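The startNs > 0 guard makes zero a sentinel for "no usable time range": callers such as the metadata lookups above pass 0, 0 and get the unbounded entry-point condition, while query paths with a real window get the time-bounded IN subquery. A hedged sketch of the conversion, matching the code above (the concrete epoch is illustrative):

    // startNs is a nanosecond epoch; ClickHouse's toDateTime() takes seconds,
    // hence the division before the value is interpolated into the subquery.
    startNs := uint64(1761437108000000000)
    startS := int64(startNs / 1_000_000_000) // 1761437108
    timeBound := fmt.Sprintf("time >= toDateTime(%d)", startS)
    _ = timeBound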
@@ -289,7 +289,7 @@ func TestConditionFor(t *testing.T) {
    for _, tc := range testCases {
        sb := sqlbuilder.NewSelectBuilder()
        t.Run(tc.name, func(t *testing.T) {
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
            cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 1761437108000000000, 1761458708000000000)
            sb.Where(cond)

            if tc.expectedError != nil {

@@ -21,26 +21,37 @@ func TestSpanScopeFilterExpression(t *testing.T) {
        expression        string
        expectedCondition string
        expectError       bool
        startNs           uint64
    }{
        {
            name:              "simple isroot filter",
            expression:        "isroot = true",
            expectedCondition: "parent_span_id = ''",
            startNs:           1761437108000000000,
        },
        {
            name:              "simple isentrypoint filter",
            name:              "simple isentrypoint filter (unbounded)",
            expression:        "isentrypoint = true",
            expectedCondition: "((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
            startNs:           0,
        },
        {
            name:              "simple isentrypoint filter (bounded)",
            expression:        "isentrypoint = true",
            expectedCondition: "((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations WHERE time >= toDateTime(1761437108))) AND parent_span_id != ''",
            startNs:           1761437108000000000,
        },
        {
            name:              "combined filter with AND",
            expression:        "isroot = true AND has_error = true",
            expectedCondition: "parent_span_id = ''",
            startNs:           1761437108000000000,
        },
        {
            name:              "combined filter with OR",
            expression:        "isentrypoint = true OR has_error = true",
            expectedCondition: "((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
            expectedCondition: "((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations WHERE time >= toDateTime(1761437108))) AND parent_span_id != ''",
            startNs:           1761437108000000000,
        },
    }

@@ -64,13 +75,13 @@ func TestSpanScopeFilterExpression(t *testing.T) {
        FieldContext: telemetrytypes.FieldContextSpan,
    }}

    whereClause, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
        Logger:           instrumentationtest.New().Logger(),
        FieldMapper:      fm,
        ConditionBuilder: cb,
        FieldKeys:        fieldKeys,
        Builder:          sb,
    })
    }, tt.startNs, 1761458708000000000)

    if tt.expectError {
        assert.Error(t, err)
@@ -131,13 +142,13 @@ func TestSpanScopeWithResourceFilter(t *testing.T) {
        FieldContext: telemetrytypes.FieldContextResource,
    }}

    _, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
        Logger:             instrumentationtest.New().Logger(),
        FieldMapper:        fm,
        ConditionBuilder:   cb,
        FieldKeys:          fieldKeys,
        SkipResourceFilter: false, // This would be set by the statement builder
    })
    }, 1761437108000000000, 1761458708000000000)

    assert.NoError(t, err)
})

@@ -512,6 +512,8 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
        ctx, agg.Expression,
        uint64(query.StepInterval.Seconds()),
        keys,
        start,
        end,
    )
    if err != nil {
        return nil, err
@@ -657,6 +659,8 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
        ctx, aggExpr.Expression,
        rateInterval,
        keys,
        start,
        end,
    )
    if err != nil {
        return nil, err
@@ -746,7 +750,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
            FieldKeys:          keys,
            SkipResourceFilter: true,
            Variables:          variables,
        })
    }, start, end)

    if err != nil {
        return nil, err

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
            ConditionBuilder:   b.stmtBuilder.cb,
            FieldKeys:          keys,
            SkipResourceFilter: true,
        },
    }, b.start, b.end,
    )
    if err != nil {
        b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -575,6 +575,8 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
        agg.Expression,
        uint64(b.operator.StepInterval.Seconds()),
        keys,
        b.start,
        b.end,
    )
    if err != nil {
        return nil, errors.NewInvalidInputf(
@@ -687,6 +689,8 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
        agg.Expression,
        rateInterval,
        keys,
        b.start,
        b.end,
    )
    if err != nil {
        return nil, errors.NewInvalidInputf(
@@ -825,6 +829,8 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
        agg.Expression,
        uint64((b.end-b.start)/querybuilder.NsToSeconds),
        keys,
        b.start,
        b.end,
    )
    if err != nil {
        return nil, errors.NewInvalidInputf(

@@ -31,13 +31,14 @@ type FieldMapper interface {
// ConditionBuilder builds the condition for the filter.
type ConditionBuilder interface {
    // ConditionFor returns the condition for the given key, operator and value.
    ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error)
    // TODO(srikanthccv,nikhilmantri0902): remove startNs, endNs when top_level_operations can be replaced with `is_remote`
    ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator FilterOperator, value any, sb *sqlbuilder.SelectBuilder, startNs uint64, endNs uint64) (string, error)
}

type AggExprRewriter interface {
    // Rewrite rewrites the aggregation expression to be used in the query.
    Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error)
    RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error)
    Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey, startNs uint64, endNs uint64) (string, []any, error)
    RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey, startNs uint64, endNs uint64) ([]string, [][]any, error)
}

type Statement struct {

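Taken together, the signature changes mean every caller now threads the query window down to the condition builder; a hedged sketch of the new call shape (ctx, key, sb, and cb as in the tests above; the window values are illustrative):

    // The window is passed as nanosecond epochs; 0, 0 means "no usable range"
    // and skips the top_level_operations time bound.
    startNs := uint64(1761437108000000000)
    endNs := uint64(1761458708000000000)
    cond, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorEqual, true, sb, startNs, endNs)
    if err == nil {
        sb.Where(cond)
    }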