Compare commits

...

22 Commits

Author SHA1 Message Date
aniket
44080a1d59 chore: resolved conflict 2025-11-02 11:09:13 +05:30
aniket
554c498209 Merge branch 'chore/json_formatter' of github.com:SigNoz/signoz into chore/filter-rules 2025-11-02 11:06:03 +05:30
aniket
156de83626 chore: minor changes 2025-11-01 22:16:25 +05:30
aniket
7c4a18687a chore: minor changes 2025-11-01 22:07:01 +05:30
aniket
7214f51e98 chore: minor changes 2025-11-01 21:06:58 +05:30
aniket
290e0754c6 chore: minor changes 2025-11-01 21:02:56 +05:30
aniket
0ea16f9472 chore: updated queries: 2025-11-01 16:52:33 +05:30
aniket
0d773211af chore: resolved pr comments 2025-11-01 16:10:40 +05:30
aniket
7028031e01 Merge branch 'chore/json_formatter' of github.com:SigNoz/signoz into chore/filter-rules 2025-11-01 16:07:18 +05:30
aniket
2a4407280d chore: resolved pr comments 2025-11-01 15:32:54 +05:30
Vikrant Gupta
8c75fb29a6 Merge branch 'main' into chore/json_formatter 2025-11-01 14:56:53 +05:30
aniket
85ea6105f8 chore: resolved pr comments 2025-11-01 14:55:42 +05:30
aniket
dc8fba6944 chore: resolved pr comments 2025-11-01 14:51:55 +05:30
aniket
97bbc95aab Merge branch 'chore/json_formatter' of github.com:SigNoz/signoz into chore/filter-rules 2025-10-31 14:35:35 +05:30
aniket
fbcb17006d chore: added apend ident 2025-10-31 14:21:48 +05:30
aniket
d642b69f8e Merge branch 'main' of github.com:SigNoz/signoz into chore/filter-rules 2025-10-31 01:42:16 +05:30
aniket
7230069de6 chore: minor changes 2025-10-31 01:39:24 +05:30
aniket
80fff10273 chore: added keys, values api for rule filtering 2025-10-30 16:58:48 +05:30
aniket
39a6e3865e Merge branch 'chore/json_formatter' of github.com:SigNoz/signoz into chore/json_formatter 2025-10-30 16:53:32 +05:30
aniket
492e249c29 chore: updated json extract columns 2025-10-30 16:52:27 +05:30
aniketio-ctrl
d68affd1d6 Merge branch 'main' into chore/json_formatter 2025-10-27 17:34:32 +05:30
aniket
7bad6d5377 chore: added sql formatter for json 2025-10-27 17:14:29 +05:30
16 changed files with 1731 additions and 28 deletions

View File

@@ -0,0 +1,157 @@
package postgressqlstore
import (
"strings"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun/schema"
)
// formatter builds dialect-specific SQL fragments for querying JSON stored in
// Postgres jsonb columns. It satisfies sqlstore.SQLFormatter.
type formatter struct {
	// bunf quotes identifiers (columns, aliases) per the Postgres dialect.
	bunf schema.Formatter
}

// newFormatter returns a Postgres SQLFormatter backed by the given bun dialect.
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
	return &formatter{bunf: schema.NewFormatter(dialect)}
}
// JSONExtractString builds a Postgres expression extracting the value at the
// JSONPath as text, e.g. ("data", "$.user.name") -> "data"->'user'->>'name'.
func (f *formatter) JSONExtractString(column, path string) []byte {
	out := f.bunf.AppendIdent(nil, column)
	return append(out, f.convertJSONPathToPostgres(path)...)
}
// JSONType wraps the JSON value at path in jsonb_typeof(...), yielding the
// Postgres JSON type name ('object', 'array', 'string', ...).
func (f *formatter) JSONType(column, path string) []byte {
	out := []byte("jsonb_typeof(")
	out = f.bunf.AppendIdent(out, column)
	out = append(out, f.convertJSONPathToPostgresWithMode(path, false)...)
	return append(out, ')')
}
// JSONIsArray builds a boolean expression that is true when the JSON value at
// path is an array.
func (f *formatter) JSONIsArray(column, path string) []byte {
	return append(f.JSONType(column, path), " = 'array'"...)
}
// JSONArrayElements expands the array at path into one row per element via
// jsonb_array_elements. It returns the table expression and the column
// expression (the alias) that selects each element.
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
	out := []byte("jsonb_array_elements(")
	out = f.bunf.AppendIdent(out, column)
	// "$" (or empty) addresses the column itself, so no path operators.
	if path != "" && path != "$" {
		out = append(out, f.convertJSONPathToPostgresWithMode(path, false)...)
	}
	out = append(out, ") AS "...)
	out = f.bunf.AppendIdent(out, alias)
	return out, []byte(alias)
}
// JSONArrayOfStrings expands the string array at path into one text row per
// element via jsonb_array_elements_text. It returns the table expression and
// the column expression "<alias>::text" selecting each element.
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
	out := []byte("jsonb_array_elements_text(")
	out = f.bunf.AppendIdent(out, column)
	if path != "" && path != "$" {
		out = append(out, f.convertJSONPathToPostgresWithMode(path, false)...)
	}
	out = append(out, ") AS "...)
	out = f.bunf.AppendIdent(out, alias)
	return out, []byte(alias + "::text")
}
// JSONKeys expands the object at path into key/value rows via jsonb_each.
// It returns the table expression and the column expression "<alias>.key".
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
	out := []byte("jsonb_each(")
	out = f.bunf.AppendIdent(out, column)
	if path != "" && path != "$" {
		out = append(out, f.convertJSONPathToPostgresWithMode(path, false)...)
	}
	out = append(out, ") AS "...)
	out = f.bunf.AppendIdent(out, alias)
	return out, []byte(alias + ".key")
}
// JSONArrayAgg wraps the given SQL expression in jsonb_agg(...). The
// expression is embedded verbatim; callers are responsible for its safety.
func (f *formatter) JSONArrayAgg(expression string) []byte {
	out := append([]byte("jsonb_agg("), expression...)
	return append(out, ')')
}
// JSONArrayLiteral builds a jsonb_build_array('v1', 'v2', ...) literal from
// the given string values, or jsonb_build_array() when no values are given.
// Each value is rendered as a SQL string literal.
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
	if len(values) == 0 {
		return []byte("jsonb_build_array()")
	}
	var sql []byte
	sql = append(sql, "jsonb_build_array("...)
	for i, v := range values {
		if i > 0 {
			sql = append(sql, ", "...)
		}
		sql = append(sql, '\'')
		// Escape embedded single quotes by doubling them so a value cannot
		// terminate the literal early (malformed SQL / injection).
		sql = append(sql, strings.ReplaceAll(v, "'", "''")...)
		sql = append(sql, '\'')
	}
	sql = append(sql, ')')
	return sql
}
// TextToJsonColumn renders the quoted column cast to jsonb, e.g. "data"::jsonb.
func (f *formatter) TextToJsonColumn(column string) []byte {
	return append(f.bunf.AppendIdent(nil, column), "::jsonb"...)
}
// convertJSONPathToPostgres converts a JSONPath like "$.a.b" into Postgres
// path operators, extracting the final segment as text (->> on the last key).
func (f *formatter) convertJSONPathToPostgres(jsonPath string) string {
	return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}
// convertJSONPathToPostgresWithMode turns a dotted JSONPath ("$.a.b.c") into a
// chain of Postgres operators: -> for every intermediate key, and ->> for the
// final key when asText is true (text extraction) or -> otherwise (jsonb).
// "$", "", and "." yield an empty string (the column itself).
func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) string {
	trimmed := strings.TrimPrefix(jsonPath, "$")
	if trimmed == "" || trimmed == "." {
		return ""
	}
	segments := strings.Split(strings.TrimPrefix(trimmed, "."), ".")
	var b strings.Builder
	for i, seg := range segments {
		op := "->"
		if asText && i == len(segments)-1 {
			op = "->>"
		}
		b.WriteString(op)
		b.WriteString("'")
		b.WriteString(seg)
		b.WriteString("'")
	}
	return b.String()
}
// LowerExpression wraps the given SQL expression in lower(...). The expression
// is embedded verbatim; callers are responsible for its safety.
func (f *formatter) LowerExpression(expression string) []byte {
	out := append([]byte("lower("), expression...)
	return append(out, ')')
}

View File

@@ -0,0 +1,488 @@
package postgressqlstore
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/uptrace/bun/dialect/pgdialect"
)
// TestJSONExtractString verifies text-extraction expressions for Postgres.
func TestJSONExtractString(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, column, path, expected string }{
		{"simple path", "data", "$.field", `"data"->>'field'`},
		{"nested path", "metadata", "$.user.name", `"metadata"->'user'->>'name'`},
		{"deeply nested path", "json_col", "$.level1.level2.level3", `"json_col"->'level1'->'level2'->>'level3'`},
		{"root path", "json_col", "$", `"json_col"`},
		{"empty path", "data", "", `"data"`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.JSONExtractString(tc.column, tc.path)))
		})
	}
}
// TestJSONType verifies jsonb_typeof expressions for Postgres.
func TestJSONType(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, column, path, expected string }{
		{"simple path", "data", "$.field", `jsonb_typeof("data"->'field')`},
		{"nested path", "metadata", "$.user.age", `jsonb_typeof("metadata"->'user'->'age')`},
		{"root path", "json_col", "$", `jsonb_typeof("json_col")`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.JSONType(tc.column, tc.path)))
		})
	}
}
// TestJSONIsArray verifies array-type predicates for Postgres.
func TestJSONIsArray(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, column, path, expected string }{
		{"simple path", "data", "$.items", `jsonb_typeof("data"->'items') = 'array'`},
		{"nested path", "metadata", "$.user.tags", `jsonb_typeof("metadata"->'user'->'tags') = 'array'`},
		{"root path", "json_col", "$", `jsonb_typeof("json_col") = 'array'`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.JSONIsArray(tc.column, tc.path)))
		})
	}
}
// TestJSONArrayElements verifies jsonb_array_elements table expressions.
func TestJSONArrayElements(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, column, path, alias, expected string }{
		{"root path with dollar sign", "data", "$", "elem", `jsonb_array_elements("data") AS "elem"`},
		{"root path empty", "data", "", "elem", `jsonb_array_elements("data") AS "elem"`},
		{"nested path", "metadata", "$.items", "item", `jsonb_array_elements("metadata"->'items') AS "item"`},
		{"deeply nested path", "json_col", "$.user.tags", "tag", `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, _ := f.JSONArrayElements(tc.column, tc.path, tc.alias)
			assert.Equal(t, tc.expected, string(got))
		})
	}
}
// TestJSONArrayOfStrings verifies jsonb_array_elements_text table expressions.
func TestJSONArrayOfStrings(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, column, path, alias, expected string }{
		{"root path with dollar sign", "data", "$", "str", `jsonb_array_elements_text("data") AS "str"`},
		{"root path empty", "data", "", "str", `jsonb_array_elements_text("data") AS "str"`},
		{"nested path", "metadata", "$.strings", "s", `jsonb_array_elements_text("metadata"->'strings') AS "s"`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, _ := f.JSONArrayOfStrings(tc.column, tc.path, tc.alias)
			assert.Equal(t, tc.expected, string(got))
		})
	}
}
// TestJSONKeys verifies jsonb_each table expressions.
func TestJSONKeys(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, column, path, alias, expected string }{
		{"root path with dollar sign", "data", "$", "k", `jsonb_each("data") AS "k"`},
		{"root path empty", "data", "", "k", `jsonb_each("data") AS "k"`},
		{"nested path", "metadata", "$.object", "key", `jsonb_each("metadata"->'object') AS "key"`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, _ := f.JSONKeys(tc.column, tc.path, tc.alias)
			assert.Equal(t, tc.expected, string(got))
		})
	}
}
// TestJSONArrayAgg verifies jsonb_agg wrapping of raw expressions.
func TestJSONArrayAgg(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, expression, expected string }{
		{"simple column", "id", "jsonb_agg(id)"},
		{"expression with function", "DISTINCT name", "jsonb_agg(DISTINCT name)"},
		{"complex expression", "data->>'field'", "jsonb_agg(data->>'field')"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.JSONArrayAgg(tc.expression)))
		})
	}
}
// TestJSONArrayLiteral verifies jsonb_build_array literal construction.
func TestJSONArrayLiteral(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct {
		name     string
		values   []string
		expected string
	}{
		{"empty array", []string{}, "jsonb_build_array()"},
		{"single value", []string{"value1"}, "jsonb_build_array('value1')"},
		{"multiple values", []string{"value1", "value2", "value3"}, "jsonb_build_array('value1', 'value2', 'value3')"},
		{"values with special characters", []string{"test", "with space", "with-dash"}, "jsonb_build_array('test', 'with space', 'with-dash')"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.JSONArrayLiteral(tc.values...)))
		})
	}
}
// TestConvertJSONPathToPostgresWithMode verifies JSONPath-to-operator
// conversion in both text (->>) and jsonb (->) modes.
func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
	f := newFormatter(pgdialect.New()).(*formatter)
	cases := []struct {
		name     string
		jsonPath string
		asText   bool
		expected string
	}{
		{"simple path as text", "$.field", true, "->>'field'"},
		{"simple path as json", "$.field", false, "->'field'"},
		{"nested path as text", "$.user.name", true, "->'user'->>'name'"},
		{"nested path as json", "$.user.name", false, "->'user'->'name'"},
		{"deeply nested as text", "$.a.b.c.d", true, "->'a'->'b'->'c'->>'d'"},
		{"root path", "$", true, ""},
		{"empty path", "", true, ""},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, f.convertJSONPathToPostgresWithMode(tc.jsonPath, tc.asText))
		})
	}
}
// TestTextToJsonColumn verifies the ::jsonb cast expression.
func TestTextToJsonColumn(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, column, expected string }{
		{"simple column name", "data", `"data"::jsonb`},
		{"column with underscore", "user_data", `"user_data"::jsonb`},
		{"column with special characters", "json-col", `"json-col"::jsonb`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.TextToJsonColumn(tc.column)))
		})
	}
}
// TestLowerExpression verifies lower(...) wrapping of arbitrary expressions.
func TestLowerExpression(t *testing.T) {
	f := newFormatter(pgdialect.New())
	cases := []struct{ name, expr, expected string }{
		{"simple column name", "name", "lower(name)"},
		{"quoted column identifier", `"column_name"`, `lower("column_name")`},
		{"jsonb text extraction", "data->>'field'", "lower(data->>'field')"},
		{"nested jsonb extraction", "metadata->'user'->>'name'", "lower(metadata->'user'->>'name')"},
		{"jsonb_typeof expression", "jsonb_typeof(data->'field')", "lower(jsonb_typeof(data->'field'))"},
		{"string concatenation", "first_name || ' ' || last_name", "lower(first_name || ' ' || last_name)"},
		{"CAST expression", "CAST(value AS TEXT)", "lower(CAST(value AS TEXT))"},
		{"COALESCE expression", "COALESCE(name, 'default')", "lower(COALESCE(name, 'default'))"},
		{"subquery column", "users.email", "lower(users.email)"},
		{"quoted identifier with special chars", `"user-name"`, `lower("user-name")`},
		{"jsonb to text cast", "data::text", "lower(data::text)"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.LowerExpression(tc.expr)))
		})
	}
}

View File

@@ -3,7 +3,6 @@ package postgressqlstore
import (
"context"
"database/sql"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -15,10 +14,11 @@ import (
)
type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
formatter sqlstore.SQLFormatter
}
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -55,11 +55,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
sqldb := stdlib.OpenDBFromPool(pool)
pgDialect := pgdialect.New()
bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
return &provider{
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
dialect: new(dialect),
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks),
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
}, nil
}
@@ -75,6 +78,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}
// Formatter returns the dialect-specific SQL formatter for JSON expressions.
func (provider *provider) Formatter() sqlstore.SQLFormatter {
	return provider.formatter
}
// BunDBCtx returns the bun DB handle bound to ctx (e.g. an active transaction).
func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
	return provider.bundb.BunDBCtx(ctx)
}

View File

@@ -499,6 +499,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/alerts", am.ViewAccess(aH.AlertmanagerAPI.GetAlerts)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/rules/keys", am.ViewAccess(aH.getRuleAttributeKeys)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/rules/values", am.ViewAccess(aH.getRuleAttributeValues)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/rules", am.ViewAccess(aH.listRules)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/rules/{id}", am.ViewAccess(aH.getRule)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/rules", am.EditAccess(aH.createRule)).Methods(http.MethodPost)
@@ -1152,6 +1155,63 @@ func (aH *APIHandler) getRuleStateHistoryTopContributors(w http.ResponseWriter,
aH.Respond(w, res)
}
// getRuleAttributeKeys returns the attribute keys usable for filtering rules:
// the fixed keys plus distinct label keys stored on the org's rules.
// Query params: searchText (prefix filter), limit (falls back to 10 when
// absent, non-numeric, or non-positive).
func (aH *APIHandler) getRuleAttributeKeys(w http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get claims from context: %v", err))
		return
	}
	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get orgId from claims: %v", err))
		return
	}
	searchText := r.URL.Query().Get("searchText")
	// Invalid limits are silently replaced with the default rather than erroring.
	limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
	if err != nil || limit <= 0 {
		limit = 10
	}
	keys, err := aH.ruleManager.GetSearchKeys(r.Context(), searchText, limit, orgID)
	if err != nil {
		render.Error(w, err)
		return
	}
	render.Success(w, http.StatusOK, keys)
}
// getRuleAttributeValues returns the candidate values for a single rule
// attribute key (channel, threshold name, created/updated by, name, state, or
// an arbitrary label key). Query params: attributeKey (required), searchText
// (prefix filter), limit (falls back to 10 when absent or non-positive).
func (aH *APIHandler) getRuleAttributeValues(w http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get claims from context: %v", err))
		return
	}
	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get orgId from claims: %v", err))
		return
	}
	attributeKey := r.URL.Query().Get("attributeKey")
	if attributeKey == "" {
		// NOTE(review): this is built with NewInternalf but carries
		// CodeInvalidInput — confirm whether an invalid-input constructor is
		// the intended shape for 400-class errors.
		render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInvalidInput, "attributeKey is required"))
		return
	}
	searchText := r.URL.Query().Get("searchText")
	limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
	if err != nil || limit <= 0 {
		limit = 10
	}
	keys, err := aH.ruleManager.GetSearchValues(r.Context(), searchText, limit, attributeKey, orgID)
	if err != nil {
		render.Error(w, errorsV2.NewInternalf(errorsV2.CodeInternal, "failed to get rule search values: %v", err))
		return
	}
	render.Success(w, http.StatusOK, keys)
}
func (aH *APIHandler) listRules(w http.ResponseWriter, r *http.Request) {
rules, err := aH.ruleManager.ListRuleStates(r.Context())

View File

@@ -36,6 +36,16 @@ func (s AlertState) String() string {
panic(errors.Errorf("unknown alert state: %d", s))
}
func GetAllRuleStates() []string {
return []string{
StateInactive.String(),
StatePending.String(),
StateFiring.String(),
StateNoData.String(),
StateDisabled.String(),
}
}
// MarshalJSON encodes the state as its human-readable string form.
func (s AlertState) MarshalJSON() ([]byte, error) {
	return json.Marshal(s.String())
}

View File

@@ -5,6 +5,7 @@ import (
"encoding/json"
"fmt"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"log/slog"
"sort"
"strings"
@@ -1083,3 +1084,60 @@ func (m *Manager) GetAlertDetailsForMetricNames(ctx context.Context, metricNames
return result, nil
}
// GetSearchKeys returns the filterable rule attribute keys for the org: the
// fixed attribute keys followed by the distinct label keys found on stored
// rules (as string-typed label attributes). limit applies to the label-key
// lookup only; the fixed keys are always included.
func (m *Manager) GetSearchKeys(ctx context.Context, searchText string, limit int, orgId valuer.UUID) ([]ruletypes.GetRuleAttributeKeys, error) {
	labelKeys, err := m.ruleStore.GetRuleLabelKeys(ctx, searchText, limit, orgId.String())
	if err != nil {
		return nil, errors.NewInternalf(errors.CodeInternal, "failed to get rule label keys: %v", err)
	}
	// Copy the fixed keys so the shared slice is never mutated by append.
	result := append([]ruletypes.GetRuleAttributeKeys{}, ruletypes.FixedRuleAttributeKeys...)
	for _, k := range labelKeys {
		result = append(result, ruletypes.GetRuleAttributeKeys{
			Key:      k,
			Type:     ruletypes.RuleAttributeTypeLabel,
			DataType: telemetrytypes.FieldDataTypeString,
		})
	}
	return result, nil
}
// GetSearchValues returns candidate values for a rule attribute key. Known
// keys dispatch to dedicated store lookups; the state key is answered from
// the in-memory state list; anything else is treated as a label key.
func (m *Manager) GetSearchValues(ctx context.Context, searchText string, limit int, key string, orgId valuer.UUID) ([]string, error) {
	switch key {
	case ruletypes.RuleAttributeKeyChannel:
		return m.ruleStore.GetChannel(ctx, searchText, limit, orgId.String())
	case ruletypes.RuleAttributeKeyThresholdName:
		return m.ruleStore.GetThresholdNames(ctx, searchText, limit, orgId.String())
	case ruletypes.RuleAttributeKeyCreatedBy:
		return m.ruleStore.GetCreatedBy(ctx, searchText, limit, orgId.String())
	case ruletypes.RuleAttributeKeyUpdatedBy:
		return m.ruleStore.GetUpdatedBy(ctx, searchText, limit, orgId.String())
	case ruletypes.RuleAttributeKeyName:
		return m.ruleStore.GetNames(ctx, searchText, limit, orgId.String())
	case ruletypes.RuleAttributeKeyState:
		states := model.GetAllRuleStates()
		// No search text: return the (possibly truncated) full list.
		if searchText == "" {
			if limit > 0 && limit < len(states) {
				states = states[:limit]
			}
			return states, nil
		}
		// Case-insensitive substring match, stopping once limit is reached.
		needle := strings.ToLower(searchText)
		matches := make([]string, 0, len(states))
		for _, s := range states {
			if !strings.Contains(strings.ToLower(s), needle) {
				continue
			}
			matches = append(matches, s)
			if limit > 0 && len(matches) >= limit {
				break
			}
		}
		return matches, nil
	default:
		return m.ruleStore.GetRuleLabelValues(ctx, searchText, limit, key, orgId.String())
	}
}

View File

@@ -2,6 +2,9 @@ package sqlrulestore
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/uptrace/bun"
"strings"
"github.com/SigNoz/signoz/pkg/sqlstore"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
@@ -101,3 +104,205 @@ func (r *rule) GetStoredRule(ctx context.Context, id valuer.UUID) (*ruletypes.Ru
}
return rule, nil
}
// GetRuleLabelKeys returns distinct lowercased label keys from the org's rule
// definitions ($.labels) whose lowercase form matches the searchText prefix.
func (r *rule) GetRuleLabelKeys(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
	fmter := r.sqlstore.Formatter()
	keysExpr, keyCol := fmter.JSONKeys("data", "$.labels", "keys")
	lowerKey := string(fmter.LowerExpression(string(keyCol)))
	pattern := strings.ToLower(searchText) + "%"
	labelKeys := make([]string, 0)
	err := r.sqlstore.BunDB().
		NewSelect().
		Distinct().
		ColumnExpr("?", bun.SafeQuery(lowerKey)).
		TableExpr("rule, ?", bun.SafeQuery(string(keysExpr))).
		Where("? LIKE ?", bun.SafeQuery(lowerKey), pattern).
		Where("org_id = ?", orgId).
		Limit(limit).
		Scan(ctx, &labelKeys)
	if err != nil {
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "search keys for rule with orgId %s not found", orgId)
	}
	return labelKeys, nil
}
// GetThresholdNames returns distinct threshold names matching the searchText
// prefix for the org. It first reads names from the v2 threshold spec
// ($.condition.thresholds.spec[].name); if that yields fewer than limit rows
// it tops the result up with v1 severity labels ($.labels.severity).
func (r *rule) GetThresholdNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
	names := make([]string, 0)
	// Prefix match, case-insensitive via lower() on the extracted value.
	searchText = strings.ToLower(searchText) + "%"
	fmter := r.sqlstore.Formatter()
	// Query threshold spec names
	specQuery, specCol := fmter.JSONArrayElements("data", "$.condition.thresholds.spec", "spec")
	nameQuery := string(fmter.JSONExtractString(string(specCol), "$.name"))
	lowerNameQuery := string(fmter.LowerExpression(nameQuery))
	query := r.sqlstore.BunDB().
		NewSelect().
		Distinct().
		ColumnExpr("?", bun.SafeQuery(nameQuery)).
		TableExpr("rule, ?", bun.SafeQuery(string(specQuery))).
		Where("? LIKE ?", bun.SafeQuery(lowerNameQuery), searchText).
		Where("org_id = ?", orgId).
		Limit(limit)
	err := query.Scan(ctx, &names)
	if err != nil {
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "threshold names for rule with orgId %s not found", orgId)
	}
	// Limit already satisfied by v2 names: trim and skip the v1 fallback.
	if len(names) >= limit {
		return names[:limit], nil
	}
	// v1 rules store the severity label where v2 stores threshold names.
	severityQuery := string(fmter.JSONExtractString("data", "$.labels.severity"))
	lowerSeverityQuery := string(fmter.LowerExpression(severityQuery))
	thresholds := make([]string, 0)
	query = r.sqlstore.BunDB().
		NewSelect().
		Distinct().
		ColumnExpr("?", bun.SafeQuery(severityQuery)).
		TableExpr("rule").
		Where("org_id = ?", orgId).
		Where("? LIKE ?", bun.SafeQuery(lowerSeverityQuery), searchText).
		Limit(limit - len(names))
	err = query.Scan(ctx, &thresholds)
	if err != nil {
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "threshold names for rule with orgId %s not found", orgId)
	}
	names = append(names, thresholds...)
	return names, nil
}
// GetChannel returns distinct notification channel names matching the
// searchText prefix for the org. It first reads channels from the v2
// threshold spec ($.condition.thresholds.spec[].channels); if that yields
// fewer than limit rows it tops up from v1 $.preferredChannels.
func (r *rule) GetChannel(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
	names := make([]string, 0)
	// Prefix match, case-insensitive via lower() on the extracted value.
	searchText = strings.ToLower(searchText) + "%"
	fmter := r.sqlstore.Formatter()
	// Query v2 threshold channels
	specSQL, specCol := fmter.JSONArrayElements("data", "$.condition.thresholds.spec", "spec")
	channelSQL, channelCol := fmter.JSONArrayOfStrings(string(specCol), "$.channels", "channels")
	lowerChannelCol := string(fmter.LowerExpression(string(channelCol)))
	query := r.sqlstore.BunDB().
		NewSelect().
		Distinct().
		ColumnExpr("?", bun.SafeQuery(string(channelCol))).
		TableExpr("rule, ?, ?",
			bun.SafeQuery(string(specSQL)),
			bun.SafeQuery(string(channelSQL))).
		Where("? LIKE ?", bun.SafeQuery(lowerChannelCol), searchText).
		Where("org_id = ?", orgId).
		Limit(limit)
	err := query.Scan(ctx, &names)
	if err != nil {
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "channel for rule with orgId %s not found", orgId)
	}
	// Limit already satisfied by v2 channels: trim and skip the v1 fallback.
	if len(names) >= limit {
		return names[:limit], nil
	}
	// Query v1 preferred channels
	channelsSQL, channelsCol := fmter.JSONArrayOfStrings("data", "$.preferredChannels", "channels")
	lowerChannelsCol := fmter.LowerExpression(string(channelsCol))
	channels := make([]string, 0)
	query = r.sqlstore.BunDB().
		NewSelect().
		Distinct().
		ColumnExpr("?", bun.SafeQuery(string(channelsCol))).
		TableExpr("rule, ?", bun.SafeQuery(string(channelsSQL))).
		Where("? LIKE ?", bun.SafeQuery(string(lowerChannelsCol)), searchText).
		Where("org_id = ?", orgId).
		Limit(limit - len(names))
	err = query.Scan(ctx, &channels)
	if err != nil {
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "channel for rule with orgId %s not found", orgId)
	}
	names = append(names, channels...)
	return names, nil
}
// GetNames returns distinct alert names ($.alert) matching the searchText
// prefix for the org, case-insensitively.
func (r *rule) GetNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
	fmter := r.sqlstore.Formatter()
	nameExpr := fmter.JSONExtractString("data", "$.alert")
	lowerName := fmter.LowerExpression(string(nameExpr))
	pattern := strings.ToLower(searchText) + "%"
	names := make([]string, 0)
	err := r.sqlstore.BunDB().
		NewSelect().
		Distinct().
		ColumnExpr("?", bun.SafeQuery(string(nameExpr))).
		TableExpr("?", bun.SafeQuery("rule")).
		Where("? LIKE ?", bun.SafeQuery(string(lowerName)), pattern).
		Where("org_id = ?", orgId).
		Limit(limit).
		Scan(ctx, &names)
	if err != nil {
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "names for rule with orgId %s not found", orgId)
	}
	return names, nil
}
// GetCreatedBy returns distinct created_by values matching the searchText
// prefix for the org, case-insensitively.
func (r *rule) GetCreatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
	names := make([]string, 0)
	searchText = strings.ToLower(searchText) + "%"
	query := r.sqlstore.BunDB().NewSelect().
		Distinct().
		Column("created_by").
		TableExpr("?", bun.SafeQuery("rule")).
		Where("org_id = ?", orgId).
		Where("? LIKE ?", bun.SafeQuery(string(r.sqlstore.Formatter().LowerExpression("created_by"))), searchText).
		Limit(limit)
	err := query.Scan(ctx, &names)
	if err != nil {
		// Wrap like the sibling lookups instead of leaking the raw driver error.
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "created by values for rule with orgId %s not found", orgId)
	}
	return names, nil
}
// GetUpdatedBy returns distinct updated_by values matching the searchText
// prefix for the org, case-insensitively.
func (r *rule) GetUpdatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error) {
	names := make([]string, 0)
	searchText = strings.ToLower(searchText) + "%"
	query := r.sqlstore.BunDB().NewSelect().
		Distinct().
		Column("updated_by").
		TableExpr("?", bun.SafeQuery("rule")).
		Where("org_id = ?", orgId).
		Where("? LIKE ?", bun.SafeQuery(string(r.sqlstore.Formatter().LowerExpression("updated_by"))), searchText).
		Limit(limit)
	err := query.Scan(ctx, &names)
	if err != nil {
		// Wrap like the sibling lookups instead of leaking the raw driver error.
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "updated by values for rule with orgId %s not found", orgId)
	}
	return names, nil
}
// GetRuleLabelValues returns distinct values of the given label key
// ($.labels.<labelKey>) matching the searchText prefix for the org.
func (r *rule) GetRuleLabelValues(ctx context.Context, searchText string, limit int, labelKey string, orgId string) ([]string, error) {
	// labelKey arrives straight from the request query string and is spliced
	// into a JSON path that the dialect formatters embed in raw SQL, so reject
	// anything outside a conservative identifier charset to prevent SQL
	// injection through the path.
	for _, c := range labelKey {
		isSafe := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '-'
		if !isSafe {
			return nil, errors.NewInternalf(errors.CodeInvalidInput, "invalid label key %q", labelKey)
		}
	}
	names := make([]string, 0)
	labelPath := r.sqlstore.Formatter().JSONExtractString("data", "$.labels."+labelKey)
	searchText = strings.ToLower(searchText) + "%"
	query := r.sqlstore.BunDB().NewSelect().
		Distinct().
		ColumnExpr("?", bun.SafeQuery(string(labelPath))).
		TableExpr("?", bun.SafeQuery("rule")).
		Where("org_id = ?", orgId).
		Where("? LIKE ?", bun.SafeQuery(string(r.sqlstore.Formatter().LowerExpression(string(labelPath)))), searchText).
		Limit(limit)
	err := query.Scan(ctx, &names)
	if err != nil {
		return nil, r.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "search values for rule with orgId %s not found", orgId)
	}
	return names, nil
}

View File

@@ -0,0 +1,113 @@
package sqlitesqlstore
import (
	"strings"

	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun/schema"
)
// formatter builds dialect-specific SQL fragments for querying JSON stored in
// SQLite text columns via the JSON1 functions. It satisfies
// sqlstore.SQLFormatter.
type formatter struct {
	// bunf quotes identifiers (columns, aliases) per the SQLite dialect.
	bunf schema.Formatter
}

// newFormatter returns a SQLite SQLFormatter backed by the given bun dialect.
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
	return &formatter{bunf: schema.NewFormatter(dialect)}
}
// JSONExtractString renders json_extract("col", '<path>') for SQLite. The
// path is embedded verbatim inside a SQL string literal; callers must pass
// trusted/validated paths.
func (f *formatter) JSONExtractString(column, path string) []byte {
	out := []byte("json_extract(")
	out = f.bunf.AppendIdent(out, column)
	out = append(out, ", '"...)
	out = append(out, path...)
	return append(out, "')"...)
}
// JSONType renders json_type("col", '<path>'), yielding the SQLite JSON type
// name at path ('object', 'array', 'text', ...).
func (f *formatter) JSONType(column, path string) []byte {
	out := []byte("json_type(")
	out = f.bunf.AppendIdent(out, column)
	out = append(out, ", '"...)
	out = append(out, path...)
	return append(out, "')"...)
}
// JSONIsArray builds a boolean expression that is true when the JSON value at
// path is an array.
func (f *formatter) JSONIsArray(column, path string) []byte {
	return append(f.JSONType(column, path), " = 'array'"...)
}
// JSONArrayElements expands the array at path into one row per element via
// json_each. It returns the table expression and the column expression
// "<alias>.value" that selects each element.
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
	out := []byte("json_each(")
	out = f.bunf.AppendIdent(out, column)
	// "$" (or empty) addresses the column itself, so no path argument.
	if path != "" && path != "$" {
		out = append(out, ", '"...)
		out = append(out, path...)
		out = append(out, "'"...)
	}
	out = append(out, ") AS "...)
	out = f.bunf.AppendIdent(out, alias)
	return out, []byte(alias + ".value")
}
// JSONArrayOfStrings is identical to JSONArrayElements on SQLite: json_each
// already yields element values as text-compatible "<alias>.value" rows, so
// no dedicated text variant is needed.
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
	return f.JSONArrayElements(column, path, alias)
}
// JSONKeys expands the object at path into key/value rows via json_each.
// It returns the table expression and the column expression "<alias>.key".
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
	out := []byte("json_each(")
	out = f.bunf.AppendIdent(out, column)
	if path != "" && path != "$" {
		out = append(out, ", '"...)
		out = append(out, path...)
		out = append(out, "'"...)
	}
	out = append(out, ") AS "...)
	out = f.bunf.AppendIdent(out, alias)
	return out, []byte(alias + ".key")
}
// JSONArrayAgg wraps the given SQL expression in json_group_array(...). The
// expression is embedded verbatim; callers are responsible for its safety.
func (f *formatter) JSONArrayAgg(expression string) []byte {
	out := append([]byte("json_group_array("), expression...)
	return append(out, ')')
}
// JSONArrayLiteral builds a json_array('v1', 'v2', ...) literal from the
// given string values, or json_array() when no values are given. Each value
// is rendered as a SQL string literal.
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
	if len(values) == 0 {
		return []byte("json_array()")
	}
	var sql []byte
	sql = append(sql, "json_array("...)
	for i, v := range values {
		if i > 0 {
			sql = append(sql, ", "...)
		}
		sql = append(sql, '\'')
		// Escape embedded single quotes by doubling them so a value cannot
		// terminate the literal early (malformed SQL / injection).
		sql = append(sql, strings.ReplaceAll(v, "'", "''")...)
		sql = append(sql, '\'')
	}
	sql = append(sql, ')')
	return sql
}
// TextToJsonColumn returns just the quoted column: SQLite's JSON1 functions
// accept JSON stored as text directly, so no cast is needed (unlike the
// Postgres ::jsonb cast).
func (f *formatter) TextToJsonColumn(column string) []byte {
	return f.bunf.AppendIdent([]byte{}, column)
}
// LowerExpression wraps the given SQL expression in lower(...). The expression
// is embedded verbatim; callers are responsible for its safety.
func (f *formatter) LowerExpression(expression string) []byte {
	out := append([]byte("lower("), expression...)
	return append(out, ')')
}

View File

@@ -0,0 +1,397 @@
package sqlitesqlstore
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/uptrace/bun/dialect/sqlitedialect"
)
// TestJSONExtractString verifies json_extract expressions for SQLite.
func TestJSONExtractString(t *testing.T) {
	f := newFormatter(sqlitedialect.New())
	cases := []struct{ name, column, path, expected string }{
		{"simple path", "data", "$.field", `json_extract("data", '$.field')`},
		{"nested path", "metadata", "$.user.name", `json_extract("metadata", '$.user.name')`},
		{"root path", "json_col", "$", `json_extract("json_col", '$')`},
		{"array index path", "items", "$.list[0]", `json_extract("items", '$.list[0]')`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, string(f.JSONExtractString(tc.column, tc.path)))
		})
	}
}
// TestJSONType verifies json_type SQL generation for simple, nested, and
// root JSON paths.
func TestJSONType(t *testing.T) {
	for _, tc := range []struct {
		name, column, path, expected string
	}{
		{"simple path", "data", "$.field", `json_type("data", '$.field')`},
		{"nested path", "metadata", "$.user.age", `json_type("metadata", '$.user.age')`},
		{"root path", "json_col", "$", `json_type("json_col", '$')`},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			assert.Equal(t, tc.expected, string(f.JSONType(tc.column, tc.path)))
		})
	}
}
// TestJSONIsArray verifies that the array check appends "= 'array'" to the
// json_type expression.
func TestJSONIsArray(t *testing.T) {
	for _, tc := range []struct {
		name, column, path, expected string
	}{
		{"simple path", "data", "$.items", `json_type("data", '$.items') = 'array'`},
		{"nested path", "metadata", "$.user.tags", `json_type("metadata", '$.user.tags') = 'array'`},
		{"root path", "json_col", "$", `json_type("json_col", '$') = 'array'`},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			assert.Equal(t, tc.expected, string(f.JSONIsArray(tc.column, tc.path)))
		})
	}
}
// TestJSONArrayElements verifies json_each generation: the path argument is
// omitted for the root path ("$" or ""), and the result is aliased.
func TestJSONArrayElements(t *testing.T) {
	for _, tc := range []struct {
		name, column, path, alias, expected string
	}{
		{"root path with dollar sign", "data", "$", "elem", `json_each("data") AS "elem"`},
		{"root path empty", "data", "", "elem", `json_each("data") AS "elem"`},
		{"nested path", "metadata", "$.items", "item", `json_each("metadata", '$.items') AS "item"`},
		{"deeply nested path", "json_col", "$.user.tags", "tag", `json_each("json_col", '$.user.tags') AS "tag"`},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			expr, _ := f.JSONArrayElements(tc.column, tc.path, tc.alias)
			assert.Equal(t, tc.expected, string(expr))
		})
	}
}
// TestJSONArrayOfStrings verifies that string arrays expand through json_each
// exactly like generic array elements.
func TestJSONArrayOfStrings(t *testing.T) {
	for _, tc := range []struct {
		name, column, path, alias, expected string
	}{
		{"root path with dollar sign", "data", "$", "str", `json_each("data") AS "str"`},
		{"root path empty", "data", "", "str", `json_each("data") AS "str"`},
		{"nested path", "metadata", "$.strings", "s", `json_each("metadata", '$.strings') AS "s"`},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			expr, _ := f.JSONArrayOfStrings(tc.column, tc.path, tc.alias)
			assert.Equal(t, tc.expected, string(expr))
		})
	}
}
// TestJSONKeys verifies json_each generation for object-key enumeration,
// including path omission for the root document.
func TestJSONKeys(t *testing.T) {
	for _, tc := range []struct {
		name, column, path, alias, expected string
	}{
		{"root path with dollar sign", "data", "$", "k", `json_each("data") AS "k"`},
		{"root path empty", "data", "", "k", `json_each("data") AS "k"`},
		{"nested path", "metadata", "$.object", "key", `json_each("metadata", '$.object') AS "key"`},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			expr, _ := f.JSONKeys(tc.column, tc.path, tc.alias)
			assert.Equal(t, tc.expected, string(expr))
		})
	}
}
// TestJSONArrayAgg verifies that arbitrary expressions are wrapped verbatim
// in json_group_array(...).
func TestJSONArrayAgg(t *testing.T) {
	for _, tc := range []struct {
		name, expression, expected string
	}{
		{"simple column", "id", "json_group_array(id)"},
		{"expression with function", "DISTINCT name", "json_group_array(DISTINCT name)"},
		{"complex expression", "json_extract(data, '$.field')", "json_group_array(json_extract(data, '$.field'))"},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			assert.Equal(t, tc.expected, string(f.JSONArrayAgg(tc.expression)))
		})
	}
}
// TestJSONArrayLiteral verifies json_array literal construction: empty input,
// single and multiple values, and values containing spaces/dashes.
func TestJSONArrayLiteral(t *testing.T) {
	for _, tc := range []struct {
		name     string
		values   []string
		expected string
	}{
		{"empty array", []string{}, "json_array()"},
		{"single value", []string{"value1"}, "json_array('value1')"},
		{"multiple values", []string{"value1", "value2", "value3"}, "json_array('value1', 'value2', 'value3')"},
		{"values with special characters", []string{"test", "with space", "with-dash"}, "json_array('test', 'with space', 'with-dash')"},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			assert.Equal(t, tc.expected, string(f.JSONArrayLiteral(tc.values...)))
		})
	}
}
// TestTextToJsonColumn verifies that column names are only identifier-quoted
// (no cast is emitted for SQLite).
func TestTextToJsonColumn(t *testing.T) {
	for _, tc := range []struct {
		name, column, expected string
	}{
		{"simple column name", "data", `"data"`},
		{"column with underscore", "user_data", `"user_data"`},
		{"column with special characters", "json-col", `"json-col"`},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			assert.Equal(t, tc.expected, string(f.TextToJsonColumn(tc.column)))
		})
	}
}
// TestLowerExpression verifies that arbitrary SQL expressions are wrapped
// verbatim in lower(...).
func TestLowerExpression(t *testing.T) {
	for _, tc := range []struct {
		name, expr, expected string
	}{
		{"json_extract expression", "json_extract(data, '$.field')", "lower(json_extract(data, '$.field'))"},
		{"nested json_extract", "json_extract(metadata, '$.user.name')", "lower(json_extract(metadata, '$.user.name'))"},
		{"json_type expression", "json_type(data, '$.field')", "lower(json_type(data, '$.field'))"},
		{"string concatenation", "first_name || ' ' || last_name", "lower(first_name || ' ' || last_name)"},
		{"CAST expression", "CAST(value AS TEXT)", "lower(CAST(value AS TEXT))"},
		{"COALESCE expression", "COALESCE(name, 'default')", "lower(COALESCE(name, 'default'))"},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := newFormatter(sqlitedialect.New())
			assert.Equal(t, tc.expected, string(f.LowerExpression(tc.expr)))
		})
	}
}

View File

@@ -17,10 +17,11 @@ import (
)
type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
dialect *dialect
formatter sqlstore.SQLFormatter
}
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -54,11 +55,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
settings.Logger().InfoContext(ctx, "connected to sqlite", "path", config.Sqlite.Path)
sqldb.SetMaxOpenConns(config.Connection.MaxOpenConns)
sqliteDialect := sqlitedialect.New()
bunDB := sqlstore.NewBunDB(settings, sqldb, sqliteDialect, hooks)
return &provider{
settings: settings,
sqldb: sqldb,
bundb: sqlstore.NewBunDB(settings, sqldb, sqlitedialect.New(), hooks),
dialect: new(dialect),
settings: settings,
sqldb: sqldb,
bundb: bunDB,
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
}, nil
}
@@ -74,6 +78,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}
func (provider *provider) Formatter() sqlstore.SQLFormatter {
return provider.formatter
}
func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
return provider.bundb.BunDBCtx(ctx)
}

View File

@@ -20,6 +20,8 @@ type SQLStore interface {
// Returns the dialect of the database.
Dialect() SQLDialect
Formatter() SQLFormatter
// RunInTxCtx runs the given callback in a transaction. It creates and injects a new context with the transaction.
// If a transaction is present in the context, it will be used.
RunInTxCtx(ctx context.Context, opts *SQLStoreTxOptions, cb func(ctx context.Context) error) error
@@ -86,3 +88,35 @@ type SQLDialect interface {
// as an argument.
ToggleForeignKeyConstraint(ctx context.Context, bun *bun.DB, enable bool) error
}
// SQLFormatter builds SQL fragments for querying JSON stored in text columns.
// Methods return raw SQL bytes that callers splice into larger queries.
// Implementations quote column/alias identifiers, but path and expression
// arguments are interpolated verbatim — pass trusted input only.
type SQLFormatter interface {
	// JSONExtractString takes a path in sqlite format, e.g. "$.labels.severity".
	JSONExtractString(column, path string) []byte
	// JSONType determines the JSON type of the value extracted at the path.
	JSONType(column, path string) []byte
	// JSONIsArray checks whether the value at the path is a JSON array.
	JSONIsArray(column, path string) []byte
	// JSONArrayElements returns the table expression and the value-column alias
	// to be used in SELECT and WHERE clauses.
	JSONArrayElements(column, path, alias string) ([]byte, []byte)
	// JSONArrayOfStrings returns the table expression and the value-column alias
	// to be used in SELECT and WHERE clauses.
	JSONArrayOfStrings(column, path, alias string) ([]byte, []byte)
	// JSONArrayAgg aggregates values into a JSON array.
	JSONArrayAgg(expression string) []byte
	// JSONArrayLiteral creates a literal JSON array from the given string values.
	JSONArrayLiteral(values ...string) []byte
	// JSONKeys returns the key-enumeration expression and the key-column alias
	// to be used in SELECT and WHERE clauses.
	JSONKeys(column, path, alias string) ([]byte, []byte)
	// TextToJsonColumn converts a text column to a JSON-usable expression.
	TextToJsonColumn(column string) []byte
	// LowerExpression wraps any SQL expression with lower() for
	// case-insensitive operations.
	LowerExpression(expression string) []byte
}

View File

@@ -0,0 +1,112 @@
package sqlstoretest
import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun/schema"
)
// formatter is the sqlstoretest implementation of sqlstore.SQLFormatter,
// emitting SQLite-flavored JSON SQL so tests see the same generated queries
// as the sqlite provider.
type formatter struct {
	// bunf quotes identifiers according to the configured dialect.
	bunf schema.Formatter
}
// newFormatter builds a SQLFormatter that uses bun's schema formatter for the
// given dialect to quote identifiers.
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
	bunFormatter := schema.NewFormatter(dialect)
	return &formatter{bunf: bunFormatter}
}
// JSONExtractString builds json_extract(<column>, '<path>'), quoting the
// column identifier via the dialect formatter. The path (e.g.
// "$.labels.severity") is interpolated verbatim inside single quotes.
func (f *formatter) JSONExtractString(column, path string) []byte {
	ident := f.bunf.AppendIdent(nil, column)
	return []byte("json_extract(" + string(ident) + ", '" + path + "')")
}
// JSONType builds json_type(<column>, '<path>'), quoting the column
// identifier via the dialect formatter. The path is interpolated verbatim.
func (f *formatter) JSONType(column, path string) []byte {
	ident := f.bunf.AppendIdent(nil, column)
	return []byte("json_type(" + string(ident) + ", '" + path + "')")
}
// JSONIsArray builds a predicate that is true when the value at the path is a
// JSON array, by comparing json_type's result against 'array'.
func (f *formatter) JSONIsArray(column, path string) []byte {
	return append(f.JSONType(column, path), " = 'array'"...)
}
// JSONArrayElements builds a json_each table expression aliased for joins,
// and returns it together with the "<alias>.value" column reference to use in
// SELECT/WHERE clauses.
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
	expr := "json_each(" + string(f.bunf.AppendIdent(nil, column))
	// "$" (or empty) means the whole document; json_each's single-argument
	// form already iterates the root, so the path argument is omitted.
	if path != "$" && path != "" {
		expr += ", '" + path + "'"
	}
	expr += ") AS " + string(f.bunf.AppendIdent(nil, alias))
	return []byte(expr), []byte(alias + ".value")
}
// JSONArrayOfStrings expands an array of strings. In SQLite, string elements
// come straight out of json_each's value column, so this simply delegates to
// JSONArrayElements.
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
	expr, valueColumn := f.JSONArrayElements(column, path, alias)
	return expr, valueColumn
}
// JSONKeys builds a json_each table expression aliased for joins, and returns
// it together with the "<alias>.key" column reference for enumerating object
// keys in SELECT/WHERE clauses.
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
	expr := "json_each(" + string(f.bunf.AppendIdent(nil, column))
	// Omit the path for the root document ("$" or empty), matching
	// json_each's single-argument form.
	if path != "$" && path != "" {
		expr += ", '" + path + "'"
	}
	expr += ") AS " + string(f.bunf.AppendIdent(nil, alias))
	return []byte(expr), []byte(alias + ".key")
}
// JSONArrayAgg wraps the given SQL expression in SQLite's json_group_array
// aggregate, collecting the expression's values into a single JSON array.
// The expression is interpolated verbatim, so it must be trusted SQL.
func (f *formatter) JSONArrayAgg(expression string) []byte {
	return []byte("json_group_array(" + expression + ")")
}
// JSONArrayLiteral builds a SQLite json_array(...) literal from the given
// string values. Each value is single-quoted; embedded single quotes are
// doubled per standard SQL escaping so a value cannot terminate its quoted
// literal early (previously unescaped, which produced malformed SQL and an
// injection vector for values containing '). An empty value list yields
// "json_array()".
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
	if len(values) == 0 {
		return []byte("json_array()")
	}
	var sql []byte
	sql = append(sql, "json_array("...)
	for i, v := range values {
		if i > 0 {
			sql = append(sql, ", "...)
		}
		sql = append(sql, '\'')
		for j := 0; j < len(v); j++ {
			sql = append(sql, v[j])
			// Double embedded single quotes ('' is an escaped ' in SQL).
			if v[j] == '\'' {
				sql = append(sql, '\'')
			}
		}
		sql = append(sql, '\'')
	}
	sql = append(sql, ')')
	return sql
}
// TextToJsonColumn returns the quoted column identifier as-is; SQLite's JSON
// functions accept text columns directly, so no explicit cast is emitted.
func (f *formatter) TextToJsonColumn(column string) []byte {
	return f.bunf.AppendIdent(nil, column)
}
// LowerExpression wraps an arbitrary SQL expression in lower() for
// case-insensitive comparisons. The expression is interpolated verbatim.
func (f *formatter) LowerExpression(expression string) []byte {
	return []byte("lower(" + expression + ")")
}

View File

@@ -15,10 +15,11 @@ import (
var _ sqlstore.SQLStore = (*Provider)(nil)
type Provider struct {
db *sql.DB
mock sqlmock.Sqlmock
bunDB *bun.DB
dialect *dialect
db *sql.DB
mock sqlmock.Sqlmock
bunDB *bun.DB
dialect *dialect
formatter sqlstore.SQLFormatter
}
func New(config sqlstore.Config, matcher sqlmock.QueryMatcher) *Provider {
@@ -38,10 +39,11 @@ func New(config sqlstore.Config, matcher sqlmock.QueryMatcher) *Provider {
}
return &Provider{
db: db,
mock: mock,
bunDB: bunDB,
dialect: new(dialect),
db: db,
mock: mock,
bunDB: bunDB,
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
}
}
@@ -61,6 +63,8 @@ func (provider *Provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}
func (provider *Provider) Formatter() sqlstore.SQLFormatter { return provider.formatter }
func (provider *Provider) BunDBCtx(ctx context.Context) bun.IDB {
return provider.bunDB
}

View File

@@ -4,6 +4,8 @@ import (
"context"
"encoding/json"
"fmt"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"slices"
"time"
"unicode/utf8"
@@ -452,3 +454,18 @@ func (g *GettableRule) MarshalJSON() ([]byte, error) {
return json.Marshal(aux)
}
}
// RuleAttributeKeyType classifies a rule attribute key as either a fixed
// (built-in) rule field or a user-defined rule label.
type RuleAttributeKeyType struct {
	valuer.String
}

var (
	// RuleAttributeTypeFixed marks keys backed by fixed rule fields.
	RuleAttributeTypeFixed = RuleAttributeKeyType{valuer.NewString("fixed")}
	// RuleAttributeTypeLabel marks keys sourced from rule labels.
	RuleAttributeTypeLabel = RuleAttributeKeyType{valuer.NewString("label")}
)

// GetRuleAttributeKeys describes one filterable rule attribute key: its name,
// its data type, and whether it is a fixed field or a label.
type GetRuleAttributeKeys struct {
	Key      string                       `json:"key"`
	DataType telemetrytypes.FieldDataType `json:"dataType"`
	Type     RuleAttributeKeyType         `json:"type"`
}

View File

@@ -1,5 +1,31 @@
package ruletypes
const CriticalThresholdName = "CRITICAL"
const LabelThresholdName = "threshold.name"
const LabelRuleId = "ruleId"
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
const (
	// CriticalThresholdName is the reserved name of the critical threshold.
	CriticalThresholdName = "CRITICAL"
	// LabelThresholdName is the label key carrying the threshold name.
	LabelThresholdName = "threshold.name"
	// LabelRuleId is the label key carrying the rule identifier.
	LabelRuleId = "ruleId"

	// Rule attribute key constants for search and filtering.
	RuleAttributeKeyCreatedBy     = "created_by"
	RuleAttributeKeyUpdatedBy     = "updated_by"
	RuleAttributeKeyName          = "name"
	RuleAttributeKeyThresholdName = "threshold.name"
	RuleAttributeKeyPolicy        = "policy"
	RuleAttributeKeyChannel       = "channel"
	RuleAttributeKeyState         = "state"
	// RuleAttributeKeyRuleType = "type" // NOTE(review): currently unused — remove or wire up.
)

// FixedRuleAttributeKeys lists the built-in (non-label) attribute keys that
// rules can be searched and filtered on, with their data types.
var (
	FixedRuleAttributeKeys = []GetRuleAttributeKeys{
		{Key: RuleAttributeKeyCreatedBy, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
		{Key: RuleAttributeKeyUpdatedBy, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
		{Key: RuleAttributeKeyName, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
		{Key: RuleAttributeKeyThresholdName, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
		{Key: RuleAttributeKeyChannel, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
		{Key: RuleAttributeKeyPolicy, DataType: telemetrytypes.FieldDataTypeBool, Type: RuleAttributeTypeFixed},
		{Key: RuleAttributeKeyState, DataType: telemetrytypes.FieldDataTypeString, Type: RuleAttributeTypeFixed},
	}
)

View File

@@ -53,4 +53,11 @@ type RuleStore interface {
DeleteRule(context.Context, valuer.UUID, func(context.Context) error) error
GetStoredRules(context.Context, string) ([]*Rule, error)
GetStoredRule(context.Context, valuer.UUID) (*Rule, error)
GetRuleLabelKeys(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
GetThresholdNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
GetChannel(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
GetNames(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
GetCreatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
GetUpdatedBy(ctx context.Context, searchText string, limit int, orgId string) ([]string, error)
GetRuleLabelValues(ctx context.Context, searchText string, limit int, labelKey string, orgId string) ([]string, error)
}