Compare commits

...

1 Commits

Author SHA1 Message Date
srikanthccv
6404e7388e chore: several fixes and dx 2025-06-11 10:14:32 +05:30
18 changed files with 2014 additions and 115 deletions

View File

@@ -33,6 +33,12 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
return
}
// Validate the query request
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)

View File

@@ -68,7 +68,7 @@ func CollisionHandledFinalExpr(
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", nil, err
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name)
}
} else {
for _, key := range keysForField {

View File

@@ -22,7 +22,7 @@ type filterExpressionVisitor struct {
conditionBuilder qbtypes.ConditionBuilder
warnings []string
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
errors []error
errors []string
builder *sqlbuilder.SelectBuilder
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
@@ -90,10 +90,12 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*sqlbuilder.W
combinedErrors := errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"found %d syntax errors while parsing the filter expression: %v",
"found %d syntax errors while parsing the filter expression",
len(parserErrorListener.SyntaxErrors),
parserErrorListener.SyntaxErrors,
)
for _, err := range parserErrorListener.SyntaxErrors {
combinedErrors = combinedErrors.WithAdditional(err.Error())
}
return nil, nil, combinedErrors
}
@@ -105,10 +107,12 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*sqlbuilder.W
combinedErrors := errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"found %d errors while parsing the search expression: %v",
"found %d errors while parsing the search expression",
len(visitor.errors),
visitor.errors,
)
for _, err := range visitor.errors {
combinedErrors = combinedErrors.WithAdditional(err)
}
return nil, nil, combinedErrors
}
@@ -238,11 +242,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
}
if v.fullTextColumn == nil {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"full text search is not supported",
))
v.errors = append(v.errors, "full text search is not supported")
return ""
}
child := ctx.GetChild(0)
@@ -251,7 +251,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
keyText := keyCtx.GetText()
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, keyText, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build full text search condition"))
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
}
return cond
@@ -266,12 +266,12 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
} else if valCtx.KEY() != nil {
text = valCtx.KEY().GetText()
} else {
v.errors = append(v.errors, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported value type: %s", valCtx.GetText()))
v.errors = append(v.errors, fmt.Sprintf("unsupported value type: %s", valCtx.GetText()))
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, text, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build full text search condition"))
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
}
return cond
@@ -419,7 +419,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build condition"))
v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
return ""
}
conds = append(conds, condition)
@@ -471,16 +471,12 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
}
if v.fullTextColumn == nil {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"full text search is not supported",
))
v.errors = append(v.errors, "full text search is not supported")
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, text, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build full text search condition"))
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
}
return cond
@@ -498,34 +494,19 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
functionName = "hasAll"
} else {
// Default fallback
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"unknown function `%s`",
ctx.GetText(),
))
v.errors = append(v.errors, fmt.Sprintf("unknown function `%s`", ctx.GetText()))
return ""
}
params := v.Visit(ctx.FunctionParamList()).([]any)
if len(params) < 2 {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` expects key and value parameters",
functionName,
))
v.errors = append(v.errors, fmt.Sprintf("function `%s` expects key and value parameters", functionName))
return ""
}
keys, ok := params[0].([]*telemetrytypes.TelemetryFieldKey)
if !ok {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` expects key parameter to be a field key",
functionName,
))
v.errors = append(v.errors, fmt.Sprintf("function `%s` expects key parameter to be a field key", functionName))
return ""
}
value := params[1:]
@@ -536,12 +517,7 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
if strings.HasPrefix(key.Name, v.jsonBodyPrefix) {
fieldName, _ = v.jsonKeyToKey(context.Background(), key, qbtypes.FilterOperatorUnknown, value)
} else {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` supports only body JSON search",
functionName,
))
v.errors = append(v.errors, fmt.Sprintf("function `%s` supports only body JSON search", functionName))
return ""
}
@@ -603,12 +579,7 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
} else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"failed to parse number %s",
ctx.NUMBER().GetText(),
))
v.errors = append(v.errors, fmt.Sprintf("failed to parse number %s", ctx.NUMBER().GetText()))
return ""
}
return number
@@ -648,19 +619,11 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
if len(fieldKeysForName) == 0 {
if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" && keyName == "" {
v.errors = append(v.errors, errors.NewInvalidInputf(
errors.CodeInvalidInput,
"missing key for body json search - expected key of the form `body.key` (ex: `body.status`)",
))
v.errors = append(v.errors, "missing key for body json search - expected key of the form `body.key` (ex: `body.status`)")
} else {
// TODO(srikanthccv): do we want to return an error here?
// should we infer the type and auto-magically build a key for expression?
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"key `%s` not found",
fieldKey.Name,
))
v.errors = append(v.errors, fmt.Sprintf("key `%s` not found", fieldKey.Name))
}
}

View File

@@ -173,7 +173,7 @@ func (m *fieldMapper) ColumnExpressionFor(
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", err
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name)
}
}
} else if len(keysForField) == 1 {
@@ -186,7 +186,7 @@ func (m *fieldMapper) ColumnExpressionFor(
colName, _ = m.FieldFor(ctx, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s)", strings.Join(args, ", "))
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
}
}

View File

@@ -6,7 +6,6 @@ import (
"log/slog"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -14,10 +13,6 @@ import (
"github.com/huandu/go-sqlbuilder"
)
var (
ErrUnsupportedAggregation = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported aggregation")
)
type logQueryStatementBuilder struct {
logger *slog.Logger
metadataStore telemetrytypes.MetadataStore
@@ -165,12 +160,19 @@ func (b *logQueryStatementBuilder) buildListQuery(
// Add order by
for _, orderBy := range query.Order {
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction))
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
sb.OrderBy(fmt.Sprintf("%s %s", colExpr, orderBy.Direction.StringValue()))
}
// Add limit and offset
if query.Limit > 0 {
sb.Limit(query.Limit)
} else {
// default to 100 rows
sb.Limit(100)
}
if query.Offset > 0 {
@@ -381,9 +383,9 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
for _, orderBy := range query.Order {
idx, ok := aggOrderBy(orderBy, query)
if ok {
sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction))
sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue()))
} else {
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction))
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
}
}
@@ -420,19 +422,25 @@ func (b *logQueryStatementBuilder) addFilterCondition(
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) ([]string, error) {
// add filter expression
filterWhereClause, warnings, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
SkipResourceFilter: true,
FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
})
var filterWhereClause *sqlbuilder.WhereClause
var warnings []string
var err error
if err != nil {
return nil, err
if query.Filter != nil && query.Filter.Expression != "" {
// add filter expression
filterWhereClause, warnings, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
SkipResourceFilter: true,
FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
})
if err != nil {
return nil, err
}
}
if filterWhereClause != nil {

View File

@@ -95,7 +95,7 @@ func (m *fieldMapper) ColumnExpressionFor(
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", err
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name)
}
}
} else if len(keysForField) == 1 {
@@ -108,7 +108,7 @@ func (m *fieldMapper) ColumnExpressionFor(
colName, _ = m.FieldFor(ctx, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s)", strings.Join(args, ", "))
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
}
}

View File

@@ -250,7 +250,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", err
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name)
}
}
} else if len(keysForField) == 1 {
@@ -263,7 +263,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
colName, _ = m.FieldFor(ctx, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s)", strings.Join(args, ", "))
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
}
}

View File

@@ -179,12 +179,19 @@ func (b *traceQueryStatementBuilder) buildListQuery(
// Add order by
for _, orderBy := range query.Order {
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
sb.OrderBy(fmt.Sprintf("%s %s", colExpr, orderBy.Direction.StringValue()))
}
// Add limit and offset
if query.Limit > 0 {
sb.Limit(query.Limit)
} else {
// default to 100 rows
sb.Limit(100)
}
if query.Offset > 0 {

View File

@@ -56,3 +56,20 @@ type QueryBuilderQuery[T any] struct {
// functions to apply to the query
Functions []Function `json:"functions,omitempty"`
}
// UnmarshalJSON implements custom JSON unmarshaling to disallow unknown fields.
func (q *QueryBuilderQuery[T]) UnmarshalJSON(data []byte) error {
	// The local alias strips the method set, so decoding into it cannot
	// recurse back into this UnmarshalJSON implementation.
	type plain QueryBuilderQuery[T]

	var decoded plain
	// UnmarshalJSONWithContext rejects unknown fields and augments the
	// error with close-match suggestions.
	if err := UnmarshalJSONWithContext(data, &decoded, "query spec"); err != nil {
		return err
	}

	*q = QueryBuilderQuery[T](decoded)
	return nil
}

View File

@@ -24,6 +24,17 @@ type QueryBuilderFormula struct {
Functions []Function `json:"functions,omitempty"`
}
// UnmarshalJSON implements custom JSON unmarshaling to disallow unknown fields.
func (f *QueryBuilderFormula) UnmarshalJSON(data []byte) error {
	// The local alias strips the method set, so decoding into it cannot
	// recurse back into this UnmarshalJSON implementation.
	type plain QueryBuilderFormula

	var decoded plain
	// UnmarshalJSONWithContext rejects unknown fields and augments the
	// error with close-match suggestions.
	if err := UnmarshalJSONWithContext(data, &decoded, "formula spec"); err != nil {
		return err
	}

	*f = QueryBuilderFormula(decoded)
	return nil
}
// small container to store the query name and index or alias reference
// for a variable in the formula expression
// read below for more details on aggregation references

View File

@@ -0,0 +1,109 @@
package querybuildertypesv5
import (
"bytes"
"encoding/json"
"reflect"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
)
// UnmarshalJSONWithSuggestions unmarshals JSON data into the target struct
// and provides field name suggestions for unknown fields.
// It is a convenience wrapper around UnmarshalJSONWithContext with an empty
// context label, so errors read "unknown field ..." without a location suffix.
func UnmarshalJSONWithSuggestions(data []byte, target any) error {
	return UnmarshalJSONWithContext(data, target, "")
}
// UnmarshalJSONWithContext unmarshals JSON with context information for better error messages
func UnmarshalJSONWithContext(data []byte, target any, context string) error {
// First, try to unmarshal with DisallowUnknownFields to catch unknown fields
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
err := dec.Decode(target)
if err == nil {
// No error, successful unmarshal
return nil
}
// Check if it's an unknown field error
if strings.Contains(err.Error(), "unknown field") {
// Extract the unknown field name
unknownField := extractUnknownField(err.Error())
if unknownField != "" {
// Get valid field names from the target struct
validFields := getJSONFieldNames(target)
// Build error message with context
errorMsg := "unknown field %q"
if context != "" {
errorMsg = "unknown field %q in " + context
}
// Find closest match with max distance of 3 (reasonable for typos)
if suggestion, found := findClosestMatch(unknownField, validFields, 3); found {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
errorMsg,
unknownField,
).WithAdditional(
"Did you mean '" + suggestion + "'?",
)
}
// No good suggestion found
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
errorMsg,
unknownField,
).WithAdditional(
"Valid fields are: " + strings.Join(validFields, ", "),
)
}
}
// Return the original error if it's not an unknown field error
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid JSON: %v", err)
}
// extractUnknownField extracts the field name from an unknown field error
// message. encoding/json reports the condition as:
//
//	json: unknown field "fieldname"
//
// so the name is the first double-quoted segment of the message; an empty
// string is returned when no quote is present.
func extractUnknownField(errMsg string) string {
	if _, rest, ok := strings.Cut(errMsg, `"`); ok {
		name, _, _ := strings.Cut(rest, `"`)
		return name
	}
	return ""
}
// getJSONFieldNames extracts all JSON field names from a struct.
// It accepts a struct, a pointer to a struct, or nil; for anything else it
// returns an empty list. Fields are skipped when they are unexported (the
// encoding/json package never encodes or decodes them), untagged, tagged
// "-", or tagged with an empty name.
func getJSONFieldNames(v any) []string {
	var fields []string

	t := reflect.TypeOf(v)
	// reflect.TypeOf(nil) returns nil; guard before calling Kind, which
	// would otherwise panic.
	if t == nil {
		return fields
	}
	if t.Kind() == reflect.Ptr {
		t = t.Elem()
	}
	if t.Kind() != reflect.Struct {
		return fields
	}

	for i := 0; i < t.NumField(); i++ {
		field := t.Field(i)

		// encoding/json ignores unexported fields, so offering them as
		// suggestions would be misleading even if they carry a json tag.
		if !field.IsExported() {
			continue
		}

		jsonTag := field.Tag.Get("json")
		if jsonTag == "" || jsonTag == "-" {
			continue
		}

		// The name is everything before the first tag option
		// (e.g. ",omitempty").
		fieldName := strings.Split(jsonTag, ",")[0]
		if fieldName != "" {
			fields = append(fields, fieldName)
		}
	}
	return fields
}

View File

@@ -0,0 +1,87 @@
package querybuildertypesv5
import (
"strings"
)
// levenshteinDistance returns the edit distance between s1 and s2: the
// minimum number of single-byte insertions, deletions, and substitutions
// needed to turn one string into the other.
func levenshteinDistance(s1, s2 string) int {
	if len(s1) == 0 {
		return len(s2)
	}
	if len(s2) == 0 {
		return len(s1)
	}

	// Rolling single-row dynamic program: row[j] holds the distance between
	// the processed prefix of s1 and s2[:j].
	row := make([]int, len(s2)+1)
	for j := range row {
		row[j] = j
	}

	for i := 1; i <= len(s1); i++ {
		diag := row[0] // distance(s1[:i-1], s2[:j-1]) for the current j
		row[0] = i
		for j := 1; j <= len(s2); j++ {
			cost := 0
			if s1[i-1] != s2[j-1] {
				cost = 1
			}
			best := diag + cost // substitution (or match when cost == 0)
			if d := row[j] + 1; d < best { // deletion
				best = d
			}
			if d := row[j-1] + 1; d < best { // insertion
				best = d
			}
			diag = row[j]
			row[j] = best
		}
	}
	return row[len(s2)]
}
// findClosestMatch returns the option whose case-insensitive edit distance
// to target is smallest, provided that distance does not exceed maxDistance.
// The second return value reports whether such an option was found; ties are
// resolved in favor of the earliest option.
func findClosestMatch(target string, validOptions []string, maxDistance int) (string, bool) {
	if len(validOptions) == 0 {
		return "", false
	}

	// Compare lowercased forms so suggestions work regardless of case.
	targetLower := strings.ToLower(target)

	bestMatch, bestDistance := "", maxDistance+1
	for _, option := range validOptions {
		if d := levenshteinDistance(targetLower, strings.ToLower(option)); d < bestDistance {
			bestMatch, bestDistance = option, d
		}
	}

	// Reject matches that exceed the caller's threshold.
	if bestDistance > maxDistance {
		return "", false
	}
	return bestMatch, true
}
// min returns the minimum of three integers
func min(a, b, c int) int {
	smallest := a
	if b < smallest {
		smallest = b
	}
	if c < smallest {
		smallest = c
	}
	return smallest
}

View File

@@ -0,0 +1,323 @@
package querybuildertypesv5
import (
"encoding/json"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestLevenshteinDistance verifies the edit-distance computation against
// hand-checked pairs, including empty strings and identical inputs.
func TestLevenshteinDistance(t *testing.T) {
	tests := []struct {
		s1       string
		s2       string
		expected int
	}{
		{"", "", 0},
		{"a", "", 1},
		{"", "a", 1},
		{"a", "a", 0},
		{"abc", "abc", 0},
		{"kitten", "sitting", 3},
		{"saturday", "sunday", 3},
		{"expires", "expires_in", 3},
		{"start", "end", 5},                    // substitute s->e, t->n, a->d; delete r, t
		{"schemaVersion", "schema_version", 2}, // insert '_', substitute 'V'->'v'
	}
	for _, tt := range tests {
		t.Run(tt.s1+"_"+tt.s2, func(t *testing.T) {
			result := levenshteinDistance(tt.s1, tt.s2)
			assert.Equal(t, tt.expected, result)
		})
	}
}
// TestFindClosestMatch covers the suggestion lookup: exact matches, near
// matches within the distance threshold, case-insensitive matching, and the
// no-match case.
func TestFindClosestMatch(t *testing.T) {
	tests := []struct {
		name          string
		target        string
		validOptions  []string
		maxDistance   int
		expectedMatch string
		expectedFound bool
	}{
		{
			name:          "exact match",
			target:        "start",
			validOptions:  []string{"start", "end", "limit"},
			maxDistance:   3,
			expectedMatch: "start",
			expectedFound: true,
		},
		{
			name:          "close match",
			target:        "strt",
			validOptions:  []string{"start", "end", "limit"},
			maxDistance:   3,
			expectedMatch: "start",
			expectedFound: true,
		},
		{
			name:          "case insensitive match",
			target:        "START",
			validOptions:  []string{"start", "end", "limit"},
			maxDistance:   3,
			expectedMatch: "start",
			expectedFound: true,
		},
		{
			name:          "no match within distance",
			target:        "completely_different",
			validOptions:  []string{"start", "end", "limit"},
			maxDistance:   3,
			expectedMatch: "",
			expectedFound: false,
		},
		{
			name:          "expires to expires_in",
			target:        "expires",
			validOptions:  []string{"expires_in", "start", "end"},
			maxDistance:   3,
			expectedMatch: "expires_in",
			expectedFound: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			match, found := findClosestMatch(tt.target, tt.validOptions, tt.maxDistance)
			assert.Equal(t, tt.expectedFound, found)
			// The match value is only meaningful when a match was reported.
			if tt.expectedFound {
				assert.Equal(t, tt.expectedMatch, match)
			}
		})
	}
}
// TestQueryRangeRequestUnmarshalWithSuggestions exercises QueryRangeRequest
// JSON decoding end to end: a valid payload decodes cleanly, while typoed or
// unknown field names are rejected with an unknown-field error naming the
// offending field.
func TestQueryRangeRequestUnmarshalWithSuggestions(t *testing.T) {
	tests := []struct {
		name        string
		jsonData    string
		expectedErr string
	}{
		{
			name: "valid request",
			jsonData: `{
"schemaVersion": "v5",
"start": 1000,
"end": 2000,
"requestType": "timeseries",
"compositeQuery": {
"queries": []
}
}`,
			expectedErr: "",
		},
		{
			name: "typo in start field",
			jsonData: `{
"schemaVersion": "v5",
"strt": 1000,
"end": 2000,
"requestType": "timeseries",
"compositeQuery": {
"queries": []
}
}`,
			expectedErr: `unknown field "strt"`,
		},
		{
			name: "typo in schemaVersion",
			jsonData: `{
"schemaVerson": "v5",
"start": 1000,
"end": 2000,
"requestType": "timeseries",
"compositeQuery": {
"queries": []
}
}`,
			expectedErr: `unknown field "schemaVerson"`,
		},
		{
			name: "requestype instead of requestType",
			jsonData: `{
"schemaVersion": "v5",
"start": 1000,
"end": 2000,
"requestype": "timeseries",
"compositeQuery": {
"queries": []
}
}`,
			expectedErr: `unknown field "requestype"`,
		},
		{
			name: "composite_query instead of compositeQuery",
			jsonData: `{
"schemaVersion": "v5",
"start": 1000,
"end": 2000,
"requestType": "timeseries",
"composite_query": {
"queries": []
}
}`,
			expectedErr: `unknown field "composite_query"`,
		},
		{
			name: "no_cache instead of noCache",
			jsonData: `{
"schemaVersion": "v5",
"start": 1000,
"end": 2000,
"requestType": "timeseries",
"compositeQuery": {
"queries": []
},
"no_cache": true
}`,
			expectedErr: `unknown field "no_cache"`,
		},
		{
			name: "format_options instead of formatOptions",
			jsonData: `{
"schemaVersion": "v5",
"start": 1000,
"end": 2000,
"requestType": "timeseries",
"compositeQuery": {
"queries": []
},
"format_options": {}
}`,
			expectedErr: `unknown field "format_options"`,
		},
		{
			name: "completely unknown field with no good suggestion",
			jsonData: `{
"schemaVersion": "v5",
"completely_unknown_field_xyz": 1000,
"end": 2000,
"requestType": "timeseries",
"compositeQuery": {
"queries": []
}
}`,
			expectedErr: `unknown field "completely_unknown_field_xyz"`,
		},
		{
			name: "common mistake: limit instead of variables",
			jsonData: `{
"schemaVersion": "v5",
"start": 1000,
"end": 2000,
"requestType": "timeseries",
"compositeQuery": {
"queries": []
},
"limit": 100
}`,
			expectedErr: `unknown field "limit"`,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var req QueryRangeRequest
			// json.Unmarshal dispatches to QueryRangeRequest.UnmarshalJSON,
			// which performs the unknown-field checks being tested here.
			err := json.Unmarshal([]byte(tt.jsonData), &req)
			if tt.expectedErr == "" {
				require.NoError(t, err)
			} else {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tt.expectedErr)
			}
		})
	}
}
// TestGetJSONFieldNames checks that only fields with a usable json tag are
// reported: "-" tags, empty tags, and untagged fields are all skipped.
func TestGetJSONFieldNames(t *testing.T) {
	type TestStruct struct {
		Field1 string `json:"field1"`
		Field2 int    `json:"field2,omitempty"`
		Field3 bool   `json:"-"`
		Field4 string `json:""`
		Field5 string // no json tag
	}
	fields := getJSONFieldNames(&TestStruct{})
	expected := []string{"field1", "field2"}
	assert.ElementsMatch(t, expected, fields)
}
// TestUnmarshalJSONWithSuggestions checks the generic helper against a local
// struct: valid payloads decode, typos are rejected, only the first unknown
// field is reported when several are present, and wrong-case field names are
// treated as unknown.
func TestUnmarshalJSONWithSuggestions(t *testing.T) {
	type TestRequest struct {
		SchemaVersion string `json:"schemaVersion"`
		Start         int64  `json:"start"`
		End           int64  `json:"end"`
		Limit         int    `json:"limit,omitempty"`
	}
	tests := []struct {
		name        string
		jsonData    string
		expectedErr string
	}{
		{
			name: "valid JSON",
			jsonData: `{
"schemaVersion": "v1",
"start": 1000,
"end": 2000
}`,
			expectedErr: "",
		},
		{
			name: "typo in field name",
			jsonData: `{
"schemaVerson": "v1",
"start": 1000,
"end": 2000
}`,
			expectedErr: `unknown field "schemaVerson"`,
		},
		{
			name: "multiple typos - only first is reported",
			jsonData: `{
"strt": 1000,
"ed": 2000
}`,
			expectedErr: `unknown field "strt"`,
		},
		{
			name: "case sensitivity",
			jsonData: `{
"schema_version": "v1",
"start": 1000,
"end": 2000
}`,
			expectedErr: `unknown field "schema_version"`,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var req TestRequest
			err := UnmarshalJSONWithSuggestions([]byte(tt.jsonData), &req)
			if tt.expectedErr == "" {
				require.NoError(t, err)
			} else {
				require.Error(t, err)
				// Clean up the error message for comparison
				errMsg := strings.ReplaceAll(err.Error(), "\n", " ")
				assert.Contains(t, errMsg, tt.expectedErr)
			}
		})
	}
}

View File

@@ -2,6 +2,7 @@ package querybuildertypesv5
import (
"encoding/json"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -17,12 +18,11 @@ type QueryEnvelope struct {
// implement custom json unmarshaler for the QueryEnvelope
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
var shadow struct {
Name string `json:"name"`
Type QueryType `json:"type"`
Spec json.RawMessage `json:"spec"`
}
if err := json.Unmarshal(data, &shadow); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid query envelope")
if err := UnmarshalJSONWithSuggestions(data, &shadow); err != nil {
return err
}
q.Type = shadow.Type
@@ -34,62 +34,169 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
Signal telemetrytypes.Signal `json:"signal"`
}
if err := json.Unmarshal(shadow.Spec, &header); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "cannot detect builder signal")
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"cannot detect builder signal: %v",
err,
)
}
switch header.Signal {
case telemetrytypes.SignalTraces:
var spec QueryBuilderQuery[TraceAggregation]
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid trace builder query spec")
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "query spec"); err != nil {
// If it's already one of our wrapped errors with additional context, return as-is
_, _, _, _, _, additionals := errors.Unwrapb(err)
if len(additionals) > 0 {
return err
}
// Preserve helpful error messages about unknown fields
if strings.Contains(err.Error(), "unknown field") {
return err
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid trace builder query spec: %v",
err,
)
}
q.Spec = spec
case telemetrytypes.SignalLogs:
var spec QueryBuilderQuery[LogAggregation]
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid log builder query spec")
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "query spec"); err != nil {
// If it's already one of our wrapped errors with additional context, return as-is
_, _, _, _, _, additionals := errors.Unwrapb(err)
if len(additionals) > 0 {
return err
}
// Preserve helpful error messages about unknown fields
if strings.Contains(err.Error(), "unknown field") {
return err
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid log builder query spec: %v",
err,
)
}
q.Spec = spec
case telemetrytypes.SignalMetrics:
var spec QueryBuilderQuery[MetricAggregation]
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid metric builder query spec")
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "query spec"); err != nil {
// If it's already one of our wrapped errors with additional context, return as-is
_, _, _, _, _, additionals := errors.Unwrapb(err)
if len(additionals) > 0 {
return err
}
// Preserve helpful error messages about unknown fields
if strings.Contains(err.Error(), "unknown field") {
return err
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid metric builder query spec: %v",
err,
)
}
q.Spec = spec
default:
return errors.WrapInvalidInputf(nil, errors.CodeInvalidInput, "unknown builder signal %q", header.Signal)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown builder signal %q",
header.Signal,
).WithAdditional(
"Valid signals are: traces, logs, metrics",
)
}
case QueryTypeFormula:
var spec QueryBuilderFormula
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid formula spec")
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "formula spec"); err != nil {
// If it's already one of our wrapped errors with additional context, return as-is
_, _, _, _, _, additionals := errors.Unwrapb(err)
if len(additionals) > 0 {
return err
}
// Preserve helpful error messages about unknown fields
if strings.Contains(err.Error(), "unknown field") {
return err
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid formula spec: %v",
err,
)
}
q.Spec = spec
case QueryTypeJoin:
var spec QueryBuilderJoin
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid join spec")
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "join spec"); err != nil {
// If it's already one of our wrapped errors with additional context, return as-is
_, _, _, _, _, additionals := errors.Unwrapb(err)
if len(additionals) > 0 {
return err
}
// Preserve helpful error messages about unknown fields
if strings.Contains(err.Error(), "unknown field") {
return err
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid join spec: %v",
err,
)
}
q.Spec = spec
case QueryTypePromQL:
var spec PromQuery
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid PromQL spec")
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "PromQL spec"); err != nil {
// If it's already one of our wrapped errors with additional context, return as-is
_, _, _, _, _, additionals := errors.Unwrapb(err)
if len(additionals) > 0 {
return err
}
// Preserve helpful error messages about unknown fields
if strings.Contains(err.Error(), "unknown field") {
return err
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid PromQL spec: %v",
err,
)
}
q.Spec = spec
case QueryTypeClickHouseSQL:
var spec ClickHouseQuery
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid ClickHouse SQL spec")
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "ClickHouse SQL spec"); err != nil {
// If it's already one of our wrapped errors with additional context, return as-is
_, _, _, _, _, additionals := errors.Unwrapb(err)
if len(additionals) > 0 {
return err
}
// Preserve helpful error messages about unknown fields
if strings.Contains(err.Error(), "unknown field") {
return err
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid ClickHouse SQL spec: %v",
err,
)
}
q.Spec = spec
default:
return errors.WrapInvalidInputf(nil, errors.CodeInvalidInput, "unknown query type %q", shadow.Type)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query type %q",
shadow.Type,
).WithAdditional(
"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql",
)
}
return nil
@@ -100,6 +207,59 @@ type CompositeQuery struct {
Queries []QueryEnvelope `json:"queries"`
}
// UnmarshalJSON implements custom JSON unmarshaling to provide better error messages
func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
	// Decode the known fields first; nested query envelopes validate
	// themselves through their own UnmarshalJSON implementations.
	type plain CompositeQuery
	var decoded plain
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}

	// Re-decode into a generic map so unknown keys at this level can be
	// detected without DisallowUnknownFields, which would also reject keys
	// that belong to nested structures.
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}

	// The only key this struct accepts at the top level.
	validFields := map[string]bool{
		"queries": true,
	}
	for field := range raw {
		if validFields[field] {
			continue
		}

		var fieldNames []string
		for f := range validFields {
			fieldNames = append(fieldNames, f)
		}

		invalid := errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"unknown field %q in composite query",
			field,
		)
		// Suggest the closest valid field when one is within edit distance 3;
		// otherwise list every valid field.
		if suggestion, found := findClosestMatch(field, fieldNames, 3); found {
			return invalid.WithAdditional("Did you mean '" + suggestion + "'?")
		}
		return invalid.WithAdditional("Valid fields are: " + strings.Join(fieldNames, ", "))
	}

	*c = CompositeQuery(decoded)
	return nil
}
type QueryRangeRequest struct {
// SchemaVersion is the version of the schema to use for the request payload.
SchemaVersion string `json:"schemaVersion"`
@@ -120,6 +280,69 @@ type QueryRangeRequest struct {
FormatOptions *FormatOptions `json:"formatOptions,omitempty"`
}
// UnmarshalJSON implements custom JSON unmarshaling to disallow unknown fields
func (r *QueryRangeRequest) UnmarshalJSON(data []byte) error {
	// plain strips this method so json.Unmarshal does not recurse into it.
	type plain QueryRangeRequest

	var decoded plain
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}

	// Re-decode into a generic map to spot unknown keys at the top level
	// only; nested structures perform their own field validation.
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}

	known := map[string]bool{
		"schemaVersion":  true,
		"start":          true,
		"end":            true,
		"requestType":    true,
		"compositeQuery": true,
		"variables":      true,
		"noCache":        true,
		"formatOptions":  true,
	}
	for key := range raw {
		if known[key] {
			continue
		}
		names := make([]string, 0, len(known))
		for k := range known {
			names = append(names, k)
		}
		unknownErr := errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"unknown field %q",
			key,
		)
		// Suggest the closest valid field when one is within edit distance 3.
		if suggestion, ok := findClosestMatch(key, names, 3); ok {
			return unknownErr.WithAdditional("Did you mean '" + suggestion + "'?")
		}
		return unknownErr.WithAdditional("Valid fields are: " + strings.Join(names, ", "))
	}

	// Copy the decoded values back to the original struct.
	*r = QueryRangeRequest(decoded)
	return nil
}
type FormatOptions struct {
FillGaps bool `json:"fillGaps,omitempty"`
FormatTableResultForUI bool `json:"formatTableResultForUI,omitempty"`

View File

@@ -0,0 +1,150 @@
package querybuildertypesv5
import (
"encoding/json"
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestQueryRangeRequest_UnmarshalJSON_ErrorMessages verifies that unmarshaling
// a request with unknown fields produces a descriptive error, and — when a
// close match exists — a "Did you mean ...?" hint in the error's additionals.
func TestQueryRangeRequest_UnmarshalJSON_ErrorMessages(t *testing.T) {
	tests := []struct {
		name                string
		jsonData            string   // raw request payload fed to json.Unmarshal
		wantErrMsg          string   // substring expected in the main error message
		wantAdditionalHints []string // substrings expected among the error's additionals
	}{
		{
			// 'function' is a near-miss of the valid 'functions' field.
			name: "unknown field 'function' in query spec",
			jsonData: `{
				"schemaVersion": "v1",
				"start": 1749290340000,
				"end": 1749293940000,
				"requestType": "scalar",
				"compositeQuery": {
					"queries": [{
						"type": "builder_query",
						"spec": {
							"name": "A",
							"signal": "logs",
							"aggregations": [{
								"expression": "count()",
								"alias": "spans_count"
							}],
							"function": [{
								"name": "absolute",
								"args": []
							}]
						}
					}]
				}
			}`,
			wantErrMsg: `unknown field "function" in query spec`,
			wantAdditionalHints: []string{
				"Did you mean 'functions'?",
			},
		},
		{
			// 'filters' is a near-miss of the valid 'filter' field.
			name: "unknown field 'filters' in query spec",
			jsonData: `{
				"schemaVersion": "v1",
				"start": 1749290340000,
				"end": 1749293940000,
				"requestType": "scalar",
				"compositeQuery": {
					"queries": [{
						"type": "builder_query",
						"spec": {
							"name": "A",
							"signal": "metrics",
							"aggregations": [{
								"metricName": "test"
							}],
							"filters": {
								"expression": "test = 1"
							}
						}
					}]
				}
			}`,
			wantErrMsg: `unknown field "filters" in query spec`,
			wantAdditionalHints: []string{
				"Did you mean 'filter'?",
			},
		},
		{
			// Top-level typo: 'compositeQueries' instead of 'compositeQuery'.
			name: "unknown field at top level",
			jsonData: `{
				"schemaVersion": "v1",
				"start": 1749290340000,
				"end": 1749293940000,
				"requestType": "scalar",
				"compositeQueries": {
					"queries": []
				}
			}`,
			wantErrMsg: `unknown field "compositeQueries"`,
			wantAdditionalHints: []string{
				"Did you mean 'compositeQuery'?",
			},
		},
		{
			// No close match: the hint falls back to listing valid fields.
			name: "unknown field with no good suggestion",
			jsonData: `{
				"schemaVersion": "v1",
				"start": 1749290340000,
				"end": 1749293940000,
				"requestType": "scalar",
				"compositeQuery": {
					"queries": [{
						"type": "builder_query",
						"spec": {
							"name": "A",
							"signal": "metrics",
							"aggregations": [{
								"metricName": "test"
							}],
							"randomField": "value"
						}
					}]
				}
			}`,
			wantErrMsg: `unknown field "randomField" in query spec`,
			wantAdditionalHints: []string{
				"Valid fields are:",
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var req QueryRangeRequest
			err := json.Unmarshal([]byte(tt.jsonData), &req)
			require.Error(t, err)

			// Check main error message
			assert.Contains(t, err.Error(), tt.wantErrMsg)

			// Check if it's an error from our package using Unwrapb
			_, _, _, _, _, additionals := errors.Unwrapb(err)

			// Check additional hints if we have any
			if len(additionals) > 0 {
				for _, hint := range tt.wantAdditionalHints {
					found := false
					for _, additional := range additionals {
						if strings.Contains(additional, hint) {
							found = true
							break
						}
					}
					assert.True(t, found, "Expected to find hint '%s' in additionals: %v", hint, additionals)
				}
			}
		})
	}
}

View File

@@ -120,8 +120,8 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
"expression": "severity_text = 'ERROR'"
},
"selectFields": [{
"key": "body",
"type": "log"
"name": "body",
"fieldContext": "log"
}],
"limit": 50,
"offset": 10
@@ -177,8 +177,8 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
}],
"stepInterval": 120,
"groupBy": [{
"key": "method",
"type": "tag"
"name": "method",
"fieldContext": "tag"
}]
}
}]
@@ -436,10 +436,9 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
}
},
{
"name": "B",
"type": "builder_formula",
"spec": {
"name": "rate",
"name": "B",
"expression": "A * 100"
}
}
@@ -465,7 +464,7 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "rate",
Name: "B",
Expression: "A * 100",
},
},

View File

@@ -0,0 +1,783 @@
package querybuildertypesv5
import (
"fmt"
"slices"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
	position := index + 1

	switch envelope.Type {
	case QueryTypeBuilder, QueryTypeSubQuery:
		switch spec := envelope.Spec.(type) {
		case QueryBuilderQuery[TraceAggregation]:
			if spec.Name == "" {
				return fmt.Sprintf("trace query at position %d", position)
			}
			return fmt.Sprintf("query '%s'", spec.Name)
		case QueryBuilderQuery[LogAggregation]:
			if spec.Name == "" {
				return fmt.Sprintf("log query at position %d", position)
			}
			return fmt.Sprintf("query '%s'", spec.Name)
		case QueryBuilderQuery[MetricAggregation]:
			if spec.Name == "" {
				return fmt.Sprintf("metric query at position %d", position)
			}
			return fmt.Sprintf("query '%s'", spec.Name)
		}
		// Unknown spec type: fall through to the generic identifier below.
	case QueryTypeFormula:
		if spec, ok := envelope.Spec.(QueryBuilderFormula); ok && spec.Name != "" {
			return fmt.Sprintf("formula '%s'", spec.Name)
		}
		return fmt.Sprintf("formula at position %d", position)
	case QueryTypeJoin:
		if spec, ok := envelope.Spec.(QueryBuilderJoin); ok && spec.Name != "" {
			return fmt.Sprintf("join '%s'", spec.Name)
		}
		return fmt.Sprintf("join at position %d", position)
	case QueryTypePromQL:
		if spec, ok := envelope.Spec.(PromQuery); ok && spec.Name != "" {
			return fmt.Sprintf("PromQL query '%s'", spec.Name)
		}
		return fmt.Sprintf("PromQL query at position %d", position)
	case QueryTypeClickHouseSQL:
		if spec, ok := envelope.Spec.(ClickHouseQuery); ok && spec.Name != "" {
			return fmt.Sprintf("ClickHouse query '%s'", spec.Name)
		}
		return fmt.Sprintf("ClickHouse query at position %d", position)
	}
	return fmt.Sprintf("query at position %d", position)
}
const (
	// Maximum limit for query results; requests with a larger `limit`
	// are rejected by validateLimitAndPagination.
	MaxQueryLimit = 10000
)
// ValidateFunctionName checks if the function name is valid
func ValidateFunctionName(name FunctionName) error {
	// Whitelist of every function the query builder can apply.
	validFunctions := []FunctionName{
		FunctionNameCutOffMin,
		FunctionNameCutOffMax,
		FunctionNameClampMin,
		FunctionNameClampMax,
		FunctionNameAbsolute,
		FunctionNameRunningDiff,
		FunctionNameLog2,
		FunctionNameLog10,
		FunctionNameCumulativeSum,
		FunctionNameEWMA3,
		FunctionNameEWMA5,
		FunctionNameEWMA7,
		FunctionNameMedian3,
		FunctionNameMedian5,
		FunctionNameMedian7,
		FunctionNameTimeShift,
		FunctionNameAnomaly,
	}

	if !slices.Contains(validFunctions, name) {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid function name: %s",
			name.StringValue(),
		).WithAdditional(fmt.Sprintf("valid functions are: %v", validFunctions))
	}
	return nil
}
// Validate performs preliminary validation on QueryBuilderQuery
func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
	if err := q.validateSignal(); err != nil {
		return err
	}

	// Raw requests return rows as-is, so aggregations are only validated
	// for the other request types.
	if requestType != RequestTypeRaw {
		if err := q.validateAggregations(); err != nil {
			return err
		}
	}

	// Remaining checks are independent of the request type; run them in
	// order and stop at the first failure.
	checks := []func() error{
		q.validateLimitAndPagination,
		q.validateFunctions,
		q.validateSecondaryAggregations,
		q.validateOrderBy,
	}
	for _, check := range checks {
		if err := check(); err != nil {
			return err
		}
	}
	return nil
}
// validateSignal rejects signals outside the supported set. An unspecified
// (empty) signal is accepted for backward compatibility.
func (q *QueryBuilderQuery[T]) validateSignal() error {
	// Signal validation is handled during unmarshaling in req.go
	// Valid signals are: metrics, traces, logs
	valid := q.Signal == telemetrytypes.SignalMetrics ||
		q.Signal == telemetrytypes.SignalTraces ||
		q.Signal == telemetrytypes.SignalLogs ||
		q.Signal == telemetrytypes.SignalUnspecified

	if !valid {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid signal type: %s",
			q.Signal,
		).WithAdditional(
			"Valid signals are: metrics, traces, logs",
		)
	}
	return nil
}
// aggregationID returns a human-friendly identifier for the i-th (0-based)
// aggregation, including the query name when one is set.
func (q *QueryBuilderQuery[T]) aggregationID(i int) string {
	if q.Name != "" {
		return fmt.Sprintf("aggregation #%d in query '%s'", i+1, q.Name)
	}
	return fmt.Sprintf("aggregation #%d", i+1)
}

// validateAggregations checks that at least one aggregation is present (for
// non-disabled queries) and validates each aggregation by its concrete type:
// metric aggregations need a metric name and valid time/space aggregations;
// trace and log aggregations need an expression and a unique alias.
func (q *QueryBuilderQuery[T]) validateAggregations() error {
	// At least one aggregation required for non-disabled queries
	if len(q.Aggregations) == 0 && !q.Disabled {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"at least one aggregation is required",
		)
	}

	// Track aliases across trace/log aggregations to reject duplicates.
	aliases := make(map[string]bool)
	registerAlias := func(alias string) error {
		if alias == "" {
			return nil
		}
		if aliases[alias] {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"duplicate aggregation alias: %s",
				alias,
			)
		}
		aliases[alias] = true
		return nil
	}

	for i, agg := range q.Aggregations {
		// Type-specific validation based on T
		switch v := any(agg).(type) {
		case MetricAggregation:
			if v.MetricName == "" {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"metric name is required for %s",
					q.aggregationID(i),
				)
			}
			if err := validateMetricAggregation(v); err != nil {
				// Re-wrap the inner error with a friendly identifier,
				// preserving any additional hints it carried.
				_, _, innerMsg, _, _, additionals := errors.Unwrapb(err)
				newErr := errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid metric %s: %s",
					q.aggregationID(i),
					innerMsg,
				)
				if len(additionals) > 0 {
					newErr = newErr.WithAdditional(additionals...)
				}
				return newErr
			}
		case TraceAggregation:
			if v.Expression == "" {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"expression is required for trace %s",
					q.aggregationID(i),
				)
			}
			if err := registerAlias(v.Alias); err != nil {
				return err
			}
		case LogAggregation:
			if v.Expression == "" {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"expression is required for log %s",
					q.aggregationID(i),
				)
			}
			if err := registerAlias(v.Alias); err != nil {
				return err
			}
		}
	}
	return nil
}
// validateLimitAndPagination checks that limit is within [0, MaxQueryLimit]
// and that offset is non-negative.
func (q *QueryBuilderQuery[T]) validateLimitAndPagination() error {
	switch {
	case q.Limit < 0:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"limit must be non-negative, got %d",
			q.Limit,
		)
	case q.Limit > MaxQueryLimit:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"limit exceeds maximum allowed value of %d",
			MaxQueryLimit,
		).WithAdditional(
			fmt.Sprintf("Provided limit: %d", q.Limit),
		)
	case q.Offset < 0:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"offset must be non-negative, got %d",
			q.Offset,
		)
	}
	return nil
}
// validateFunctions checks every function name against the known set,
// re-wrapping failures with a friendly per-function identifier.
func (q *QueryBuilderQuery[T]) validateFunctions() error {
	for i, fn := range q.Functions {
		err := ValidateFunctionName(fn.Name)
		if err == nil {
			continue
		}

		// Pull the inner message and hints so they survive re-wrapping.
		_, _, innerMsg, _, _, additionals := errors.Unwrapb(err)

		fnID := fmt.Sprintf("function #%d", i+1)
		if q.Name != "" {
			fnID = fmt.Sprintf("function #%d in query '%s'", i+1, q.Name)
		}

		wrapped := errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid %s: %s",
			fnID,
			innerMsg,
		)
		if len(additionals) > 0 {
			wrapped = wrapped.WithAdditional(additionals...)
		}
		return wrapped
	}
	return nil
}
// validateSecondaryAggregations checks structural constraints on secondary
// aggregations. An empty expression is allowed by design; only a negative
// limit is rejected.
func (q *QueryBuilderQuery[T]) validateSecondaryAggregations() error {
	for i, secAgg := range q.SecondaryAggregations {
		if secAgg.Limit >= 0 {
			continue
		}
		secAggID := fmt.Sprintf("secondary aggregation #%d", i+1)
		if q.Name != "" {
			secAggID = fmt.Sprintf("secondary aggregation #%d in query '%s'", i+1, q.Name)
		}
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"%s: limit must be non-negative",
			secAggID,
		)
	}
	return nil
}
// validateOrderBy checks that every order-by clause uses a known direction.
func (q *QueryBuilderQuery[T]) validateOrderBy() error {
	for i, order := range q.Order {
		if order.Direction == OrderDirectionAsc || order.Direction == OrderDirectionDesc {
			continue
		}
		orderID := fmt.Sprintf("order by clause #%d", i+1)
		if q.Name != "" {
			orderID = fmt.Sprintf("order by clause #%d in query '%s'", i+1, q.Name)
		}
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid direction for %s: %s",
			orderID,
			order.Direction.StringValue(),
		).WithAdditional(
			"Valid directions are: asc, desc",
		)
	}
	return nil
}
// Validate validates the entire query range request: time range, request
// type, and the composite query with all of its member queries.
func (r *QueryRangeRequest) Validate() error {
	if r.Start >= r.End {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"start time must be before end time",
		)
	}

	validRequestType := r.RequestType == RequestTypeRaw ||
		r.RequestType == RequestTypeTimeSeries ||
		r.RequestType == RequestTypeScalar
	if !validRequestType {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid request type: %s",
			r.RequestType,
		).WithAdditional(
			"Valid request types are: raw, timeseries, scalar",
		)
	}

	return r.validateCompositeQuery()
}
// validateCompositeQuery validates every query in the composite query:
// builder/sub queries are validated via their type-specific Validate and
// checked for duplicate names; formula, join, PromQL and ClickHouse queries
// get spec-type and required-field checks.
func (r *QueryRangeRequest) validateCompositeQuery() error {
	if len(r.CompositeQuery.Queries) == 0 {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"at least one query is required",
		)
	}

	// Track query names for uniqueness (only for non-formula queries)
	queryNames := make(map[string]bool)

	// wrapSpecError re-wraps a validation error with a friendly query
	// identifier, preserving any additional hints the inner error carried.
	wrapSpecError := func(err error, envelope QueryEnvelope, i int) error {
		_, _, innerMsg, _, _, additionals := errors.Unwrapb(err)
		newErr := errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid %s: %s",
			getQueryIdentifier(envelope, i),
			innerMsg,
		)
		if len(additionals) > 0 {
			newErr = newErr.WithAdditional(additionals...)
		}
		return newErr
	}

	// registerName rejects duplicate non-empty builder query names.
	registerName := func(name string) error {
		if name == "" {
			return nil
		}
		if queryNames[name] {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"duplicate query name '%s'",
				name,
			)
		}
		queryNames[name] = true
		return nil
	}

	for i, envelope := range r.CompositeQuery.Queries {
		switch envelope.Type {
		case QueryTypeBuilder, QueryTypeSubQuery:
			// Validate based on the concrete spec type; all three signal
			// variants share the same validate-then-register flow.
			var name string
			var validateErr error
			switch spec := envelope.Spec.(type) {
			case QueryBuilderQuery[TraceAggregation]:
				validateErr, name = spec.Validate(r.RequestType), spec.Name
			case QueryBuilderQuery[LogAggregation]:
				validateErr, name = spec.Validate(r.RequestType), spec.Name
			case QueryBuilderQuery[MetricAggregation]:
				validateErr, name = spec.Validate(r.RequestType), spec.Name
			default:
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"unknown spec type for %s",
					getQueryIdentifier(envelope, i),
				)
			}
			if validateErr != nil {
				return wrapSpecError(validateErr, envelope, i)
			}
			if err := registerName(name); err != nil {
				return err
			}
		case QueryTypeFormula:
			// Formula validation is handled separately
			spec, ok := envelope.Spec.(QueryBuilderFormula)
			if !ok {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					getQueryIdentifier(envelope, i),
				)
			}
			if spec.Expression == "" {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"expression is required for %s",
					getQueryIdentifier(envelope, i),
				)
			}
		case QueryTypeJoin:
			// Join validation is handled separately
			if _, ok := envelope.Spec.(QueryBuilderJoin); !ok {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					getQueryIdentifier(envelope, i),
				)
			}
		case QueryTypePromQL:
			// PromQL validation is handled separately
			spec, ok := envelope.Spec.(PromQuery)
			if !ok {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					getQueryIdentifier(envelope, i),
				)
			}
			if spec.Query == "" {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"query expression is required for %s",
					getQueryIdentifier(envelope, i),
				)
			}
		case QueryTypeClickHouseSQL:
			// ClickHouse SQL validation is handled separately
			spec, ok := envelope.Spec.(ClickHouseQuery)
			if !ok {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					getQueryIdentifier(envelope, i),
				)
			}
			if spec.Query == "" {
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"query expression is required for %s",
					getQueryIdentifier(envelope, i),
				)
			}
		default:
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"unknown query type '%s' for %s",
				envelope.Type,
				getQueryIdentifier(envelope, i),
			).WithAdditional(
				// Includes builder_sub_query, matching validateQueryEnvelope
				// and the QueryEnvelope unmarshaler (previously omitted here).
				"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql",
			)
		}
	}
	return nil
}
// Validate performs validation on CompositeQuery
func (c *CompositeQuery) Validate(requestType RequestType) error {
	if len(c.Queries) == 0 {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"at least one query is required",
		)
	}

	for i, envelope := range c.Queries {
		err := validateQueryEnvelope(envelope, requestType)
		if err == nil {
			continue
		}

		// Re-wrap the inner error with a friendly query identifier,
		// preserving any additional hints it carried.
		_, _, innerMsg, _, _, additionals := errors.Unwrapb(err)
		wrapped := errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid %s: %s",
			getQueryIdentifier(envelope, i),
			innerMsg,
		)
		if len(additionals) > 0 {
			wrapped = wrapped.WithAdditional(additionals...)
		}
		return wrapped
	}
	return nil
}
// validateQueryEnvelope dispatches validation by query type: builder/sub
// queries delegate to their type-specific Validate; the other types get
// spec-type and required-field checks.
func validateQueryEnvelope(envelope QueryEnvelope, requestType RequestType) error {
	switch envelope.Type {
	case QueryTypeBuilder, QueryTypeSubQuery:
		switch spec := envelope.Spec.(type) {
		case QueryBuilderQuery[TraceAggregation]:
			return spec.Validate(requestType)
		case QueryBuilderQuery[LogAggregation]:
			return spec.Validate(requestType)
		case QueryBuilderQuery[MetricAggregation]:
			return spec.Validate(requestType)
		}
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"unknown query spec type",
		)
	case QueryTypeFormula:
		spec, ok := envelope.Spec.(QueryBuilderFormula)
		switch {
		case !ok:
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"invalid formula spec",
			)
		case spec.Expression == "":
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"formula expression is required",
			)
		}
		return nil
	case QueryTypeJoin:
		if _, ok := envelope.Spec.(QueryBuilderJoin); !ok {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"invalid join spec",
			)
		}
		return nil
	case QueryTypePromQL:
		spec, ok := envelope.Spec.(PromQuery)
		switch {
		case !ok:
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"invalid PromQL spec",
			)
		case spec.Query == "":
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"PromQL query is required",
			)
		}
		return nil
	case QueryTypeClickHouseSQL:
		spec, ok := envelope.Spec.(ClickHouseQuery)
		switch {
		case !ok:
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"invalid ClickHouse SQL spec",
			)
		case spec.Query == "":
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"ClickHouse SQL query is required",
			)
		}
		return nil
	}
	return errors.NewInvalidInputf(
		errors.CodeInvalidInput,
		"unknown query type: %s",
		envelope.Type,
	).WithAdditional(
		"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql",
	)
}
// validateMetricAggregation validates metric-specific aggregation parameters
func validateMetricAggregation(agg MetricAggregation) error {
	// Rate/increase needs a known temporality; gauges carry the
	// unspecified temporality, where those aggregations are meaningless.
	rateLike := agg.TimeAggregation == metrictypes.TimeAggregationRate ||
		agg.TimeAggregation == metrictypes.TimeAggregationIncrease
	if rateLike && agg.Temporality == metrictypes.Unspecified {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"rate/increase aggregation cannot be used with gauge metrics (unspecified temporality)",
		)
	}

	// Percentiles are only defined for histogram-like metric types.
	if agg.SpaceAggregation.IsPercentile() {
		histogramLike := agg.Type == metrictypes.HistogramType ||
			agg.Type == metrictypes.ExpHistogramType ||
			agg.Type == metrictypes.SummaryType
		if !histogramLike {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"percentile aggregation can only be used with histogram or summary metric types",
			)
		}
	}

	validTimeAggregations := []metrictypes.TimeAggregation{
		metrictypes.TimeAggregationUnspecified,
		metrictypes.TimeAggregationLatest,
		metrictypes.TimeAggregationSum,
		metrictypes.TimeAggregationAvg,
		metrictypes.TimeAggregationMin,
		metrictypes.TimeAggregationMax,
		metrictypes.TimeAggregationCount,
		metrictypes.TimeAggregationCountDistinct,
		metrictypes.TimeAggregationRate,
		metrictypes.TimeAggregationIncrease,
	}
	if !slices.Contains(validTimeAggregations, agg.TimeAggregation) {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid time aggregation: %s",
			agg.TimeAggregation.StringValue(),
		).WithAdditional(
			"Valid time aggregations: latest, sum, avg, min, max, count, count_distinct, rate, increase",
		)
	}

	validSpaceAggregations := []metrictypes.SpaceAggregation{
		metrictypes.SpaceAggregationUnspecified,
		metrictypes.SpaceAggregationSum,
		metrictypes.SpaceAggregationAvg,
		metrictypes.SpaceAggregationMin,
		metrictypes.SpaceAggregationMax,
		metrictypes.SpaceAggregationCount,
		metrictypes.SpaceAggregationPercentile50,
		metrictypes.SpaceAggregationPercentile75,
		metrictypes.SpaceAggregationPercentile90,
		metrictypes.SpaceAggregationPercentile95,
		metrictypes.SpaceAggregationPercentile99,
	}
	if !slices.Contains(validSpaceAggregations, agg.SpaceAggregation) {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid space aggregation: %s",
			agg.SpaceAggregation.StringValue(),
		).WithAdditional(
			"Valid space aggregations: sum, avg, min, max, count, p50, p75, p90, p95, p99",
		)
	}

	return nil
}

View File

@@ -0,0 +1,213 @@
package querybuildertypesv5
import (
"testing"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
// TestValidateMetricAggregation covers the temporality and metric-type rules
// in validateMetricAggregation: rate/increase is rejected on gauges
// (unspecified temporality), and percentile space aggregations are only
// accepted for histogram, exp-histogram, and summary metric types.
func TestValidateMetricAggregation(t *testing.T) {
	tests := []struct {
		name    string
		agg     MetricAggregation
		wantErr bool
		errMsg  string // substring expected in the error when wantErr is true
	}{
		{
			name: "valid sum aggregation",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.SumType,
				Temporality:      metrictypes.Cumulative,
				TimeAggregation:  metrictypes.TimeAggregationSum,
				SpaceAggregation: metrictypes.SpaceAggregationSum,
			},
			wantErr: false,
		},
		{
			name: "invalid rate on gauge",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.GaugeType,
				Temporality:      metrictypes.Unspecified,
				TimeAggregation:  metrictypes.TimeAggregationRate,
				SpaceAggregation: metrictypes.SpaceAggregationSum,
			},
			wantErr: true,
			errMsg:  "rate/increase aggregation cannot be used with gauge metrics",
		},
		{
			name: "invalid increase on gauge",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.GaugeType,
				Temporality:      metrictypes.Unspecified,
				TimeAggregation:  metrictypes.TimeAggregationIncrease,
				SpaceAggregation: metrictypes.SpaceAggregationSum,
			},
			wantErr: true,
			errMsg:  "rate/increase aggregation cannot be used with gauge metrics",
		},
		{
			name: "valid rate on cumulative",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.SumType,
				Temporality:      metrictypes.Cumulative,
				TimeAggregation:  metrictypes.TimeAggregationRate,
				SpaceAggregation: metrictypes.SpaceAggregationSum,
			},
			wantErr: false,
		},
		{
			name: "valid rate on delta",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.SumType,
				Temporality:      metrictypes.Delta,
				TimeAggregation:  metrictypes.TimeAggregationRate,
				SpaceAggregation: metrictypes.SpaceAggregationSum,
			},
			wantErr: false,
		},
		{
			name: "invalid percentile on non-histogram",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.SumType,
				Temporality:      metrictypes.Cumulative,
				TimeAggregation:  metrictypes.TimeAggregationSum,
				SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
			},
			wantErr: true,
			errMsg:  "percentile aggregation can only be used with histogram",
		},
		{
			name: "valid percentile on histogram",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.HistogramType,
				Temporality:      metrictypes.Delta,
				TimeAggregation:  metrictypes.TimeAggregationSum,
				SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
			},
			wantErr: false,
		},
		{
			name: "valid percentile on exp histogram",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.ExpHistogramType,
				Temporality:      metrictypes.Delta,
				TimeAggregation:  metrictypes.TimeAggregationSum,
				SpaceAggregation: metrictypes.SpaceAggregationPercentile99,
			},
			wantErr: false,
		},
		{
			name: "valid percentile on summary",
			agg: MetricAggregation{
				MetricName:       "test_metric",
				Type:             metrictypes.SummaryType,
				Temporality:      metrictypes.Delta,
				TimeAggregation:  metrictypes.TimeAggregationSum,
				SpaceAggregation: metrictypes.SpaceAggregationPercentile50,
			},
			wantErr: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := validateMetricAggregation(tt.agg)
			if tt.wantErr {
				assert.Error(t, err)
				if tt.errMsg != "" {
					assert.Contains(t, err.Error(), tt.errMsg)
				}
			} else {
				assert.NoError(t, err)
			}
		})
	}
}
// TestQueryBuilderQuery_ValidateMetrics exercises the full
// QueryBuilderQuery[MetricAggregation].Validate path, checking that
// aggregation-level failures (rate-on-gauge, missing metric name)
// surface through the query-level Validate.
func TestQueryBuilderQuery_ValidateMetrics(t *testing.T) {
	tests := []struct {
		name    string
		query   QueryBuilderQuery[MetricAggregation]
		reqType RequestType
		wantErr bool
		errMsg  string // substring expected in the error when wantErr is true
	}{
		{
			name: "valid metric query",
			query: QueryBuilderQuery[MetricAggregation]{
				Signal: telemetrytypes.SignalMetrics,
				Aggregations: []MetricAggregation{
					{
						MetricName:       "test_metric",
						Type:             metrictypes.SumType,
						Temporality:      metrictypes.Cumulative,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
					},
				},
			},
			reqType: RequestTypeTimeSeries,
			wantErr: false,
		},
		{
			name: "invalid metric query - rate on gauge",
			query: QueryBuilderQuery[MetricAggregation]{
				Signal: telemetrytypes.SignalMetrics,
				Aggregations: []MetricAggregation{
					{
						MetricName:       "test_metric",
						Type:             metrictypes.GaugeType,
						Temporality:      metrictypes.Unspecified,
						TimeAggregation:  metrictypes.TimeAggregationRate,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
					},
				},
			},
			reqType: RequestTypeTimeSeries,
			wantErr: true,
			errMsg:  "rate/increase aggregation cannot be used with gauge metrics",
		},
		{
			name: "empty metric name",
			query: QueryBuilderQuery[MetricAggregation]{
				Signal: telemetrytypes.SignalMetrics,
				Aggregations: []MetricAggregation{
					{
						MetricName:       "",
						Type:             metrictypes.SumType,
						Temporality:      metrictypes.Cumulative,
						TimeAggregation:  metrictypes.TimeAggregationSum,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
					},
				},
			},
			reqType: RequestTypeTimeSeries,
			wantErr: true,
			errMsg:  "metric name is required",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.query.Validate(tt.reqType)
			if tt.wantErr {
				assert.Error(t, err)
				if tt.errMsg != "" {
					assert.Contains(t, err.Error(), tt.errMsg)
				}
			} else {
				assert.NoError(t, err)
			}
		})
	}
}