chore: Query filter extraction API (#9617)
This commit is contained in:
committed by
GitHub
parent
ed70e3c5f5
commit
3d42b0058e
@@ -19,6 +19,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
@@ -60,6 +61,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
|
||||
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
|
||||
"io"
|
||||
"math"
|
||||
@@ -146,6 +147,8 @@ type APIHandler struct {
|
||||
|
||||
QuerierAPI *querierAPI.API
|
||||
|
||||
QueryParserAPI *queryparser.API
|
||||
|
||||
Signoz *signoz.SigNoz
|
||||
}
|
||||
|
||||
@@ -176,6 +179,8 @@ type APIHandlerOpts struct {
|
||||
|
||||
QuerierAPI *querierAPI.API
|
||||
|
||||
QueryParserAPI *queryparser.API
|
||||
|
||||
Signoz *signoz.SigNoz
|
||||
}
|
||||
|
||||
@@ -238,6 +243,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
Signoz: opts.Signoz,
|
||||
FieldsAPI: opts.FieldsAPI,
|
||||
QuerierAPI: opts.QuerierAPI,
|
||||
QueryParserAPI: opts.QueryParserAPI,
|
||||
}
|
||||
|
||||
logsQueryBuilder := logsv4.PrepareLogsQuery
|
||||
@@ -632,6 +638,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
|
||||
router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)
|
||||
|
||||
// Query Filter Analyzer api used to extract metric names and grouping columns from a query
|
||||
router.HandleFunc("/api/v1/query_filter/analyze", am.ViewAccess(aH.QueryParserAPI.AnalyzeQueryFilter)).Methods(http.MethodPost)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"slices"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
|
||||
"github.com/gorilla/handlers"
|
||||
@@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
49
pkg/queryparser/api.go
Normal file
49
pkg/queryparser/api.go
Normal file
@@ -0,0 +1,49 @@
|
||||
package queryparser
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/parsertypes"
|
||||
)
|
||||
|
||||
type API struct {
|
||||
queryParser QueryParser
|
||||
settings factory.ProviderSettings
|
||||
}
|
||||
|
||||
func NewAPI(settings factory.ProviderSettings, queryParser QueryParser) *API {
|
||||
return &API{settings: settings, queryParser: queryParser}
|
||||
}
|
||||
|
||||
// AnalyzeQueryFilter analyzes a query and extracts metric names and grouping columns
|
||||
func (a *API) AnalyzeQueryFilter(w http.ResponseWriter, r *http.Request) {
|
||||
// Limit request body size to 255 KB (CH query limit is 256 KB)
|
||||
r.Body = http.MaxBytesReader(w, r.Body, 255*1024)
|
||||
|
||||
var req parsertypes.QueryFilterAnalyzeRequest
|
||||
if err := binding.JSON.BindBody(r.Body, &req); err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
result, err := a.queryParser.AnalyzeQueryFilter(r.Context(), req.QueryType, req.Query)
|
||||
if err != nil {
|
||||
a.settings.Logger.ErrorContext(r.Context(), "failed to analyze query filter", "error", err)
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
// prepare the response
|
||||
var resp parsertypes.QueryFilterAnalyzeResponse
|
||||
for _, group := range result.GroupByColumns {
|
||||
resp.Groups = append(resp.Groups, parsertypes.ColumnInfoResponse{
|
||||
Name: group.Name,
|
||||
Alias: group.Alias,
|
||||
}) // add the group name and alias to the response
|
||||
}
|
||||
resp.MetricNames = append(resp.MetricNames, result.MetricNames...) // add the metric names to the response
|
||||
render.Success(w, http.StatusOK, resp)
|
||||
}
|
||||
258
pkg/queryparser/api_test.go
Normal file
258
pkg/queryparser/api_test.go
Normal file
@@ -0,0 +1,258 @@
|
||||
package queryparser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/types/parsertypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
func TestAPI_AnalyzeQueryFilter(t *testing.T) {
|
||||
queryParser := New(instrumentationtest.New().ToProviderSettings())
|
||||
aH := NewAPI(instrumentationtest.New().ToProviderSettings(), queryParser)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
requestBody parsertypes.QueryFilterAnalyzeRequest
|
||||
expectedStatus int
|
||||
expectedStatusStr string
|
||||
expectedError bool
|
||||
errorContains string
|
||||
expectedMetrics []string
|
||||
expectedGroups []parsertypes.ColumnInfoResponse
|
||||
}{
|
||||
{
|
||||
name: "PromQL - Nested aggregation inside subquery",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `max_over_time(sum(rate(cpu_usage_total[5m]))[1h:5m])`,
|
||||
QueryType: querybuildertypesv5.QueryTypePromQL,
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
expectedStatusStr: "success",
|
||||
expectedError: false,
|
||||
expectedMetrics: []string{"cpu_usage_total"},
|
||||
expectedGroups: []parsertypes.ColumnInfoResponse{},
|
||||
},
|
||||
{
|
||||
name: "PromQL - Subquery with multiple metrics",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `avg_over_time((foo + bar)[10m:1m])`,
|
||||
QueryType: querybuildertypesv5.QueryTypePromQL,
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
expectedStatusStr: "success",
|
||||
expectedError: false,
|
||||
expectedMetrics: []string{"bar", "foo"},
|
||||
expectedGroups: []parsertypes.ColumnInfoResponse{},
|
||||
},
|
||||
{
|
||||
name: "PromQL - Simple meta-metric with grouping",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `sum by (pod) (up)`,
|
||||
QueryType: querybuildertypesv5.QueryTypePromQL,
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
expectedStatusStr: "success",
|
||||
expectedError: false,
|
||||
expectedMetrics: []string{"up"},
|
||||
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "pod", Alias: ""}},
|
||||
},
|
||||
{
|
||||
name: "ClickHouse - Simple CTE with GROUP BY",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `WITH aggregated AS (
|
||||
SELECT region as region_alias, sum(value) AS total
|
||||
FROM metrics
|
||||
WHERE metric_name = 'cpu_usage'
|
||||
GROUP BY region
|
||||
)
|
||||
SELECT * FROM aggregated`,
|
||||
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
expectedStatusStr: "success",
|
||||
expectedError: false,
|
||||
expectedMetrics: []string{"cpu_usage"},
|
||||
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "region", Alias: "region_alias"}},
|
||||
},
|
||||
{
|
||||
name: "ClickHouse - CTE chain with last GROUP BY + Alias should be returned if exists",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `WITH step1 AS (
|
||||
SELECT service as service_alias, timestamp as ts, value
|
||||
FROM metrics
|
||||
WHERE metric_name = 'requests'
|
||||
GROUP BY service, timestamp
|
||||
),
|
||||
step2 AS (
|
||||
SELECT ts, avg(value) AS avg_value
|
||||
FROM step1
|
||||
GROUP BY ts
|
||||
)
|
||||
SELECT * FROM step2`,
|
||||
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
expectedStatusStr: "success",
|
||||
expectedError: false,
|
||||
expectedMetrics: []string{"requests"},
|
||||
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "ts", Alias: ""}},
|
||||
},
|
||||
{
|
||||
name: "ClickHouse - Outer GROUP BY overrides CTE GROUP BY + Alias should be returned if exists",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `WITH cte AS (
|
||||
SELECT region, service, value
|
||||
FROM metrics
|
||||
WHERE metric_name = 'memory'
|
||||
GROUP BY region, service
|
||||
)
|
||||
SELECT region as region_alias, sum(value) as total
|
||||
FROM cte
|
||||
GROUP BY region`,
|
||||
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
expectedStatusStr: "success",
|
||||
expectedError: false,
|
||||
expectedMetrics: []string{"memory"},
|
||||
expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "region", Alias: "region_alias"}},
|
||||
},
|
||||
{
|
||||
name: "ClickHouse - Invalid query should return error",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `SELECT WHERE metric_name = 'memory' GROUP BY region, service`,
|
||||
QueryType: querybuildertypesv5.QueryTypeClickHouseSQL,
|
||||
},
|
||||
expectedStatus: http.StatusBadRequest,
|
||||
expectedStatusStr: "error",
|
||||
expectedError: true,
|
||||
errorContains: "failed to parse clickhouse query",
|
||||
},
|
||||
{
|
||||
name: "Empty query should return error",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: "",
|
||||
QueryType: querybuildertypesv5.QueryTypePromQL,
|
||||
},
|
||||
expectedStatus: http.StatusBadRequest,
|
||||
expectedStatusStr: "error",
|
||||
expectedError: true,
|
||||
errorContains: "query is required and cannot be empty",
|
||||
},
|
||||
{
|
||||
name: "Invalid queryType should return error",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `sum(rate(cpu_usage[5m]))`,
|
||||
QueryType: querybuildertypesv5.QueryTypeUnknown,
|
||||
},
|
||||
expectedStatus: http.StatusBadRequest,
|
||||
expectedStatusStr: "error",
|
||||
expectedError: true,
|
||||
errorContains: "unsupported queryType",
|
||||
},
|
||||
{
|
||||
name: "Invalid PromQL syntax should return error",
|
||||
requestBody: parsertypes.QueryFilterAnalyzeRequest{
|
||||
Query: `sum by ((foo)(bar))(http_requests_total)`,
|
||||
QueryType: querybuildertypesv5.QueryTypePromQL,
|
||||
},
|
||||
expectedStatus: http.StatusBadRequest,
|
||||
expectedStatusStr: "error",
|
||||
expectedError: true,
|
||||
errorContains: "failed to parse promql query",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Create request body
|
||||
reqBody, err := json.Marshal(tt.requestBody)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to marshal request body: %v", err)
|
||||
}
|
||||
|
||||
// Create HTTP request
|
||||
req := httptest.NewRequestWithContext(context.Background(), http.MethodPost, "/api/v1/query_filter/analyze", bytes.NewBuffer(reqBody))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
// Create response recorder
|
||||
rr := httptest.NewRecorder()
|
||||
|
||||
// Call handler
|
||||
aH.AnalyzeQueryFilter(rr, req)
|
||||
|
||||
// Check status code
|
||||
if rr.Code != tt.expectedStatus {
|
||||
t.Errorf("expected status %d, got %d", tt.expectedStatus, rr.Code)
|
||||
}
|
||||
|
||||
// Parse response
|
||||
var resp map[string]interface{}
|
||||
if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil {
|
||||
t.Fatalf("failed to unmarshal response: %v, body: %s", err, rr.Body.String())
|
||||
}
|
||||
|
||||
// Check status string
|
||||
if resp["status"] != tt.expectedStatusStr {
|
||||
t.Errorf("expected status '%s', got %v", tt.expectedStatusStr, resp["status"])
|
||||
}
|
||||
|
||||
if tt.expectedError {
|
||||
errorObj, ok := resp["error"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("expected error to be a map, got %T", resp["error"])
|
||||
}
|
||||
errorMsg, ok := errorObj["message"].(string)
|
||||
if !ok {
|
||||
t.Fatalf("expected error message to be a string, got %T", errorObj["message"])
|
||||
}
|
||||
if !strings.Contains(errorMsg, tt.errorContains) {
|
||||
t.Errorf("expected error message to contain '%s', got '%s'", tt.errorContains, errorMsg)
|
||||
}
|
||||
} else {
|
||||
// Validate success response
|
||||
data, ok := resp["data"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("expected data to be a map, got %T", resp["data"])
|
||||
}
|
||||
|
||||
// Marshal data back to JSON and unmarshal into QueryFilterAnalyzeResponse struct
|
||||
dataBytes, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to marshal data: %v", err)
|
||||
}
|
||||
|
||||
var responseData parsertypes.QueryFilterAnalyzeResponse
|
||||
if err := json.Unmarshal(dataBytes, &responseData); err != nil {
|
||||
t.Fatalf("failed to unmarshal data into QueryFilterAnalyzeResponse: %v", err)
|
||||
}
|
||||
|
||||
// Sort the arrays for comparison
|
||||
gotMetrics := make([]string, len(responseData.MetricNames))
|
||||
copy(gotMetrics, responseData.MetricNames)
|
||||
sort.Strings(gotMetrics)
|
||||
|
||||
gotGroups := make([]parsertypes.ColumnInfoResponse, len(responseData.Groups))
|
||||
copy(gotGroups, responseData.Groups)
|
||||
|
||||
// Compare using deep equal
|
||||
if !reflect.DeepEqual(gotMetrics, tt.expectedMetrics) {
|
||||
t.Errorf("expected metricNames %v, got %v", tt.expectedMetrics, gotMetrics)
|
||||
}
|
||||
if !reflect.DeepEqual(gotGroups, tt.expectedGroups) {
|
||||
t.Errorf("expected groups %v, got %v", tt.expectedGroups, gotGroups)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -4,11 +4,18 @@
|
||||
// This is useful for metrics discovery, and query analysis.
|
||||
package queryfilterextractor
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/errors"
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
const (
|
||||
ExtractorCH = "qfe_ch"
|
||||
ExtractorPromQL = "qfe_promql"
|
||||
type ExtractorType struct {
|
||||
valuer.String
|
||||
}
|
||||
|
||||
var (
|
||||
ExtractorTypeClickHouseSQL = ExtractorType{valuer.NewString("qfe_ch")}
|
||||
ExtractorTypePromQL = ExtractorType{valuer.NewString("qfe_promql")}
|
||||
)
|
||||
|
||||
// ColumnInfo represents a column in the query
|
||||
@@ -46,13 +53,13 @@ type FilterExtractor interface {
|
||||
Extract(query string) (*FilterResult, error)
|
||||
}
|
||||
|
||||
func NewExtractor(extractorType string) (FilterExtractor, error) {
|
||||
func NewExtractor(extractorType ExtractorType) (FilterExtractor, error) {
|
||||
switch extractorType {
|
||||
case ExtractorCH:
|
||||
case ExtractorTypeClickHouseSQL:
|
||||
return NewClickHouseFilterExtractor(), nil
|
||||
case ExtractorPromQL:
|
||||
case ExtractorTypePromQL:
|
||||
return NewPromQLFilterExtractor(), nil
|
||||
default:
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid extractor type: %s", extractorType)
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid extractor type: %s", extractorType)
|
||||
}
|
||||
}
|
||||
14
pkg/queryparser/queryparser.go
Normal file
14
pkg/queryparser/queryparser.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package queryparser
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
// QueryParser defines the interface for parsing and analyzing queries.
type QueryParser interface {
	// AnalyzeQueryFilter extracts filter conditions from a given query string.
	// queryType selects the query language (PromQL or ClickHouse SQL); the
	// result carries the metric names and group-by columns found in the query.
	AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error)
}
|
||||
40
pkg/queryparser/queryparser_impl.go
Normal file
40
pkg/queryparser/queryparser_impl.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package queryparser
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
type queryParserImpl struct {
|
||||
settings factory.ProviderSettings
|
||||
}
|
||||
|
||||
// New creates a new implementation of the QueryParser service.
|
||||
func New(settings factory.ProviderSettings) QueryParser {
|
||||
return &queryParserImpl{
|
||||
settings: settings,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error) {
|
||||
var extractorType queryfilterextractor.ExtractorType
|
||||
switch queryType {
|
||||
case querybuildertypesv5.QueryTypePromQL:
|
||||
extractorType = queryfilterextractor.ExtractorTypePromQL
|
||||
case querybuildertypesv5.QueryTypeClickHouseSQL:
|
||||
extractorType = queryfilterextractor.ExtractorTypeClickHouseSQL
|
||||
default:
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %s. Supported values are '%s' and '%s'", queryType, querybuildertypesv5.QueryTypePromQL, querybuildertypesv5.QueryTypeClickHouseSQL)
|
||||
}
|
||||
|
||||
// Create extractor
|
||||
extractor, err := queryfilterextractor.NewExtractor(extractorType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return extractor.Extract(query)
|
||||
}
|
||||
@@ -20,6 +20,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/sharder"
|
||||
"github.com/SigNoz/signoz/pkg/sqlmigration"
|
||||
"github.com/SigNoz/signoz/pkg/sqlmigrator"
|
||||
@@ -62,6 +63,7 @@ type SigNoz struct {
|
||||
Authz authz.AuthZ
|
||||
Modules Modules
|
||||
Handlers Handlers
|
||||
QueryParser queryparser.QueryParser
|
||||
}
|
||||
|
||||
func New(
|
||||
@@ -309,6 +311,9 @@ func New(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Initialize query parser
|
||||
queryParser := queryparser.New(providerSettings)
|
||||
|
||||
// Initialize authns
|
||||
store := sqlauthnstore.NewStore(sqlstore)
|
||||
authNs, err := authNsCallback(ctx, providerSettings, store, licensing)
|
||||
@@ -402,5 +407,6 @@ func New(
|
||||
Authz: authz,
|
||||
Modules: modules,
|
||||
Handlers: handlers,
|
||||
QueryParser: queryParser,
|
||||
}, nil
|
||||
}
|
||||
|
||||
49
pkg/types/parsertypes/queryfilteranalyzer.go
Normal file
49
pkg/types/parsertypes/queryfilteranalyzer.go
Normal file
@@ -0,0 +1,49 @@
|
||||
package parsertypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
// QueryFilterAnalyzeRequest represents the request body for query filter analysis.
// Validation and normalization happen in its custom UnmarshalJSON.
type QueryFilterAnalyzeRequest struct {
	// Query is the raw query text to analyze; it is trimmed of surrounding
	// whitespace and must be non-empty.
	Query string `json:"query"`
	// QueryType selects the query language; must be QueryTypePromQL or
	// QueryTypeClickHouseSQL.
	QueryType querybuildertypesv5.QueryType `json:"queryType"`
}
|
||||
|
||||
// UnmarshalJSON implements custom JSON unmarshaling with validation and normalization
|
||||
func (q *QueryFilterAnalyzeRequest) UnmarshalJSON(data []byte) error {
|
||||
// Use a temporary struct to avoid infinite recursion
|
||||
type Alias QueryFilterAnalyzeRequest
|
||||
aux := (*Alias)(q)
|
||||
|
||||
if err := json.Unmarshal(data, aux); err != nil {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse json: %v", err)
|
||||
}
|
||||
|
||||
// Trim and validate query is not empty
|
||||
q.Query = strings.TrimSpace(aux.Query)
|
||||
if q.Query == "" {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "query is required and cannot be empty")
|
||||
}
|
||||
|
||||
// Validate query type
|
||||
if aux.QueryType != querybuildertypesv5.QueryTypeClickHouseSQL && aux.QueryType != querybuildertypesv5.QueryTypePromQL {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %v. Supported values are '%s' and '%s'", aux.QueryType, querybuildertypesv5.QueryTypePromQL, querybuildertypesv5.QueryTypeClickHouseSQL)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ColumnInfoResponse describes a single grouping column extracted from a query.
type ColumnInfoResponse struct {
	// Name is the underlying column name.
	Name string `json:"columnName"`
	// Alias is the alias the query gives the column; empty when none exists.
	Alias string `json:"columnAlias"`
}
|
||||
|
||||
// QueryFilterAnalyzeResponse represents the response body for query filter analysis.
type QueryFilterAnalyzeResponse struct {
	// MetricNames lists the metric names referenced by the analyzed query.
	MetricNames []string `json:"metricNames"`
	// Groups lists the grouping columns (GROUP BY / by-clause) of the query.
	Groups []ColumnInfoResponse `json:"groups"`
}
|
||||
Reference in New Issue
Block a user