chore(metrics-explorer): address follow-up comments (#9730)

frontend/src/auto-import-registry.d.ts (vendored)
@@ -14,6 +14,7 @@ import '@signozhq/badge';
import '@signozhq/button';
import '@signozhq/calendar';
import '@signozhq/callout';
import '@signozhq/checkbox';
import '@signozhq/command';
import '@signozhq/design-tokens';
import '@signozhq/input';

pkg/modules/metricsexplorer/config.go (new file)
@@ -0,0 +1,35 @@
+package metricsexplorer
+
+import (
+	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/factory"
+)
+
+type Config struct {
+	// TelemetryStore is the telemetrystore configuration
+	TelemetryStore TelemetryStoreConfig `mapstructure:"telemetrystore"`
+}
+
+type TelemetryStoreConfig struct {
+	// Threads is the number of threads to use for ClickHouse queries
+	Threads int `mapstructure:"threads"`
+}
+
+func NewConfigFactory() factory.ConfigFactory {
+	return factory.NewConfigFactory(factory.MustNewName("metricsexplorer"), newConfig)
+}
+
+func newConfig() factory.Config {
+	return Config{
+		TelemetryStore: TelemetryStoreConfig{
+			Threads: 8, // Default value
+		},
+	}
+}
+
+func (c Config) Validate() error {
+	if c.TelemetryStore.Threads <= 0 {
+		return errors.NewInvalidInputf(errors.CodeInvalidInput, "metricsexplorer.telemetrystore.threads must be positive, got %d", c.TelemetryStore.Threads)
+	}
+	return nil
+}
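Aside (not part of the diff): a self-contained sketch of the validation rule this new file introduces. It mirrors Config.Validate using plain fmt.Errorf instead of the SigNoz errors package, so the names below are illustrative rather than the actual metricsexplorer package.

package main

import "fmt"

// Mirrors the shape of the Config/TelemetryStoreConfig structs added above (sketch only).
type telemetryStoreConfig struct {
	Threads int
}

type config struct {
	TelemetryStore telemetryStoreConfig
}

// validate reproduces the rule from Config.Validate: threads must be greater than zero.
func validate(c config) error {
	if c.TelemetryStore.Threads <= 0 {
		return fmt.Errorf("metricsexplorer.telemetrystore.threads must be positive, got %d", c.TelemetryStore.Threads)
	}
	return nil
}

func main() {
	fmt.Println(validate(config{TelemetryStore: telemetryStoreConfig{Threads: 8}})) // <nil>, matches the default
	fmt.Println(validate(config{TelemetryStore: telemetryStoreConfig{Threads: 0}})) // rejected
}

The default of 8 threads comes from newConfig above; any value resolved under the metricsexplorer.telemetrystore.threads key is expected to pass this check.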
@@ -21,7 +21,7 @@ func generateMetricMetadataCacheKey(metricName string) string {

func getStatsOrderByColumn(order *qbtypes.OrderBy) (string, string, error) {
	if order == nil {
-		return sqlColumnTimeSeries, qbtypes.OrderDirectionDesc.StringValue(), nil
+		return sqlColumnSamples, qbtypes.OrderDirectionDesc.StringValue(), nil
	}

	var columnName string
@@ -15,6 +15,7 @@ import (
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrymetrics"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
+	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -31,10 +32,11 @@ type module struct {
	condBuilder qbtypes.ConditionBuilder
	logger *slog.Logger
	cache cache.Cache
+	config metricsexplorer.Config
}

// NewModule constructs the metrics module with the provided dependencies.
-func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, providerSettings factory.ProviderSettings) metricsexplorer.Module {
+func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
	fieldMapper := telemetrymetrics.NewFieldMapper()
	condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
	return &module{
@@ -44,6 +46,7 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
		logger: providerSettings.Logger,
		telemetryMetadataStore: telemetryMetadataStore,
		cache: cache,
+		config: cfg,
	}
}

@@ -96,7 +99,6 @@ func (m *module) GetStats(ctx context.Context, orgID valuer.UUID, req *metricsex
	}, nil
}

-// GetTreemap will return metrics treemap information once implemented.
func (m *module) GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error) {
	if err := req.Validate(); err != nil {
		return nil, err
@@ -108,7 +110,7 @@ func (m *module) GetTreemap(ctx context.Context, orgID valuer.UUID, req *metrics
	}

	resp := &metricsexplorertypes.TreemapResponse{}
-	switch req.Treemap {
+	switch req.Mode {
	case metricsexplorertypes.TreemapModeSamples:
		entries, err := m.computeSamplesTreemap(ctx, req, filterWhereClause)
		if err != nil {
@@ -306,8 +308,9 @@ func (m *module) fetchUpdatedMetadata(ctx context.Context, orgID valuer.UUID, me

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	db := m.telemetryStore.ClickhouseDB()
-	rows, err := db.Query(ctx, query, args...)
+	rows, err := db.Query(valueCtx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch updated metrics metadata")
	}
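Aside (not part of the diff): every query path in this file now threads a max-threads hint through the context via ctxtypes.SetClickhouseMaxThreads, sourced from the new config. The sketch below shows the standard context-value pattern such a helper typically uses; the getter name and the exact behaviour of the real pkg/types/ctxtypes package are assumptions here.

package main

import (
	"context"
	"fmt"
)

// Unexported key type: the conventional way to avoid collisions between context values.
type maxThreadsKey struct{}

// setClickhouseMaxThreads sketches what ctxtypes.SetClickhouseMaxThreads appears to do in
// this diff: attach a per-query max_threads hint that the ClickHouse layer can read back.
func setClickhouseMaxThreads(ctx context.Context, threads int) context.Context {
	return context.WithValue(ctx, maxThreadsKey{}, threads)
}

// clickhouseMaxThreads is a hypothetical reader; the real getter in pkg/types/ctxtypes
// is not shown in this diff.
func clickhouseMaxThreads(ctx context.Context) (int, bool) {
	threads, ok := ctx.Value(maxThreadsKey{}).(int)
	return threads, ok
}

func main() {
	ctx := setClickhouseMaxThreads(context.Background(), 8)
	if threads, ok := clickhouseMaxThreads(ctx); ok {
		// Conceptually, the query layer would turn this hint into a ClickHouse setting.
		fmt.Printf("SETTINGS max_threads = %d\n", threads)
	}
}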
@@ -351,11 +354,11 @@ func (m *module) fetchTimeseriesMetadata(ctx context.Context, orgID valuer.UUID,
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select(
		"metric_name",
-		"ANY_VALUE(description) AS description",
-		"ANY_VALUE(type) AS metric_type",
-		"ANY_VALUE(unit) AS metric_unit",
-		"ANY_VALUE(temporality) AS temporality",
-		"ANY_VALUE(is_monotonic) AS is_monotonic",
+		"anyLast(description) AS description",
+		"anyLast(type) AS metric_type",
+		"anyLast(unit) AS metric_unit",
+		"anyLast(temporality) AS temporality",
+		"anyLast(is_monotonic) AS is_monotonic",
	)
	sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.TimeseriesV4TableName))
	sb.Where(sb.In("metric_name", args...))
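Aside (not part of the diff): the aggregate swap above changes which metadata row wins per metric. In ClickHouse, anyLast() keeps the last value encountered while aggregating each group, biasing the result toward later rows, whereas ANY_VALUE()/any() keeps the first one encountered; both are formally non-deterministic. A small sketch of the resulting SELECT, assuming the huandu/go-sqlbuilder package these builders appear to come from; the table name is illustrative.

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select(
		"metric_name",
		"anyLast(description) AS description", // later metadata rows win per metric_name
		"anyLast(type) AS metric_type",
	)
	sb.From("signoz_metrics.time_series_v4") // illustrative table name
	sb.GroupBy("metric_name")

	query, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(query)
}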
@@ -363,8 +366,9 @@ func (m *module) fetchTimeseriesMetadata(ctx context.Context, orgID valuer.UUID,

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	db := m.telemetryStore.ClickhouseDB()
-	rows, err := db.Query(ctx, query, args...)
+	rows, err := db.Query(valueCtx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch metrics metadata from timeseries table")
	}
@@ -448,7 +452,7 @@ func (m *module) validateMetricLabels(ctx context.Context, req *metricsexplorert
			return err
		}
		if !hasLabel {
-			return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as histogram type", req.MetricName)
+			return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as histogram type: histogram metrics require the 'le' (less than or equal) label for bucket boundaries", req.MetricName)
		}
	}

@@ -458,7 +462,7 @@ func (m *module) validateMetricLabels(ctx context.Context, req *metricsexplorert
			return err
		}
		if !hasLabel {
-			return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as summary type", req.MetricName)
+			return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as summary type: summary metrics require the 'quantile' label for quantile values", req.MetricName)
		}
	}

@@ -475,9 +479,10 @@ func (m *module) checkForLabelInMetric(ctx context.Context, metricName string, l

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	var hasLabel bool
	db := m.telemetryStore.ClickhouseDB()
-	err := db.QueryRow(ctx, query, args...).Scan(&hasLabel)
+	err := db.QueryRow(valueCtx, query, args...).Scan(&hasLabel)
	if err != nil {
		return false, errors.WrapInternalf(err, errors.CodeInternal, "error checking metric label %q", label)
	}
@@ -503,8 +508,9 @@ func (m *module) insertMetricsMetadata(ctx context.Context, orgID valuer.UUID, r

	query, args := ib.BuildWithFlavor(sqlbuilder.ClickHouse)

+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	db := m.telemetryStore.ClickhouseDB()
-	if err := db.Exec(ctx, query, args...); err != nil {
+	if err := db.Exec(valueCtx, query, args...); err != nil {
		return errors.WrapInternalf(err, errors.CodeInternal, "failed to insert metrics metadata")
	}

@@ -533,7 +539,6 @@ func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter,
		return sqlbuilder.NewWhereClause(), nil
	}

-	// TODO(nikhilmantri0902, srikanthccv): if this is the right way of dealing with whereClauseSelectors
	whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(expression)
	for idx := range whereClauseSelectors {
		whereClauseSelectors[idx].Signal = telemetrytypes.SignalMetrics
@@ -558,8 +563,8 @@ func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter,
		FieldKeys: keys,
	}

-	startNs := uint64(startMillis * 1_000_000)
-	endNs := uint64(endMillis * 1_000_000)
+	startNs := querybuilder.ToNanoSecs(uint64(startMillis))
+	endNs := querybuilder.ToNanoSecs(uint64(endMillis))

	whereClause, err := querybuilder.PrepareWhereClause(expression, opts, startNs, endNs)
	if err != nil {
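Aside (not part of the diff): the change above replaces inline millisecond-to-nanosecond arithmetic with the shared querybuilder.ToNanoSecs helper. Assuming that helper is equivalent to the arithmetic it replaces, the conversion is simply a factor of one million; the function below is a local stand-in, not the real querybuilder API.

package main

import "fmt"

// toNanoSecs is a stand-in for querybuilder.ToNanoSecs, assuming it performs the same
// epoch-milliseconds to epoch-nanoseconds conversion the inline code used to do.
func toNanoSecs(epochMillis uint64) uint64 {
	return epochMillis * 1_000_000
}

func main() {
	startMillis := uint64(1_700_000_000_000) // an epoch timestamp in milliseconds
	fmt.Println(toNanoSecs(startMillis))     // 1700000000000000000 nanoseconds
}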
@@ -656,8 +661,9 @@ func (m *module) fetchMetricsStatsWithSamples(

	query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)

+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	db := m.telemetryStore.ClickhouseDB()
-	rows, err := db.Query(ctx, query, args...)
+	rows, err := db.Query(valueCtx, query, args...)
	if err != nil {
		return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute metrics stats with samples query")
	}
@@ -725,8 +731,9 @@ func (m *module) computeTimeseriesTreemap(ctx context.Context, req *metricsexplo

	query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)

+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	db := m.telemetryStore.ClickhouseDB()
-	rows, err := db.Query(ctx, query, args...)
+	rows, err := db.Query(valueCtx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute timeseries treemap query")
	}
@@ -784,7 +791,7 @@ func (m *module) computeSamplesTreemap(ctx context.Context, req *metricsexplorer
	)
	sampleCountsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTable))
	sampleCountsSB.Where(sampleCountsSB.Between("unix_milli", req.Start, req.End))
-	sampleCountsSB.Where("metric_name IN (SELECT metric_name FROM __metric_candidates)")
+	sampleCountsSB.Where("metric_name GLOBAL IN (SELECT metric_name FROM __metric_candidates)")

	if filterWhereClause != nil {
		fingerprintSB := sqlbuilder.NewSelectBuilder()
@@ -794,7 +801,7 @@ func (m *module) computeSamplesTreemap(ctx context.Context, req *metricsexplorer
		fingerprintSB.Where("NOT startsWith(metric_name, 'signoz')")
		fingerprintSB.Where(fingerprintSB.E("__normalized", false))
		fingerprintSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
-		fingerprintSB.Where("metric_name IN (SELECT metric_name FROM __metric_candidates)")
+		fingerprintSB.Where("metric_name GLOBAL IN (SELECT metric_name FROM __metric_candidates)")
		fingerprintSB.GroupBy("fingerprint")

		sampleCountsSB.Where("fingerprint IN (SELECT fingerprint FROM __filtered_fingerprints)")
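Aside (not part of the diff): both hunks above change IN to GLOBAL IN. On a distributed ClickHouse setup, a plain IN subquery is re-evaluated on every shard against that shard's local tables, while GLOBAL IN runs the subquery once on the initiator and ships the temporary result set to all shards, so the __metric_candidates result set referenced here is evaluated consistently. A minimal sketch of the generated predicate, again assuming huandu/go-sqlbuilder; the table name is illustrative.

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("metric_name", "count() AS samples")
	sb.From("signoz_metrics.distributed_samples_v4") // illustrative table name
	// GLOBAL IN: evaluate the subquery once on the initiator, broadcast the result to shards.
	sb.Where("metric_name GLOBAL IN (SELECT metric_name FROM __metric_candidates)")
	sb.GroupBy("metric_name")

	query, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(query)
}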
@@ -824,8 +831,9 @@ func (m *module) computeSamplesTreemap(ctx context.Context, req *metricsexplorer

	query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)

+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	db := m.telemetryStore.ClickhouseDB()
-	rows, err := db.Query(ctx, query, args...)
+	rows, err := db.Query(valueCtx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute samples treemap query")
	}
@@ -858,7 +866,8 @@ func (m *module) getMetricDataPoints(ctx context.Context, metricName string) (ui

	db := m.telemetryStore.ClickhouseDB()
	var dataPoints uint64
-	err := db.QueryRow(ctx, query, args...).Scan(&dataPoints)
+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
+	err := db.QueryRow(valueCtx, query, args...).Scan(&dataPoints)
	if err != nil {
		return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get metrics data points")
	}
@@ -876,7 +885,8 @@ func (m *module) getMetricLastReceived(ctx context.Context, metricName string) (

	db := m.telemetryStore.ClickhouseDB()
	var lastReceived sql.NullInt64
-	err := db.QueryRow(ctx, query, args...).Scan(&lastReceived)
+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
+	err := db.QueryRow(valueCtx, query, args...).Scan(&lastReceived)
	if err != nil {
		return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get last received timestamp")
	}
@@ -899,7 +909,8 @@ func (m *module) getTotalTimeSeriesForMetricName(ctx context.Context, metricName

	db := m.telemetryStore.ClickhouseDB()
	var timeSeriesCount uint64
-	err := db.QueryRow(ctx, query, args...).Scan(&timeSeriesCount)
+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
+	err := db.QueryRow(valueCtx, query, args...).Scan(&timeSeriesCount)
	if err != nil {
		return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get total time series count")
	}
@@ -919,8 +930,9 @@ func (m *module) getActiveTimeSeriesForMetricName(ctx context.Context, metricNam

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	db := m.telemetryStore.ClickhouseDB()
+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	var activeTimeSeries uint64
-	err := db.QueryRow(ctx, query, args...).Scan(&activeTimeSeries)
+	err := db.QueryRow(valueCtx, query, args...).Scan(&activeTimeSeries)
	if err != nil {
		return 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to get active time series count")
	}
@@ -953,8 +965,10 @@ func (m *module) fetchMetricAttributes(ctx context.Context, metricName string, s
	sb.GroupBy("attr_name")
	sb.OrderBy("valueCount DESC")
	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
+
+	valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
	db := m.telemetryStore.ClickhouseDB()
-	rows, err := db.Query(ctx, query, args...)
+	rows, err := db.Query(valueCtx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch metric attributes")
	}
@@ -672,7 +672,7 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
	router.HandleFunc("/api/v2/metrics/treemap", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetTreemap)).Methods(http.MethodPost)
	router.HandleFunc("/api/v2/metrics/attributes", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricAttributes)).Methods(http.MethodPost)
	router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricMetadata)).Methods(http.MethodGet)
-	router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
+	router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.EditAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
	router.HandleFunc("/api/v2/metric/highlights", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricHighlights)).Methods(http.MethodGet)
}

@@ -19,6 +19,7 @@ import (
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/gateway"
	"github.com/SigNoz/signoz/pkg/instrumentation"
+	"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/ruler"
@@ -97,6 +98,9 @@ type Config struct {

	// Tokenizer config
	Tokenizer tokenizer.Config `mapstructure:"tokenizer"`
+
+	// MetricsExplorer config
+	MetricsExplorer metricsexplorer.Config `mapstructure:"metricsexplorer"`
}

// DeprecatedFlags are the flags that are deprecated and scheduled for removal.
@@ -156,6 +160,7 @@ func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.R
		statsreporter.NewConfigFactory(),
		gateway.NewConfigFactory(),
		tokenizer.NewConfigFactory(),
+		metricsexplorer.NewConfigFactory(),
	}

	conf, err := config.New(ctx, resolverConfig, configFactories)
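Aside (not part of the diff): registering metricsexplorer.NewConfigFactory() here is what lets the new metricsexplorer.telemetrystore.threads key participate in config resolution, starting from the default of 8 set in newConfig. The sketch below shows how the mapstructure tags map a nested key onto the struct, assuming the conventional mitchellh/mapstructure-style decoding implied by those tags; whether SigNoz's resolver uses this exact library is not shown in the diff.

package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// Local mirrors of the structs added in pkg/modules/metricsexplorer/config.go (sketch only).
type telemetryStoreConfig struct {
	Threads int `mapstructure:"threads"`
}

type metricsExplorerConfig struct {
	TelemetryStore telemetryStoreConfig `mapstructure:"telemetrystore"`
}

func main() {
	// Whatever source the resolver reads (file, env, flags), a value under
	// metricsexplorer.telemetrystore.threads ends up decoded into the struct.
	raw := map[string]any{
		"telemetrystore": map[string]any{"threads": 16},
	}

	var cfg metricsExplorerConfig
	if err := mapstructure.Decode(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.TelemetryStore.Threads) // 16
}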
@@ -336,12 +341,12 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
	}
}

-func (config Config)Collect(_ context.Context, _ valuer.UUID) (map[string]any, error){
+func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {
	stats := make(map[string]any)

	// SQL Store Config Stats
	stats["config.sqlstore.provider"] = config.SQLStore.Provider

	// Tokenizer Config Stats
	stats["config.tokenizer.provider"] = config.Tokenizer.Provider

@@ -36,7 +36,7 @@ func TestNewHandlers(t *testing.T) {
	tokenizer := tokenizertest.New()
	emailing := emailingtest.New()
	require.NoError(t, err)
-	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil)
+	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})

	handlers := NewHandlers(modules, providerSettings, nil, nil)

@@ -79,6 +79,7 @@ func NewModules(
	authNs map[authtypes.AuthNProvider]authn.AuthN,
	authz authz.AuthZ,
	cache cache.Cache,
+	config Config,
) Modules {
	quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
	orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
@@ -101,6 +102,6 @@ func NewModules(
		Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
		SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
		Services: implservices.NewModule(querier, telemetryStore),
-		MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings),
+		MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings, config.MetricsExplorer),
	}
}
@@ -36,7 +36,7 @@ func TestNewModules(t *testing.T) {
	tokenizer := tokenizertest.New()
	emailing := emailingtest.New()
	require.NoError(t, err)
-	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil)
+	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})

	reflectVal := reflect.ValueOf(modules)
	for i := 0; i < reflectVal.NumField(); i++ {
@@ -344,7 +344,7 @@ func New(
	)

	// Initialize all modules
-	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache)
+	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, config)

	// Initialize all handlers for the modules
	handlers := NewHandlers(modules, providerSettings, querier, licensing)
@@ -140,11 +140,11 @@ type UpdateMetricMetadataRequest struct {

// TreemapRequest represents the payload for the metrics treemap endpoint.
type TreemapRequest struct {
-	Filter  *qbtypes.Filter `json:"filter,omitempty"`
-	Start   int64           `json:"start"`
-	End     int64           `json:"end"`
-	Limit   int             `json:"limit"`
-	Treemap TreemapMode     `json:"treemap"`
+	Filter *qbtypes.Filter `json:"filter,omitempty"`
+	Start  int64           `json:"start"`
+	End    int64           `json:"end"`
+	Limit  int             `json:"limit"`
+	Mode   TreemapMode     `json:"mode"`
}

// Validate enforces basic constraints on TreemapRequest.
@@ -182,11 +182,11 @@ func (req *TreemapRequest) Validate() error {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000")
	}

-	if req.Treemap != TreemapModeSamples && req.Treemap != TreemapModeTimeSeries {
+	if req.Mode != TreemapModeSamples && req.Mode != TreemapModeTimeSeries {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid treemap mode %q: supported values are %q or %q",
-			req.Treemap,
+			req.Mode,
			TreemapModeSamples,
			TreemapModeTimeSeries,
		)
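Aside (not part of the diff): the two hunks above rename the request field from Treemap (JSON key "treemap") to Mode (JSON key "mode"), so clients must send the new key. A sketch of the resulting payload; the struct below mirrors only the renamed fields (the real type also carries a Filter field), and the "samples" string is illustrative since the canonical values are whatever TreemapModeSamples and TreemapModeTimeSeries serialize to.

package main

import (
	"encoding/json"
	"fmt"
)

// treemapRequest mirrors just enough of metricsexplorertypes.TreemapRequest to show the
// renamed JSON key (sketch only).
type treemapRequest struct {
	Start int64  `json:"start"`
	End   int64  `json:"end"`
	Limit int    `json:"limit"`
	Mode  string `json:"mode"` // was `json:"treemap"` before this change
}

func main() {
	body, _ := json.Marshal(treemapRequest{
		Start: 1700000000000,
		End:   1700003600000,
		Limit: 100,
		Mode:  "samples", // illustrative value
	})
	fmt.Println(string(body))
	// {"start":1700000000000,"end":1700003600000,"limit":100,"mode":"samples"}
}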