Merge branch 'main' into issue/9362

Abhi kumar
2025-12-16 16:43:27 +05:30
committed by GitHub
15 changed files with 488 additions and 65 deletions

View File

@@ -393,15 +393,21 @@ function ExplorerOptions({
backwardCompatibleOptions = omit(options, 'version');
}
// Use the correct default columns based on the current data source
const defaultColumns =
sourcepage === DataSource.TRACES
? defaultTraceSelectedColumns
: defaultLogsSelectedColumns;
if (extraData.selectColumns?.length) {
handleOptionsChange({
...backwardCompatibleOptions,
selectColumns: extraData.selectColumns,
});
- } else if (!isEqual(defaultTraceSelectedColumns, options.selectColumns)) {
+ } else if (!isEqual(defaultColumns, options.selectColumns)) {
handleOptionsChange({
...backwardCompatibleOptions,
- selectColumns: defaultTraceSelectedColumns,
+ selectColumns: defaultColumns,
});
}
};

View File

@@ -18,6 +18,16 @@ jest.mock('api/browser/localstorage/get', () => ({
default: jest.fn((key: string) => mockLocalStorage[key] || null),
}));
const mockLogsColumns = [
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
{ name: 'body', signal: 'logs', fieldContext: 'log' },
];
const mockTracesColumns = [
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
{ name: 'name', signal: 'traces', fieldContext: 'span' },
];
describe('logsLoaderConfig', () => {
// Save original location object
const originalWindowLocation = window.location;
@@ -157,4 +167,83 @@ describe('logsLoaderConfig', () => {
} as FormattingOptions,
});
});
describe('Column validation - filtering Traces columns', () => {
it('should filter out Traces columns (name with traces signal) from URL', async () => {
const mixedColumns = [...mockLogsColumns, ...mockTracesColumns];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: mixedColumns,
}),
)}`;
const result = await logsLoaderConfig.url();
// Should only keep logs columns
expect(result.columns).toEqual(mockLogsColumns);
});
it('should filter out Traces columns from localStorage', async () => {
const tracesColumns = [...mockTracesColumns];
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
selectColumns: tracesColumns,
});
const result = await logsLoaderConfig.local();
// Should filter out all Traces columns
expect(result.columns).toEqual([]);
});
it('should accept valid Logs columns from URL', async () => {
const logsColumns = [...mockLogsColumns];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: logsColumns,
}),
)}`;
const result = await logsLoaderConfig.url();
expect(result.columns).toEqual(logsColumns);
});
it('should fall back to defaults when all columns are filtered out from URL', async () => {
const tracesColumns = [...mockTracesColumns];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: tracesColumns,
}),
)}`;
const result = await logsLoaderConfig.url();
// Should return empty array, which triggers fallback to defaults in preferencesLoader
expect(result.columns).toEqual([]);
});
it('should handle columns without signal field (legacy data)', async () => {
const columnsWithoutSignal = [
{ name: 'body', fieldContext: 'log' },
{ name: 'service.name', fieldContext: 'resource' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: columnsWithoutSignal,
}),
)}`;
const result = await logsLoaderConfig.url();
// Without signal field, columns pass through validation
// This matches the current implementation behavior where only columns
// with signal !== 'logs' are filtered out
expect(result.columns).toEqual(columnsWithoutSignal);
});
});
});

View File

@@ -1,3 +1,4 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { LOCALSTORAGE } from 'constants/localStorage';
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
import {
@@ -126,4 +127,112 @@ describe('tracesLoaderConfig', () => {
columns: defaultTraceSelectedColumns as TelemetryFieldKey[],
});
});
describe('Column validation - filtering Logs columns', () => {
it('should filter out Logs columns (body) from URL', async () => {
const logsColumns = [
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
{ name: 'body', signal: 'logs', fieldContext: 'log' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: logsColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Should filter out all Logs columns
expect(result.columns).toEqual([]);
});
it('should filter out Logs columns (timestamp with logs signal) from URL', async () => {
const mixedColumns = [
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: mixedColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Should only keep trace columns
expect(result.columns).toEqual([
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
]);
});
it('should filter out Logs columns from localStorage', async () => {
const logsColumns = [
{ name: 'body', signal: 'logs', fieldContext: 'log' },
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
];
mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = JSON.stringify({
selectColumns: logsColumns,
});
const result = await tracesLoaderConfig.local();
// Should filter out all Logs columns
expect(result.columns).toEqual([]);
});
it('should accept valid Trace columns from URL', async () => {
const traceColumns = [
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
{ name: 'name', signal: 'traces', fieldContext: 'span' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: traceColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
expect(result.columns).toEqual(traceColumns);
});
it('should fall back to defaults when all columns are filtered out from URL', async () => {
const logsColumns = [{ name: 'body', signal: 'logs' }];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: logsColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Should return empty array, which triggers fallback to defaults in preferencesLoader
expect(result.columns).toEqual([]);
});
it('should handle columns without signal field (legacy data)', async () => {
const columnsWithoutSignal = [
{ name: 'service.name', fieldContext: 'resource' },
{ name: 'body', fieldContext: 'log' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: columnsWithoutSignal,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Without signal field, columns pass through validation
// This matches the current implementation behavior where only columns
// with signal !== 'traces' are filtered out
expect(result.columns).toEqual(columnsWithoutSignal);
});
});
});

View File

@@ -8,6 +8,18 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
import { FormattingOptions } from '../types';
/**
* Checks whether a column is valid for the Logs Explorer
* Filters out Traces-specific columns that would cause query failures
*/
const isValidLogColumn = (col: {
name?: string;
signal?: string;
[key: string]: unknown;
}): boolean =>
// If column has signal field, it must be 'logs'
!(col?.signal && col.signal !== 'logs');
// --- LOGS preferences loader config ---
const logsLoaders = {
local: (): {
@@ -18,8 +30,14 @@ const logsLoaders = {
if (local) {
try {
const parsed = JSON.parse(local);
const localColumns = parsed.selectColumns || [];
// Filter out invalid columns (e.g., Traces columns that might have been incorrectly stored)
const validLogColumns = localColumns.filter(isValidLogColumn);
return {
- columns: parsed.selectColumns || [],
+ columns: validLogColumns.length > 0 ? validLogColumns : [],
formatting: {
maxLines: parsed.maxLines ?? 2,
format: parsed.format ?? 'table',
@@ -38,8 +56,14 @@ const logsLoaders = {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
const urlColumns = options.selectColumns || [];
// Filter out invalid columns (e.g., Traces columns that might have been incorrectly stored)
const validLogColumns = urlColumns.filter(isValidLogColumn);
return {
- columns: options.selectColumns || [],
+ columns: validLogColumns.length > 0 ? validLogColumns : [],
formatting: {
maxLines: options.maxLines ?? 2,
format: options.format ?? 'table',
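
As a quick illustration of the filter above (a minimal sketch; `demoColumns` is invented for this note, and the predicate is copied from this file):

```ts
// Behavior sketch for isValidLogColumn (copied from above).
const isValidLogColumn = (col: { signal?: string }): boolean =>
	!(col?.signal && col.signal !== 'logs');

const demoColumns = [
	{ name: 'body', signal: 'logs' }, // kept: signal is 'logs'
	{ name: 'name', signal: 'traces' }, // dropped: signal is 'traces'
	{ name: 'service.name' }, // kept: legacy column without a signal field
];

console.log(demoColumns.filter(isValidLogColumn).map((c) => c.name));
// => ['body', 'service.name']
```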

View File

@@ -5,6 +5,18 @@ import { LOCALSTORAGE } from 'constants/localStorage';
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
/**
* Checks whether a column is valid for the Traces Explorer
* Filters out Logs-specific columns that would cause query failures
*/
const isValidTraceColumn = (col: {
name?: string;
signal?: string;
[key: string]: unknown;
}): boolean =>
// If column has signal field, it must be 'traces'
!(col?.signal && col.signal !== 'traces');
// --- TRACES preferences loader config ---
const tracesLoaders = {
local: (): {
@@ -14,8 +26,13 @@ const tracesLoaders = {
if (local) {
try {
const parsed = JSON.parse(local);
const localColumns = parsed.selectColumns || [];
// Filter out invalid columns (e.g., Logs columns that might have been incorrectly stored)
const validTraceColumns = localColumns.filter(isValidTraceColumn);
return {
- columns: parsed.selectColumns || [],
+ columns: validTraceColumns.length > 0 ? validTraceColumns : [],
};
} catch {}
}
@@ -27,8 +44,15 @@ const tracesLoaders = {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
const urlColumns = options.selectColumns || [];
// Filter out invalid columns (e.g., Logs columns)
// Only accept columns that are valid for Traces (signal === 'traces', or columns without a signal field)
const validTraceColumns = urlColumns.filter(isValidTraceColumn);
// Only return columns if we have valid trace columns, otherwise return empty to fall back to defaults
return {
- columns: options.selectColumns || [],
+ columns: validTraceColumns.length > 0 ? validTraceColumns : [],
};
} catch {}
return { columns: [] };
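
For completeness, a minimal sketch of the fallback these comments describe, assuming the surrounding preferences loader treats an empty `columns` array as "use defaults" (as the tests in this commit assert); `resolveTraceColumns` is illustrative, not part of the commit:

```ts
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';

// Hypothetical consumer of the traces URL loader defined above.
async function resolveTraceColumns(): Promise<unknown[]> {
	const { columns } = await tracesLoaders.url();
	// Empty means every stored column was filtered out (or none were stored),
	// so fall back to the trace defaults.
	return columns.length > 0 ? columns : [...defaultTraceSelectedColumns];
}
```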

View File

@@ -3,7 +3,7 @@ package impldashboard
import (
"context"
"maps"
"strings"
"slices"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
@@ -11,30 +11,34 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type module struct {
- store dashboardtypes.Store
- settings factory.ScopedProviderSettings
- analytics analytics.Analytics
- orgGetter organization.Getter
- role role.Module
+ store dashboardtypes.Store
+ settings factory.ScopedProviderSettings
+ analytics analytics.Analytics
+ orgGetter organization.Getter
+ role role.Module
+ queryParser queryparser.QueryParser
}
- func NewModule(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module) dashboard.Module {
+ func NewModule(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module, queryParser queryparser.QueryParser) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/modules/impldashboard")
return &module{
- store: NewStore(sqlstore),
- settings: scopedProviderSettings,
- analytics: analytics,
- orgGetter: orgGetter,
- role: role,
+ store: NewStore(sqlstore),
+ settings: scopedProviderSettings,
+ analytics: analytics,
+ orgGetter: orgGetter,
+ role: role,
+ queryParser: queryParser,
}
}
@@ -269,13 +273,10 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return nil, err
}
// Initialize result map for each metric
result := make(map[string][]map[string]string)
// Process the JSON data in Go
for _, dashboard := range dashboards {
- var dashData = dashboard.Data
+ dashData := dashboard.Data
dashTitle, _ := dashData["title"].(string)
widgets, ok := dashData["widgets"].([]interface{})
if !ok {
@@ -296,44 +297,22 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
continue
}
- builder, ok := query["builder"].(map[string]interface{})
- if !ok {
- continue
- }
+ // Track which metrics were found in this widget
+ foundMetrics := make(map[string]bool)
- queryData, ok := builder["queryData"].([]interface{})
- if !ok {
- continue
- }
+ // Check all three query types
+ module.checkBuilderQueriesForMetricNames(query, metricNames, foundMetrics)
+ module.checkClickHouseQueriesForMetricNames(ctx, query, metricNames, foundMetrics)
+ module.checkPromQLQueriesForMetricNames(ctx, query, metricNames, foundMetrics)
- for _, qd := range queryData {
- data, ok := qd.(map[string]interface{})
- if !ok {
- continue
- }
- if dataSource, ok := data["dataSource"].(string); !ok || dataSource != "metrics" {
- continue
- }
- aggregateAttr, ok := data["aggregateAttribute"].(map[string]interface{})
- if !ok {
- continue
- }
- if key, ok := aggregateAttr["key"].(string); ok {
- // Check if this metric is in our list of interest
- for _, metricName := range metricNames {
- if strings.TrimSpace(key) == metricName {
- result[metricName] = append(result[metricName], map[string]string{
- "dashboard_id": dashboard.ID,
- "widget_name": widgetTitle,
- "widget_id": widgetID,
- "dashboard_name": dashTitle,
- })
- }
- }
- }
+ // Add widget to results for all found metrics
+ for metricName := range foundMetrics {
+ result[metricName] = append(result[metricName], map[string]string{
+ "dashboard_id": dashboard.ID,
+ "widget_name": widgetTitle,
+ "widget_id": widgetID,
+ "dashboard_name": dashTitle,
+ })
+ }
}
}
@@ -361,3 +340,120 @@ func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
// checkBuilderQueriesForMetricNames checks builder.queryData[] for aggregations[].metricName
func (module *module) checkBuilderQueriesForMetricNames(query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
builder, ok := query["builder"].(map[string]interface{})
if !ok {
return
}
queryData, ok := builder["queryData"].([]interface{})
if !ok {
return
}
for _, qd := range queryData {
data, ok := qd.(map[string]interface{})
if !ok {
continue
}
// Check dataSource is metrics
if dataSource, ok := data["dataSource"].(string); !ok || dataSource != "metrics" {
continue
}
// Check aggregations[].metricName
aggregations, ok := data["aggregations"].([]interface{})
if !ok {
continue
}
for _, agg := range aggregations {
aggMap, ok := agg.(map[string]interface{})
if !ok {
continue
}
metricName, ok := aggMap["metricName"].(string)
if !ok || metricName == "" {
continue
}
if slices.Contains(metricNames, metricName) {
foundMetrics[metricName] = true
}
}
}
}
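
For reference, the widget-query shape this helper walks looks roughly like the following (field names are taken from the code above; the concrete values are made up):

```ts
// Hypothetical widget query matched by checkBuilderQueriesForMetricNames:
// a metric is recorded only when dataSource is "metrics" and
// builder.queryData[].aggregations[].metricName is in the search list.
const widgetQuery = {
	builder: {
		queryData: [
			{
				dataSource: 'metrics',
				aggregations: [{ metricName: 'http_server_duration' }],
			},
		],
	},
};
```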
// checkClickHouseQueriesForMetricNames checks clickhouse_sql[] array for metric names in query strings
func (module *module) checkClickHouseQueriesForMetricNames(ctx context.Context, query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
clickhouseSQL, ok := query["clickhouse_sql"].([]interface{})
if !ok {
return
}
for _, chQuery := range clickhouseSQL {
chQueryMap, ok := chQuery.(map[string]interface{})
if !ok {
continue
}
queryStr, ok := chQueryMap["query"].(string)
if !ok || queryStr == "" {
continue
}
// Parse query to extract metric names
result, err := module.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypeClickHouseSQL, queryStr)
if err != nil {
// Log warning and continue - parsing errors shouldn't break the search
module.settings.Logger().WarnContext(ctx, "failed to parse ClickHouse query", "query", queryStr, "error", err)
continue
}
// Check if any of the search metric names are in the extracted metric names
for _, metricName := range metricNames {
if slices.Contains(result.MetricNames, metricName) {
foundMetrics[metricName] = true
}
}
}
}
// checkPromQLQueriesForMetricNames checks promql[] array for metric names in query strings
func (module *module) checkPromQLQueriesForMetricNames(ctx context.Context, query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
promQL, ok := query["promql"].([]interface{})
if !ok {
return
}
for _, promQuery := range promQL {
promQueryMap, ok := promQuery.(map[string]interface{})
if !ok {
continue
}
queryStr, ok := promQueryMap["query"].(string)
if !ok || queryStr == "" {
continue
}
// Parse query to extract metric names
result, err := module.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypePromQL, queryStr)
if err != nil {
// Log warning and continue - parsing errors shouldn't break the search
module.settings.Logger().WarnContext(ctx, "failed to parse PromQL query", "query", queryStr, "error", err)
continue
}
// Check if any of the search metric names are in the extracted metric names
for _, metricName := range metricNames {
if slices.Contains(result.MetricNames, metricName) {
foundMetrics[metricName] = true
}
}
}
}
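
The raw-query variants are handled analogously; a sketch of the envelope they arrive in (the assumption here is that `AnalyzeQueryFilter` surfaces `http_requests_total` from both strings, matching how `result.MetricNames` is consumed above):

```ts
// Hypothetical widget query with ClickHouse SQL and PromQL entries; each
// query string is handed to queryParser.AnalyzeQueryFilter on the backend.
const rawWidgetQuery = {
	clickhouse_sql: [
		{
			query:
				"SELECT count() FROM samples WHERE metric_name = 'http_requests_total'",
		},
	],
	promql: [{ query: 'sum(rate(http_requests_total[5m]))' }],
};
```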

View File

@@ -137,6 +137,28 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
render.Success(rw, http.StatusOK, metadata)
}
func (h *handler) GetMetricDashboards(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
if metricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricDashboards(req.Context(), orgID, metricName)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}
func (h *handler) GetMetricHighlights(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
@@ -165,7 +187,6 @@ func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request)
render.Error(rw, err)
return
}
var in metricsexplorertypes.MetricAttributesRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)

View File

@@ -11,6 +11,7 @@ import (
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -32,11 +33,12 @@ type module struct {
condBuilder qbtypes.ConditionBuilder
logger *slog.Logger
cache cache.Cache
dashboardModule dashboard.Module
config metricsexplorer.Config
}
// NewModule constructs the metrics module with the provided dependencies.
- func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
+ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, dashboardModule dashboard.Module, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
fieldMapper := telemetrymetrics.NewFieldMapper()
condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
return &module{
@@ -46,6 +48,7 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
logger: providerSettings.Logger,
telemetryMetadataStore: telemetryMetadataStore,
cache: cache,
dashboardModule: dashboardModule,
config: cfg,
}
}
@@ -194,6 +197,34 @@ func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, re
return nil
}
func (m *module) GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error) {
if metricName == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName is required")
}
data, err := m.dashboardModule.GetByMetricNames(ctx, orgID, []string{metricName})
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get dashboards for metric")
}
dashboards := make([]metricsexplorertypes.MetricDashboard, 0)
if dashboardList, ok := data[metricName]; ok {
dashboards = make([]metricsexplorertypes.MetricDashboard, 0, len(dashboardList))
for _, item := range dashboardList {
dashboards = append(dashboards, metricsexplorertypes.MetricDashboard{
DashboardName: item["dashboard_name"],
DashboardID: item["dashboard_id"],
WidgetID: item["widget_id"],
WidgetName: item["widget_name"],
})
}
}
return &metricsexplorertypes.MetricDashboardsResponse{
Dashboards: dashboards,
}, nil
}
// GetMetricHighlights returns highlights for a metric including data points, last received, total time series, and active time series.
func (m *module) GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error) {
if metricName == "" {

View File

@@ -15,6 +15,7 @@ type Handler interface {
GetMetricMetadata(http.ResponseWriter, *http.Request)
GetMetricAttributes(http.ResponseWriter, *http.Request)
UpdateMetricMetadata(http.ResponseWriter, *http.Request)
GetMetricDashboards(http.ResponseWriter, *http.Request)
GetMetricHighlights(http.ResponseWriter, *http.Request)
}
@@ -24,6 +25,7 @@ type Module interface {
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error)
GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error)
GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error)
}

View File

@@ -630,6 +630,7 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricMetadata)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.EditAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metric/highlights", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricHighlights)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metric/dashboards", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricDashboards)).Methods(http.MethodGet)
}
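
A minimal frontend sketch of calling the route registered above (illustrative only; `fetchMetricDashboards` is not part of this commit, and the exact envelope `render.Success` wraps around the payload is assumed):

```ts
// Hypothetical client for GET /api/v2/metric/dashboards?metricName=...
async function fetchMetricDashboards(metricName: string): Promise<unknown> {
	const res = await fetch(
		`/api/v2/metric/dashboards?metricName=${encodeURIComponent(metricName)}`,
	);
	if (!res.ok) {
		throw new Error(`metric dashboards request failed: ${res.status}`);
	}
	// Payload carries MetricDashboardsResponse, possibly wrapped in a
	// { status, data } envelope by render.Success.
	return res.json();
}
```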
func Intersection(a, b []int) (c []int) {

View File

@@ -12,6 +12,7 @@ import (
"github.com/SigNoz/signoz/pkg/emailing/emailingtest"
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -35,8 +36,9 @@ func TestNewHandlers(t *testing.T) {
require.NoError(t, err)
tokenizer := tokenizertest.New()
emailing := emailingtest.New()
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
- modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})
+ modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
handlers := NewHandlers(modules, providerSettings, nil, nil)

View File

@@ -38,6 +38,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/tokenizer"
@@ -79,12 +80,14 @@ func NewModules(
authNs map[authtypes.AuthNProvider]authn.AuthN,
authz authz.AuthZ,
cache cache.Cache,
queryParser queryparser.QueryParser,
config Config,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, analytics)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
dashboard := impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil), queryParser)
return Modules{
OrgGetter: orgGetter,
@@ -92,7 +95,7 @@ func NewModules(
Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
SavedView: implsavedview.NewModule(sqlstore),
Apdex: implapdex.NewModule(sqlstore),
- Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil)),
+ Dashboard: dashboard,
User: user,
UserGetter: userGetter,
QuickFilter: quickfilter,
@@ -102,6 +105,6 @@ func NewModules(
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
Services: implservices.NewModule(querier, telemetryStore),
- MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings, config.MetricsExplorer),
+ MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, dashboard, providerSettings, config.MetricsExplorer),
}
}

View File

@@ -12,6 +12,7 @@ import (
"github.com/SigNoz/signoz/pkg/emailing/emailingtest"
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -35,8 +36,9 @@ func TestNewModules(t *testing.T) {
require.NoError(t, err)
tokenizer := tokenizertest.New()
emailing := emailingtest.New()
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
- modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})
+ modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -346,7 +346,7 @@ func New(
)
// Initialize all modules
- modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, config)
+ modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing)

View File

@@ -221,6 +221,19 @@ type TreemapResponse struct {
Samples []TreemapEntry `json:"samples"`
}
// MetricDashboard represents a dashboard/widget referencing a metric.
type MetricDashboard struct {
DashboardName string `json:"dashboardName"`
DashboardID string `json:"dashboardId"`
WidgetID string `json:"widgetId"`
WidgetName string `json:"widgetName"`
}
// MetricDashboardsResponse represents the response for metric dashboards endpoint.
type MetricDashboardsResponse struct {
Dashboards []MetricDashboard `json:"dashboards"`
}
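
Mirrored on the frontend, the json tags above would deserialize into something like this (a sketch; these interface names are not introduced by this commit):

```ts
// Hypothetical TypeScript mirror of the Go structs above.
interface MetricDashboard {
	dashboardName: string;
	dashboardId: string;
	widgetId: string;
	widgetName: string;
}

interface MetricDashboardsResponse {
	dashboards: MetricDashboard[];
}
```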
// MetricHighlightsResponse is the output structure for the metric highlights endpoint.
type MetricHighlightsResponse struct {
DataPoints uint64 `json:"dataPoints"`