Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-28 04:22:12 +00:00)

Compare commits: fix/valida… → SIG-3496 (10 commits)
| Author | SHA1 | Date |
|---|---|---|
| | d228d8c1e0 | |
| | 10ba210d2a | |
| | dba038c6e0 | |
| | bca761498a | |
| | 0e6bd90fdf | |
| | f3256aeac4 | |
| | c9f1526e33 | |
| | dba536578b | |
| | 15ceb228fa | |
| | 6b3c6fc722 | |
.gitignore (vendored, 1 change)
@@ -49,6 +49,7 @@ ee/query-service/tests/test-deploy/data/
# local data
*.backup
*.db
**/db
/deploy/docker/clickhouse-setup/data/
/deploy/docker-swarm/clickhouse-setup/data/
bin/
Makefile (6 changes)
@@ -72,6 +72,12 @@ devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhou
 	@echo " - ClickHouse: http://localhost:8123"
 	@echo " - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"
 
+.PHONY: devenv-clickhouse-clean
+devenv-clickhouse-clean: ## Clean all ClickHouse data from filesystem
+	@echo "Removing ClickHouse data..."
+	@rm -rf .devenv/docker/clickhouse/fs/tmp/*
+	@echo "ClickHouse data cleaned!"
+
 ##############################################################
 # go commands
 ##############################################################
@@ -849,6 +849,75 @@ paths:
       summary: Deprecated create session by email password
       tags:
       - sessions
+  /api/v1/logs/promote_paths:
+    get:
+      deprecated: false
+      description: This endpoint promotes and indexes paths
+      operationId: PromotePaths
+      responses:
+        "200":
+          content:
+            application/json:
+              schema:
+                properties:
+                  data:
+                    items:
+                      $ref: '#/components/schemas/PromotetypesPromotePath'
+                    nullable: true
+                    type: array
+                  status:
+                    type: string
+                type: object
+          description: OK
+        "400":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Bad Request
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      summary: Promote and index paths
+      tags:
+      - promoted_paths
+      - logs
+      - json_logs
+    post:
+      deprecated: false
+      description: This endpoint promotes and indexes paths
+      operationId: PromotePaths
+      requestBody:
+        content:
+          application/json:
+            schema:
+              items:
+                $ref: '#/components/schemas/PromotetypesPromotePath'
+              nullable: true
+              type: array
+      responses:
+        "201":
+          description: Created
+        "400":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Bad Request
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      summary: Promote and index paths
+      tags:
+      - promoted_paths
+      - logs
+      - json_logs
   /api/v1/org/preferences:
     get:
       deprecated: false
@@ -2137,6 +2206,26 @@ components:
       type: object
     PreferencetypesValue:
       type: object
+    PromotetypesPromotePath:
+      properties:
+        indexes:
+          items:
+            $ref: '#/components/schemas/PromotetypesWrappedIndex'
+          type: array
+        path:
+          type: string
+        promote:
+          type: boolean
+      type: object
+    PromotetypesWrappedIndex:
+      properties:
+        column_type:
+          type: string
+        granularity:
+          type: integer
+        type:
+          type: string
+      type: object
     RenderErrorResponse:
       properties:
         error:
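Taken together, the path and schema hunks above define the promote_paths contract. A minimal client sketch follows; the base URL, auth handling, and the sample index values are assumptions, while the route, method, and body shape come from the spec itself.

```typescript
// Sketch only: base URL, auth, and index values are assumptions; the route,
// method, and payload shape come from the OpenAPI hunks above.
interface PromotetypesWrappedIndex {
	column_type?: string;
	granularity?: number;
	type?: string;
}

interface PromotetypesPromotePath {
	path?: string;
	promote?: boolean;
	indexes?: PromotetypesWrappedIndex[];
}

async function promotePaths(paths: PromotetypesPromotePath[]): Promise<void> {
	const res = await fetch('http://localhost:8080/api/v1/logs/promote_paths', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify(paths),
	});
	// Per the spec: 201 on success, RenderErrorResponse body on 400/500.
	if (res.status !== 201) {
		throw new Error(`promote_paths failed with status ${res.status}`);
	}
}

// Promote one path as-is and another with an explicit (illustrative) index.
promotePaths([
	{ path: 'user.id', promote: true },
	{ path: 'order.total', promote: true, indexes: [{ type: 'minmax', granularity: 1 }] },
]).catch(console.error);
```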
@@ -376,7 +376,6 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
 		RuleStore:        ruleStore,
 		MaintenanceStore: maintenanceStore,
 		SqlStore:         sqlstore,
-		QueryParser:      queryParser,
 	}

 	// create Manager
@@ -207,42 +207,6 @@ func (r *AnomalyRule) GetSelectedQuery() string {
 	return r.Condition().GetSelectedQueryName()
 }
 
-// filterNewSeries filters out new series based on the first_seen timestamp.
-func (r *AnomalyRule) filterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
-	// Convert []*v3.Series to []v3.Series for filtering
-	v3Series := make([]v3.Series, 0, len(series))
-	for _, s := range series {
-		v3Series = append(v3Series, *s)
-	}
-
-	// Get indexes to skip
-	skipIndexes, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, v3Series)
-	if filterErr != nil {
-		r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
-		return nil, filterErr
-	}
-
-	// if no series are skipped, return the original series
-	if len(skipIndexes) == 0 {
-		return series, nil
-	}
-
-	// Create a map of skip indexes for efficient lookup
-	skippedIdxMap := make(map[int]struct{}, len(skipIndexes))
-	for _, idx := range skipIndexes {
-		skippedIdxMap[idx] = struct{}{}
-	}
-
-	// Filter out skipped series
-	oldSeries := make([]*v3.Series, 0, len(series)-len(skipIndexes))
-	for i, s := range series {
-		if _, shouldSkip := skippedIdxMap[i]; !shouldSkip {
-			oldSeries = append(oldSeries, s)
-		}
-	}
-	return oldSeries, nil
-}
-
 func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
 
 	params, err := r.prepareQueryRange(ctx, ts)
@@ -275,18 +239,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
 	scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
 	r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
 
-	// Filter out new series if newGroupEvalDelay is configured
-	seriesToProcess := queryResult.AnomalyScores
-	if r.ShouldSkipNewGroups() {
-		filteredSeries, filterErr := r.filterNewSeries(ctx, ts, seriesToProcess)
-		if filterErr != nil {
-			r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
-			return nil, filterErr
-		}
-		seriesToProcess = filteredSeries
-	}
-
-	for _, series := range seriesToProcess {
+	for _, series := range queryResult.AnomalyScores {
 		if r.Condition() != nil && r.Condition().RequireMinPoints {
 			if len(series.Points) < r.Condition().RequiredNumPoints {
 				r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
@@ -338,18 +291,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
 	scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
 	r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
 
-	// Filter out new series if newGroupEvalDelay is configured
-	seriesToProcess := queryResult.AnomalyScores
-	if r.ShouldSkipNewGroups() {
-		filteredSeries, filterErr := r.filterNewSeries(ctx, ts, seriesToProcess)
-		if filterErr != nil {
-			r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
-			return nil, filterErr
-		}
-		seriesToProcess = filteredSeries
-	}
-
-	for _, series := range seriesToProcess {
+	for _, series := range queryResult.AnomalyScores {
 		if r.Condition().RequireMinPoints {
 			if len(series.Points) < r.Condition().RequiredNumPoints {
 				r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
@@ -37,7 +37,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 		opts.SLogger,
 		baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
 		baserules.WithSQLStore(opts.SQLStore),
-		baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
 	)
 
 	if err != nil {
@@ -60,7 +59,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 		opts.Reader,
 		opts.ManagerOpts.Prometheus,
 		baserules.WithSQLStore(opts.SQLStore),
-		baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
 	)
 
 	if err != nil {
@@ -84,7 +82,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 		opts.Cache,
 		baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
 		baserules.WithSQLStore(opts.SQLStore),
-		baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
 	)
 	if err != nil {
 		return task, err
@@ -143,7 +140,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 		baserules.WithSendAlways(),
 		baserules.WithSendUnmatched(),
 		baserules.WithSQLStore(opts.SQLStore),
-		baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
 	)
 
 	if err != nil {
@@ -164,7 +160,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 		baserules.WithSendAlways(),
 		baserules.WithSendUnmatched(),
 		baserules.WithSQLStore(opts.SQLStore),
-		baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
 	)
 
 	if err != nil {
@@ -184,7 +179,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
 		baserules.WithSendAlways(),
 		baserules.WithSendUnmatched(),
 		baserules.WithSQLStore(opts.SQLStore),
-		baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
 	)
 	if err != nil {
 		zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
@@ -6,6 +6,7 @@ import logEvent from 'api/common/logEvent';
 import AppLoading from 'components/AppLoading/AppLoading';
 import { CmdKPalette } from 'components/cmdKPalette/cmdKPalette';
 import NotFound from 'components/NotFound';
+import { ShiftHoldOverlayController } from 'components/ShiftOverlay/ShiftHoldOverlayController';
 import Spinner from 'components/Spinner';
 import { FeatureKeys } from 'constants/features';
 import { LOCALSTORAGE } from 'constants/localStorage';
@@ -368,6 +369,9 @@ function App(): JSX.Element {
 			<NotificationProvider>
 				<ErrorModalProvider>
 					{isLoggedInState && <CmdKPalette userRole={user.role} />}
+					{isLoggedInState && (
+						<ShiftHoldOverlayController userRole={user.role} />
+					)}
 					<PrivateRoute>
 						<ResourceProvider>
 							<QueryBuilderProvider>
frontend/src/api/metricsExplorer/v2/getMetricAlerts.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricAlertsResponse } from 'types/api/metricsExplorer/v2';

export const getMetricAlerts = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricAlertsResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metric/alerts', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
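These v2 fetchers pair naturally with the query keys added to REACT_QUERY_KEY later in this diff. A hedged sketch of a react-query hook around getMetricAlerts; the hook itself is illustrative and the import paths are assumptions, not part of the changes:

```typescript
// Illustrative wrapper, not part of this diff; import paths assumed.
import { useQuery, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricAlertsResponse } from 'types/api/metricsExplorer/v2';

import { getMetricAlerts } from 'api/metricsExplorer/v2/getMetricAlerts';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';

export function useGetMetricAlerts(
	metricName: string,
): UseQueryResult<SuccessResponseV2<GetMetricAlertsResponse>> {
	return useQuery(
		[REACT_QUERY_KEY.GET_METRIC_ALERTS, metricName],
		// react-query passes an AbortSignal so in-flight requests cancel on unmount
		({ signal }) => getMetricAlerts(metricName, signal),
		{ enabled: Boolean(metricName) },
	);
}
```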
frontend/src/api/metricsExplorer/v2/getMetricAttributes.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
	GetMetricAttributesRequest,
	GetMetricAttributesResponse,
} from 'types/api/metricsExplorer/v2';

export const getMetricAttributes = async (
	{ metricName, start, end }: GetMetricAttributesRequest,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricAttributesResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.post(
			'/metrics/attributes',
			{
				metricName: encodedMetricName,
				start,
				end,
			},
			{
				signal,
				headers,
			},
		);

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
frontend/src/api/metricsExplorer/v2/getMetricDashboards.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricDashboardsResponse } from 'types/api/metricsExplorer/v2';

export const getMetricDashboards = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricDashboardsResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metric/dashboards', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
frontend/src/api/metricsExplorer/v2/getMetricHighlights.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricHighlightsResponse } from 'types/api/metricsExplorer/v2';

export const getMetricHighlights = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricHighlightsResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metric/highlights', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
frontend/src/api/metricsExplorer/v2/getMetricMetadata.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { GetMetricMetadataResponse } from 'types/api/metricsExplorer/v2';

export const getMetricMetadata = async (
	metricName: string,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponseV2<GetMetricMetadataResponse>> => {
	try {
		const encodedMetricName = encodeURIComponent(metricName);
		const response = await axios.get('/metrics/metadata', {
			params: {
				metricName: encodedMetricName,
			},
			signal,
			headers,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};
frontend/src/api/metricsExplorer/v2/updateMetricMetadata.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
	UpdateMetricMetadataRequest,
	UpdateMetricMetadataResponse,
} from 'types/api/metricsExplorer/v2';

const updateMetricMetadata = async (
	metricName: string,
	props: UpdateMetricMetadataRequest,
): Promise<SuccessResponseV2<UpdateMetricMetadataResponse>> => {
	try {
		const response = await axios.post(`/metrics/${metricName}/metadata`, {
			...props,
		});

		return {
			httpStatusCode: response.status,
			data: response.data,
		};
	} catch (error) {
		return ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default updateMetricMetadata;
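On the write side, a matching useMutation sketch that invalidates the cached metadata query after a successful update. Again illustrative only, with hook name and import paths assumed:

```typescript
// Illustrative wrapper, not part of this diff; import paths assumed.
import { useMutation, useQueryClient } from 'react-query';

import updateMetricMetadata from 'api/metricsExplorer/v2/updateMetricMetadata';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { UpdateMetricMetadataRequest } from 'types/api/metricsExplorer/v2';

export function useUpdateMetricMetadata(metricName: string) {
	const queryClient = useQueryClient();

	return useMutation(
		(props: UpdateMetricMetadataRequest) =>
			updateMetricMetadata(metricName, props),
		{
			// Refetch the cached metadata so detail views pick up the change.
			onSuccess: (): Promise<void> =>
				queryClient.invalidateQueries([
					REACT_QUERY_KEY.GET_METRIC_METADATA,
					metricName,
				]),
		},
	);
}
```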
@@ -1,11 +1,15 @@
-.log-field-key {
-	padding-right: 5px;
+.log-field-container {
+	display: flex;
+	overflow: hidden;
+	width: 100%;
+	align-items: baseline;
+}
+
+.log-field-key,
+.log-field-key-colon {
 	color: var(--text-vanilla-400, #c0c1c3);
 	font-size: 14px;
 	font-style: normal;
 	font-weight: 400;
 	line-height: 18px; /* 128.571% */
 	letter-spacing: -0.07px;
 
 	&.small {
 		font-size: 11px;
@@ -22,6 +26,20 @@
 		line-height: 24px;
 	}
 }
+.log-field-key {
+	line-height: 18px; /* 128.571% */
+	letter-spacing: -0.07px;
+	white-space: nowrap;
+	display: inline-block;
+	max-width: 20vw;
+	text-overflow: ellipsis;
+	overflow: hidden;
+	margin: 0;
+}
+.log-field-key-colon {
+	min-width: 0.8rem;
+	flex-shrink: 0;
+}
 .log-value {
 	color: var(--text-vanilla-400, #c0c1c3);
 	font-size: 14px;
@@ -158,7 +176,8 @@
 }
 
 .lightMode {
-	.log-field-key {
+	.log-field-key,
+	.log-field-key-colon {
 		color: var(--text-slate-400);
 	}
 	.log-value {
@@ -170,3 +189,10 @@
 	}
 }
+
+.dark {
+	.log-field-key,
+	.log-field-key-colon {
+		color: rgba(255, 255, 255, 0.45);
+	}
+}
@@ -25,13 +25,7 @@ import LogLinesActionButtons from '../LogLinesActionButtons/LogLinesActionButton
 import LogStateIndicator from '../LogStateIndicator/LogStateIndicator';
 import { getLogIndicatorType } from '../LogStateIndicator/utils';
 // styles
-import {
-	Container,
-	LogContainer,
-	LogText,
-	Text,
-	TextContainer,
-} from './styles';
+import { Container, LogContainer, LogText } from './styles';
 import { isValidLogField } from './util';
 
 interface LogFieldProps {
@@ -58,16 +52,18 @@ function LogGeneralField({
 	);
 
 	return (
-		<TextContainer>
-			<Text ellipsis type="secondary" className={cx('log-field-key', fontSize)}>
-				{`${fieldKey} : `}
-			</Text>
+		<div className="log-field-container">
+			<p className={cx('log-field-key', fontSize)} title={fieldKey}>
+				{fieldKey}
+			</p>
+			<span className={cx('log-field-key-colon', fontSize)}> : </span>
 			<LogText
 				dangerouslySetInnerHTML={html}
 				className={cx('log-value', fontSize)}
 				title={fieldValue}
 				linesPerRow={linesPerRow > 1 ? linesPerRow : undefined}
 			/>
-		</TextContainer>
+		</div>
 	);
 }
@@ -1,5 +1,5 @@
 /* eslint-disable no-nested-ternary */
-import { Card, Typography } from 'antd';
+import { Card } from 'antd';
 import { FontSize } from 'container/OptionsMenu/types';
 import styled from 'styled-components';
 import { getActiveLogBackground } from 'utils/logs';
@@ -46,19 +46,6 @@ export const Container = styled(Card)<{
 		getActiveLogBackground($isActiveLog, $isDarkMode, $logType)}
 `;
 
-export const Text = styled(Typography.Text)`
-	&&& {
-		min-width: 2.5rem;
-		white-space: nowrap;
-	}
-`;
-
-export const TextContainer = styled.div`
-	display: flex;
-	overflow: hidden;
-	width: 100%;
-`;
-
 export const LogContainer = styled.div<LogContainerProps>`
 	margin-left: 0.5rem;
 	display: flex;
@@ -0,0 +1,27 @@
import { createShortcutActions } from '../../constants/shortcutActions';
import { useCmdK } from '../../providers/cmdKProvider';
import { ShiftOverlay } from './ShiftOverlay';
import { useShiftHoldOverlay } from './useShiftHoldOverlay';

type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';

export function ShiftHoldOverlayController({
	userRole,
}: {
	userRole: UserRole;
}): JSX.Element | null {
	const { open: isCmdKOpen } = useCmdK();
	const noop = (): void => undefined;

	const actions = createShortcutActions({
		navigate: noop,
		handleThemeChange: noop,
	});

	const visible = useShiftHoldOverlay({
		isModalOpen: isCmdKOpen,
	});

	return (
		<ShiftOverlay visible={visible} actions={actions} userRole={userRole} />
	);
}
frontend/src/components/ShiftOverlay/ShiftOverlay.tsx (new file, 77 lines)
@@ -0,0 +1,77 @@
import './shiftOverlay.scss';

import { useMemo } from 'react';
import ReactDOM from 'react-dom';

import { formatShortcut } from './formatShortcut';

export type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';
export type CmdAction = {
	id: string;
	name: string;
	shortcut?: string[];
	keywords?: string;
	section?: string;
	roles?: UserRole[];
	perform: () => void;
};

interface ShortcutProps {
	label: string;
	keyHint: React.ReactNode;
}

function Shortcut({ label, keyHint }: ShortcutProps): JSX.Element {
	return (
		<div className="shift-overlay__item">
			<span className="shift-overlay__label">{label}</span>
			<kbd className="shift-overlay__kbd">{keyHint}</kbd>
		</div>
	);
}

interface ShiftOverlayProps {
	visible: boolean;
	actions: CmdAction[];
	userRole: UserRole;
}

export function ShiftOverlay({
	visible,
	actions,
	userRole,
}: ShiftOverlayProps): JSX.Element | null {
	const navigationActions = useMemo(() => {
		// RBAC filter: show action if no roles set OR current user role is included
		const permitted = actions.filter(
			(a) => !a.roles || a.roles.includes(userRole),
		);

		// Navigation only + must have shortcut
		return permitted.filter(
			(a) =>
				a.section?.toLowerCase() === 'navigation' &&
				a.shortcut &&
				a.shortcut.length > 0,
		);
	}, [actions, userRole]);

	if (!visible || navigationActions.length === 0) {
		return null;
	}

	return ReactDOM.createPortal(
		<div className="shift-overlay">
			<div className="shift-overlay__panel">
				{navigationActions.map((action) => (
					<Shortcut
						key={action.id}
						label={action.name.replace(/^Go to\s+/i, '')}
						keyHint={formatShortcut(action.shortcut)}
					/>
				))}
			</div>
		</div>,
		document.body,
	);
}
@@ -0,0 +1,102 @@
import '@testing-library/jest-dom';

import { render, screen } from '@testing-library/react';

import type { CmdAction } from '../ShiftOverlay';
import { ShiftOverlay } from '../ShiftOverlay';

jest.mock('../formatShortcut', () => ({
	formatShortcut: (shortcut: string[]): string => shortcut.join('+'),
}));

const baseActions: CmdAction[] = [
	{
		id: '1',
		name: 'Go to Traces',
		section: 'navigation',
		shortcut: ['Shift', 'T'],
		perform: jest.fn(),
	},
	{
		id: '2',
		name: 'Go to Metrics',
		section: 'navigation',
		shortcut: ['Shift', 'M'],
		roles: ['ADMIN'], // ✅ now UserRole[]
		perform: jest.fn(),
	},
	{
		id: '3',
		name: 'Create Alert',
		section: 'actions',
		shortcut: ['A'],
		perform: jest.fn(),
	},
	{
		id: '4',
		name: 'Go to Logs',
		section: 'navigation',
		perform: jest.fn(),
	},
];

describe('ShiftOverlay', () => {
	it('renders nothing when not visible', () => {
		const { container } = render(
			<ShiftOverlay visible={false} actions={baseActions} userRole="ADMIN" />,
		);

		expect(container.firstChild).toBeNull();
	});

	it('renders nothing when no navigation shortcuts exist', () => {
		const { container } = render(
			<ShiftOverlay
				visible
				actions={[
					{
						id: 'x',
						name: 'Create Alert',
						section: 'actions',
						perform: jest.fn(),
					},
				]}
				userRole="ADMIN"
			/>,
		);

		expect(container.firstChild).toBeNull();
	});

	it('renders navigation shortcuts in a portal', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

		expect(document.body.querySelector('.shift-overlay')).toBeInTheDocument();

		expect(screen.getByText('Traces')).toBeInTheDocument();
		expect(screen.getByText('Metrics')).toBeInTheDocument();

		expect(screen.getByText('Shift+T')).toBeInTheDocument();
		expect(screen.getByText('Shift+M')).toBeInTheDocument();
	});

	it('applies RBAC filtering correctly', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="VIEWER" />);

		expect(screen.getByText('Traces')).toBeInTheDocument();
		expect(screen.queryByText('Metrics')).not.toBeInTheDocument();
	});

	it('strips "Go to" prefix from labels', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

		expect(screen.getByText('Traces')).toBeInTheDocument();
		expect(screen.queryByText('Go to Traces')).not.toBeInTheDocument();
	});

	it('does not render actions without shortcuts', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

		expect(screen.queryByText('Logs')).not.toBeInTheDocument();
	});
});
@@ -0,0 +1,144 @@
import { act, renderHook } from '@testing-library/react';

import { useShiftHoldOverlay } from '../useShiftHoldOverlay';

jest.useFakeTimers();

function pressShift(target: EventTarget = window): void {
	const event = new KeyboardEvent('keydown', {
		key: 'Shift',
		bubbles: true,
	});
	Object.defineProperty(event, 'target', { value: target });
	window.dispatchEvent(event);
}

function releaseShift(): void {
	window.dispatchEvent(
		new KeyboardEvent('keyup', {
			key: 'Shift',
			bubbles: true,
		}),
	);
}

describe('useShiftHoldOverlay', () => {
	afterEach(() => {
		jest.clearAllTimers();
	});

	it('shows overlay after holding Shift for 600ms', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);
	});

	it('does not show overlay if Shift is released early', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(300);
			releaseShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);
	});

	it('hides overlay on Shift key release', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);

		act(() => {
			releaseShift();
		});

		expect(result.current).toBe(false);
	});

	it('does not activate when modal is open', () => {
		const { result } = renderHook(() =>
			useShiftHoldOverlay({ isModalOpen: true }),
		);

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);
	});

	it('does not activate in typing context (input)', () => {
		const input = document.createElement('input');
		document.body.appendChild(input);

		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift(input);
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);

		document.body.removeChild(input);
	});

	it('cleans up on window blur', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);

		act(() => {
			window.dispatchEvent(new Event('blur'));
		});

		expect(result.current).toBe(false);
	});

	it('cleans up on document visibility change', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);

		act(() => {
			document.dispatchEvent(new Event('visibilitychange'));
		});

		expect(result.current).toBe(false);
	});

	it('does nothing when disabled', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({ disabled: true }));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);
	});
});
frontend/src/components/ShiftOverlay/formatShortcut.tsx (new file, 44 lines)
@@ -0,0 +1,44 @@
import './shiftOverlay.scss';

import { ArrowUp, ChevronUp, Command, Option } from 'lucide-react';
import { ReactNode } from 'react';

export function formatShortcut(shortcut?: string[]): ReactNode {
	if (!shortcut || shortcut.length === 0) return null;

	const combo = shortcut.find((s) => typeof s === 'string' && s.trim());
	if (!combo) return null;

	return combo.split('+').map((key) => {
		const k = key.trim().toLowerCase();

		let node: ReactNode;
		switch (k) {
			case 'shift':
				node = <ArrowUp size={14} />;
				break;
			case 'cmd':
			case 'meta':
				node = <Command size={14} />;
				break;
			case 'alt':
				node = <Option size={14} />;
				break;
			case 'ctrl':
			case 'control':
				node = <ChevronUp size={14} />;
				break;
			case 'arrowup':
				node = <ArrowUp size={14} />;
				break;
			default:
				node = k.toUpperCase();
		}

		return (
			<span key={`shortcut-${k}`} className="shift-overlay__key">
				{node}
			</span>
		);
	});
}
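The <kbd> hints in ShiftOverlay come straight from this formatter: each '+'-separated token in the first non-empty combo string becomes one styled key cap. A hypothetical call site, shown only to illustrate the output:

```tsx
// Hypothetical component, not part of this diff.
import { formatShortcut } from 'components/ShiftOverlay/formatShortcut';

function TracesHint(): JSX.Element {
	// 'shift+t' renders as an ArrowUp glyph followed by a "T" key cap.
	return (
		<kbd className="shift-overlay__kbd">{formatShortcut(['shift+t'])}</kbd>
	);
}
```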
frontend/src/components/ShiftOverlay/shiftOverlay.scss (new file, 75 lines)
@@ -0,0 +1,75 @@
.shift-overlay {
	position: fixed;
	bottom: 20px;
	left: 50%;
	transform: translateX(-50%);
	z-index: 9999;
	pointer-events: none;

	&__panel {
		display: flex;
		gap: 20px;
		padding: 8px 12px;

		background: var(--bg-ink-500);
		color: var(--bg-vanilla-300);

		border-radius: 8px;
		font-size: 13px;
		line-height: 1.2;

		box-shadow: 0 6px 20px var(--bg-ink-500);
		animation: shift-overlay-fade-in 120ms ease-out;
	}

	&__item {
		display: flex;
		align-items: center;
		gap: 6px;
		white-space: nowrap;
	}

	&__label {
		opacity: 0.9;
	}

	&__kbd {
		font-family: monospace;
		font-size: 12px;
		padding: 2px 6px;
		display: flex;

		border-radius: 4px;
		background: var(--bg-slate-100);
	}

	&__key {
		display: inline-flex;
		align-items: center;
		justify-content: center;

		min-width: 15px;
		height: 20px;

		border-radius: 4px;

		background-color: var(--bg-slate-100);

		font-size: 12px;
		font-weight: 500;
		line-height: 1;
		color: var(--bg-vanilla-300);
		flex-shrink: 0;
	}
}

@keyframes shift-overlay-fade-in {
	from {
		opacity: 0;
		transform: translateY(-4px);
	}
	to {
		opacity: 1;
		transform: translateY(0);
	}
}
frontend/src/components/ShiftOverlay/useShiftHoldOverlay.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import { useEffect, useRef, useState } from 'react';

const HOLD_DELAY_MS = 500;

function isTypingContext(target: EventTarget | null): boolean {
	if (!(target instanceof HTMLElement)) return false;

	const tag = target.tagName;
	return tag === 'INPUT' || tag === 'TEXTAREA' || target.isContentEditable;
}

interface UseShiftHoldOverlayOptions {
	disabled?: boolean;
	isModalOpen?: boolean;
}

export function useShiftHoldOverlay({
	disabled = false,
	isModalOpen = false,
}: UseShiftHoldOverlayOptions): boolean {
	const [visible, setVisible] = useState<boolean>(false);

	const timerRef = useRef<number | null>(null);
	const isHoldingRef = useRef<boolean>(false);

	useEffect((): (() => void) | void => {
		if (disabled) return;

		function cleanup(): void {
			isHoldingRef.current = false;

			if (timerRef.current !== null) {
				window.clearTimeout(timerRef.current);
				timerRef.current = null;
			}

			setVisible(false);
		}

		function onKeyDown(e: KeyboardEvent): void {
			if (e.key !== 'Shift') return;
			if (e.repeat) return;

			// Suppress in bad contexts
			if (
				isModalOpen ||
				e.metaKey ||
				e.ctrlKey ||
				e.altKey ||
				isTypingContext(e.target)
			) {
				return;
			}

			isHoldingRef.current = true;

			timerRef.current = window.setTimeout(() => {
				if (isHoldingRef.current) {
					setVisible(true);
				}
			}, HOLD_DELAY_MS);
		}

		function onKeyUp(e: KeyboardEvent): void {
			if (e.key !== 'Shift') return;
			cleanup();
		}

		function onBlur(): void {
			cleanup();
		}

		window.addEventListener('keydown', onKeyDown);
		window.addEventListener('keyup', onKeyUp);
		window.addEventListener('blur', onBlur);
		document.addEventListener('visibilitychange', cleanup);

		return (): void => {
			window.removeEventListener('keydown', onKeyDown);
			window.removeEventListener('keyup', onKeyUp);
			window.removeEventListener('blur', onBlur);
			document.removeEventListener('visibilitychange', cleanup);
		};
	}, [disabled, isModalOpen]);

	return visible;
}
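ShiftHoldOverlayController earlier in this diff is the real consumer of this hook; reduced to essentials (the component below is assumed, not part of the changes), usage looks like:

```tsx
// Minimal illustrative consumer of the hook, not part of this diff.
import { useShiftHoldOverlay } from 'components/ShiftOverlay/useShiftHoldOverlay';

function ShortcutHintBadge({ modalOpen }: { modalOpen: boolean }): JSX.Element | null {
	// Flips true roughly HOLD_DELAY_MS (500ms) into a Shift hold, and back to
	// false on keyup, window blur, or a tab switch.
	const visible = useShiftHoldOverlay({ isModalOpen: modalOpen });

	return visible ? <kbd>Shift held: navigation shortcuts active</kbd> : null;
}
```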
@@ -159,7 +159,6 @@ describe('CmdKPalette', () => {
 
 		expect(screen.getByText(HOME_LABEL)).toBeInTheDocument();
 		expect(screen.getByText('Go to Dashboards')).toBeInTheDocument();
-		expect(screen.getByText('Open Sidebar')).toBeInTheDocument();
 		expect(screen.getByText('Switch to Dark Mode')).toBeInTheDocument();
 	});
 
@@ -9,34 +9,12 @@ import {
	CommandList,
	CommandShortcut,
} from '@signozhq/command';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import { AxiosError } from 'axios';
import ROUTES from 'constants/routes';
import { USER_PREFERENCES } from 'constants/userPreferences';
import { useThemeMode } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import {
	BellDot,
	BugIcon,
	DraftingCompass,
	Expand,
	HardDrive,
	Home,
	LayoutGrid,
	ListMinus,
	ScrollText,
	Settings,
} from 'lucide-react';
import React, { useEffect } from 'react';
import { useMutation } from 'react-query';
import { UserPreference } from 'types/api/preferences/preference';
import { showErrorNotification } from 'utils/error';

import { useAppContext } from '../../providers/App/App';
import { createShortcutActions } from '../../constants/shortcutActions';
import { useCmdK } from '../../providers/cmdKProvider';

type CmdAction = {
@@ -58,19 +36,8 @@ export function CmdKPalette({
}): JSX.Element | null {
	const { open, setOpen } = useCmdK();

	const { updateUserPreferenceInContext } = useAppContext();
	const { notifications } = useNotifications();
	const { setAutoSwitch, setTheme, theme } = useThemeMode();

	const { mutate: updateUserPreferenceMutation } = useMutation(
		updateUserPreference,
		{
			onError: (error) => {
				showErrorNotification(notifications, error as AxiosError);
			},
		},
	);

	// toggle palette with ⌘/Ctrl+K
	function handleGlobalCmdK(
		e: KeyboardEvent,
@@ -111,164 +78,10 @@ export function CmdKPalette({
		history.push(key);
	}

	function handleOpenSidebar(): void {
		setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, 'true');
		const save = { name: USER_PREFERENCES.SIDENAV_PINNED, value: true };
		updateUserPreferenceInContext(save as UserPreference);
		updateUserPreferenceMutation({
			name: USER_PREFERENCES.SIDENAV_PINNED,
			value: true,
		});
	}

	function handleCloseSidebar(): void {
		setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, 'false');
		const save = { name: USER_PREFERENCES.SIDENAV_PINNED, value: false };
		updateUserPreferenceInContext(save as UserPreference);
		updateUserPreferenceMutation({
			name: USER_PREFERENCES.SIDENAV_PINNED,
			value: false,
		});
	}

	const actions: CmdAction[] = [
		{
			id: 'home',
			name: 'Go to Home',
			shortcut: ['shift + h'],
			keywords: 'home',
			section: 'Navigation',
			icon: <Home size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.HOME),
		},
		{
			id: 'dashboards',
			name: 'Go to Dashboards',
			shortcut: ['shift + d'],
			keywords: 'dashboards',
			section: 'Navigation',
			icon: <LayoutGrid size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.ALL_DASHBOARD),
		},
		{
			id: 'services',
			name: 'Go to Services',
			shortcut: ['shift + s'],
			keywords: 'services monitoring',
			section: 'Navigation',
			icon: <HardDrive size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.APPLICATION),
		},
		{
			id: 'traces',
			name: 'Go to Traces',
			shortcut: ['shift + t'],
			keywords: 'traces',
			section: 'Navigation',
			icon: <DraftingCompass size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.TRACES_EXPLORER),
		},
		{
			id: 'logs',
			name: 'Go to Logs',
			shortcut: ['shift + l'],
			keywords: 'logs',
			section: 'Navigation',
			icon: <ScrollText size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.LOGS),
		},
		{
			id: 'alerts',
			name: 'Go to Alerts',
			shortcut: ['shift + a'],
			keywords: 'alerts',
			section: 'Navigation',
			icon: <BellDot size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.LIST_ALL_ALERT),
		},
		{
			id: 'exceptions',
			name: 'Go to Exceptions',
			shortcut: ['shift + e'],
			keywords: 'exceptions errors',
			section: 'Navigation',
			icon: <BugIcon size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.ALL_ERROR),
		},
		{
			id: 'messaging-queues',
			name: 'Go to Messaging Queues',
			shortcut: ['shift + m'],
			keywords: 'messaging queues mq',
			section: 'Navigation',
			icon: <ListMinus size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.MESSAGING_QUEUES_OVERVIEW),
		},
		{
			id: 'my-settings',
			name: 'Go to Account Settings',
			keywords: 'account settings',
			section: 'Navigation',
			icon: <Settings size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => onClickHandler(ROUTES.MY_SETTINGS),
		},

		// Settings
		{
			id: 'open-sidebar',
			name: 'Open Sidebar',
			keywords: 'sidebar navigation menu expand',
			section: 'Settings',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => handleOpenSidebar(),
		},
		{
			id: 'collapse-sidebar',
			name: 'Collapse Sidebar',
			keywords: 'sidebar navigation menu collapse',
			section: 'Settings',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => handleCloseSidebar(),
		},
		{
			id: 'dark-mode',
			name: 'Switch to Dark Mode',
			keywords: 'theme dark mode appearance',
			section: 'Settings',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => handleThemeChange(THEME_MODE.DARK),
		},
		{
			id: 'light-mode',
			name: 'Switch to Light Mode [Beta]',
			keywords: 'theme light mode appearance',
			section: 'Settings',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => handleThemeChange(THEME_MODE.LIGHT),
		},
		{
			id: 'system-theme',
			name: 'Switch to System Theme',
			keywords: 'system theme appearance',
			section: 'Settings',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
			perform: (): void => handleThemeChange(THEME_MODE.SYSTEM),
		},
	];
	const actions = createShortcutActions({
		navigate: onClickHandler,
		handleThemeChange,
	});

	// RBAC filter: show action if no roles set OR current user role is included
	const permitted = actions.filter(
@@ -56,6 +56,13 @@ export const REACT_QUERY_KEY = {
 	GET_RELATED_METRICS: 'GET_RELATED_METRICS',
 	GET_INSPECT_METRICS_DETAILS: 'GET_INSPECT_METRICS_DETAILS',
 
+	// Metrics Explorer V2 Query Keys
+	GET_METRIC_HIGHLIGHTS: 'GET_METRIC_HIGHLIGHTS',
+	GET_METRIC_METADATA: 'GET_METRIC_METADATA',
+	GET_METRIC_ATTRIBUTES: 'GET_METRIC_ATTRIBUTES',
+	GET_METRIC_ALERTS: 'GET_METRIC_ALERTS',
+	GET_METRIC_DASHBOARDS: 'GET_METRIC_DASHBOARDS',
+
 	// Traces Funnels Query Keys
 	GET_DOMAINS_LIST: 'GET_DOMAINS_LIST',
 	GET_DOMAIN_METRICS_DATA: 'GET_DOMAIN_METRICS_DATA',
frontend/src/constants/shortcutActions.tsx (new file, 263 lines)
@@ -0,0 +1,263 @@
import ROUTES from 'constants/routes';
import { GlobalShortcutsName } from 'constants/shortcuts/globalShortcuts';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import {
	BarChart2,
	BellDot,
	BugIcon,
	Compass,
	DraftingCompass,
	Expand,
	HardDrive,
	Home,
	LayoutGrid,
	ListMinus,
	ScrollText,
	Settings,
	TowerControl,
	Workflow,
} from 'lucide-react';
import React from 'react';

export type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';

export type CmdAction = {
	id: string;
	name: string;
	shortcut?: string[];
	keywords?: string;
	section?: string;
	icon?: React.ReactNode;
	roles?: UserRole[];
	perform: () => void;
};

type ActionDeps = {
	navigate: (path: string) => void;
	handleThemeChange: (mode: string) => void;
};

export function createShortcutActions(deps: ActionDeps): CmdAction[] {
	const { navigate, handleThemeChange } = deps;

	return [
		{
			id: 'home',
			name: 'Go to Home',
			shortcut: [GlobalShortcutsName.NavigateToHome],
			keywords: 'home',
			section: 'Navigation',
			icon: <Home size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.HOME),
		},
		{
			id: 'dashboards',
			name: 'Go to Dashboards',
			shortcut: [GlobalShortcutsName.NavigateToDashboards],
			keywords: 'dashboards',
			section: 'Navigation',
			icon: <LayoutGrid size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.ALL_DASHBOARD),
		},
		{
			id: 'services',
			name: 'Go to Services',
			shortcut: [GlobalShortcutsName.NavigateToServices],
			keywords: 'services monitoring',
			section: 'Navigation',
			icon: <HardDrive size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.APPLICATION),
		},
		{
			id: 'alerts',
			name: 'Go to Alerts',
			shortcut: [GlobalShortcutsName.NavigateToAlerts],
			keywords: 'alerts',
			section: 'Navigation',
			icon: <BellDot size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.LIST_ALL_ALERT),
		},
		{
			id: 'exceptions',
			name: 'Go to Exceptions',
			shortcut: [GlobalShortcutsName.NavigateToExceptions],
			keywords: 'exceptions errors',
			section: 'Navigation',
			icon: <BugIcon size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.ALL_ERROR),
		},
		{
			id: 'messaging-queues',
			name: 'Go to Messaging Queues',
			shortcut: [GlobalShortcutsName.NavigateToMessagingQueues],
			keywords: 'messaging queues mq',
			section: 'Navigation',
			icon: <ListMinus size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.MESSAGING_QUEUES_OVERVIEW),
		},

		// logs
		{
			id: 'logs',
			name: 'Go to Logs',
			shortcut: [GlobalShortcutsName.NavigateToLogs],
			keywords: 'logs',
			section: 'Logs',
			icon: <ScrollText size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.LOGS),
		},
		{
			id: 'logs',
			name: 'Go to Logs Pipelines',
			shortcut: [GlobalShortcutsName.NavigateToLogsPipelines],
			keywords: 'logs pipelines',
			section: 'Logs',
			icon: <Workflow size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.LOGS_PIPELINES),
		},
		{
			id: 'logs',
			name: 'Go to Logs Views',
			shortcut: [GlobalShortcutsName.NavigateToLogsViews],
			keywords: 'logs views',
			section: 'Logs',
			icon: <TowerControl size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.LOGS_SAVE_VIEWS),
		},

		// metrics
		{
			id: 'metrics-summary',
			name: 'Go to Metrics Summary',
			shortcut: [GlobalShortcutsName.NavigateToMetricsSummary],
			keywords: 'metrics summary',
			section: 'Metrics',
			icon: <BarChart2 size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.METRICS_EXPLORER),
		},
		{
			id: 'metrics-explorer',
			name: 'Go to Metrics Explorer',
			shortcut: [GlobalShortcutsName.NavigateToMetricsExplorer],
			keywords: 'metrics explorer',
			section: 'Metrics',
			icon: <Compass size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.METRICS_EXPLORER_EXPLORER),
		},
		{
			id: 'metrics-views',
			name: 'Go to Metrics Views',
			shortcut: [GlobalShortcutsName.NavigateToMetricsViews],
			keywords: 'metrics views',
			section: 'Metrics',
			icon: <TowerControl size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.METRICS_EXPLORER_VIEWS),
		},

		// Traces
		{
			id: 'traces',
			name: 'Go to Traces',
			shortcut: [GlobalShortcutsName.NavigateToTraces],
			keywords: 'traces',
			section: 'Traces',
			icon: <DraftingCompass size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.TRACES_EXPLORER),
		},
		{
			id: 'traces-funnel',
			name: 'Go to Traces Funnels',
			shortcut: [GlobalShortcutsName.NavigateToTracesFunnel],
			keywords: 'traces funnel',
			section: 'Traces',
			icon: <DraftingCompass size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.TRACES_FUNNELS),
		},

		// Common actions
		{
			id: 'dark-mode',
			name: 'Switch to Dark Mode',
			keywords: 'theme dark mode appearance',
			section: 'Common',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => handleThemeChange(THEME_MODE.DARK),
		},
		{
			id: 'light-mode',
			name: 'Switch to Light Mode [Beta]',
			keywords: 'theme light mode appearance',
			section: 'Common',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => handleThemeChange(THEME_MODE.LIGHT),
		},
		{
			id: 'system-theme',
			name: 'Switch to System Theme',
			keywords: 'system theme appearance',
			section: 'Common',
			icon: <Expand size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => handleThemeChange(THEME_MODE.SYSTEM),
		},

		// settings sub-pages
		{
			id: 'my-settings',
			name: 'Go to Account Settings',
			shortcut: [GlobalShortcutsName.NavigateToSettings],
			keywords: 'account settings',
			section: 'Settings',
			icon: <Settings size={14} />,
			roles: ['ADMIN', 'EDITOR', 'VIEWER'],
			perform: (): void => navigate(ROUTES.MY_SETTINGS),
		},
		{
			id: 'my-settings-ingestion',
			name: 'Go to Account Settings Ingestion',
			shortcut: [GlobalShortcutsName.NavigateToSettingsIngestion],
			keywords: 'account settings',
			section: 'Settings',
			icon: <Settings size={14} />,
			roles: ['ADMIN', 'EDITOR'],
			perform: (): void => navigate(ROUTES.INGESTION_SETTINGS),
		},

		{
			id: 'my-settings-billing',
			name: 'Go to Account Settings Billing',
			shortcut: [GlobalShortcutsName.NavigateToSettingsBilling],
			keywords: 'account settings billing',
			section: 'Settings',
			icon: <Settings size={14} />,
			roles: ['ADMIN', 'EDITOR'],
			perform: (): void => navigate(ROUTES.BILLING),
		},
		{
			id: 'my-settings-api-keys',
			name: 'Go to Account Settings API Keys',
			shortcut: [GlobalShortcutsName.NavigateToSettingsAPIKeys],
			keywords: 'account settings api keys',
			section: 'Settings',
			icon: <Settings size={14} />,
			roles: ['ADMIN', 'EDITOR'],
			perform: (): void => navigate(ROUTES.API_KEYS),
		},
	];
}

@@ -1,25 +1,57 @@
export const GlobalShortcuts = {
	NavigateToServices: 's+shift',
	NavigateToTraces: 't+shift',
	NavigateToLogs: 'l+shift',
	NavigateToDashboards: 'd+shift',
	NavigateToAlerts: 'a+shift',
	NavigateToExceptions: 'e+shift',
	NavigateToMessagingQueues: 'm+shift',
	ToggleSidebar: 'b+shift',
	NavigateToHome: 'h+shift',
	NavigateToServices: 'shift+s',
	NavigateToDashboards: 'shift+d',
	NavigateToAlerts: 'shift+a',
	NavigateToExceptions: 'shift+e',
	NavigateToMessagingQueues: 'shift+q',
	ToggleSidebar: 'shift+b',
	NavigateToHome: 'shift+h',

	// logs
	NavigateToLogs: 'shift+l',
	NavigateToLogsPipelines: 'shift+l+p',
	NavigateToLogsViews: 'shift+l+v',

	// traces
	NavigateToTraces: 'shift+t',
	NavigateToTracesFunnel: 'shift+t+f',
	NavigateToTracesViews: 'shift+t+v',

	// metrics
	NavigateToMetricsSummary: 'shift+m',
	NavigateToMetricsExplorer: 'shift+m+e',
	NavigateToMetricsViews: 'shift+m+v',

	// settings
	NavigateToSettings: 'shift+g',
	NavigateToSettingsIngestion: 'shift+g+i',
	NavigateToSettingsBilling: 'shift+g+b',
	NavigateToSettingsAPIKeys: 'shift+g+k',
	NavigateToSettingsNotificationChannels: 'shift+g+n',
};

export const GlobalShortcutsName = {
	NavigateToServices: 'shift+s',
	NavigateToTraces: 'shift+t',
	NavigateToLogs: 'shift+l',
	NavigateToDashboards: 'shift+d',
	NavigateToAlerts: 'shift+a',
	NavigateToExceptions: 'shift+e',
	NavigateToMessagingQueues: 'shift+m',
	NavigateToMessagingQueues: 'shift+q',
	ToggleSidebar: 'shift+b',
	NavigateToHome: 'shift+h',
	NavigateToTracesFunnel: 'shift+t+f',
	NavigateToTracesViews: 'shift+t+v',
	NavigateToMetricsSummary: 'shift+m',
	NavigateToMetricsExplorer: 'shift+m+e',
	NavigateToMetricsViews: 'shift+m+v',
	NavigateToSettings: 'shift+g',
	NavigateToSettingsIngestion: 'shift+g+i',
	NavigateToSettingsBilling: 'shift+g+b',
	NavigateToSettingsAPIKeys: 'shift+g+k',
	NavigateToSettingsNotificationChannels: 'shift+g+n',
	NavigateToLogs: 'shift+l',
	NavigateToLogsPipelines: 'shift+l+p',
	NavigateToLogsViews: 'shift+l+v',
};

export const GlobalShortcutsDescription = {
@@ -32,4 +64,17 @@ export const GlobalShortcutsDescription = {
	NavigateToExceptions: 'Navigate to Exceptions List',
	NavigateToMessagingQueues: 'Navigate to Messaging Queues',
	ToggleSidebar: 'Toggle sidebar visibility',
	NavigateToTracesFunnel: 'Navigate to Traces Funnel',
	NavigateToTracesViews: 'Navigate to Traces Views',
	NavigateToMetricsSummary: 'Navigate to Metrics Summary',
	NavigateToMetricsExplorer: 'Navigate to Metrics Explorer',
	NavigateToMetricsViews: 'Navigate to Metrics Views',
	NavigateToSettings: 'Navigate to Settings',
	NavigateToSettingsIngestion: 'Navigate to Ingestion Settings',
	NavigateToSettingsBilling: 'Navigate to Billing Settings',
	NavigateToSettingsAPIKeys: 'Navigate to API Keys Settings',
	NavigateToSettingsNotificationChannels:
		'Navigate to Notification Channels Settings',
	NavigateToLogsPipelines: 'Navigate to Logs Pipelines',
	NavigateToLogsViews: 'Navigate to Logs Views',
};
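These combinations are consumed via a registerShortcut helper, as in the SideNav hunk at the end of this diff. A minimal sketch of how a component might bind and release one of them; the hook name and the deregister counterpart are assumptions for illustration, passed in here so the sketch stays self-contained:

import { useEffect } from 'react';

// Sketch only: bind a shortcut on mount, release it on unmount.
// registerShortcut appears in the SideNav hunk below; the deregister
// name and exact signatures are assumptions.
function useGlobalShortcut(
	combo: string,
	handler: () => void,
	registerShortcut: (combo: string, cb: () => void) => void,
	deregisterShortcut: (combo: string) => void,
): void {
	useEffect(() => {
		registerShortcut(combo, handler);
		return (): void => deregisterShortcut(combo);
	}, [combo, handler, registerShortcut, deregisterShortcut]);
}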

@@ -10,6 +10,20 @@ import {
import { QueryClient, QueryClientProvider } from 'react-query';

// Mock dependencies
jest.mock('providers/cmdKProvider', () => ({
	useCmdK: (): {
		open: boolean;
		setOpen: React.Dispatch<React.SetStateAction<boolean>>;
		openCmdK: () => void;
		closeCmdK: () => void;
	} => ({
		open: false,
		setOpen: jest.fn(),
		openCmdK: jest.fn(),
		closeCmdK: jest.fn(),
	}),
}));

jest.mock('api/common/logEvent', () => jest.fn());

// Mock the AppContext
@@ -63,7 +77,7 @@ describe('Sidebar Toggle Shortcut', () => {

	describe('Global Shortcuts Constants', () => {
		it('should have the correct shortcut key combination', () => {
			expect(GlobalShortcuts.ToggleSidebar).toBe('b+shift');
			expect(GlobalShortcuts.ToggleSidebar).toBe('shift+b');
		});
	});

@@ -67,7 +67,6 @@ function WidgetGraphComponent({
}: WidgetGraphComponentProps): JSX.Element {
	const { safeNavigate } = useSafeNavigate();
	const [deleteModal, setDeleteModal] = useState(false);
	const [hovered, setHovered] = useState(false);
	const { notifications } = useNotifications();
	const { pathname, search } = useLocation();

@@ -316,18 +315,6 @@ function WidgetGraphComponent({
			style={{
				height: '100%',
			}}
			onMouseOver={(): void => {
				setHovered(true);
			}}
			onFocus={(): void => {
				setHovered(true);
			}}
			onMouseOut={(): void => {
				setHovered(false);
			}}
			onBlur={(): void => {
				setHovered(false);
			}}
			id={widget.id}
			className="widget-graph-component-container"
		>
@@ -377,7 +364,6 @@ function WidgetGraphComponent({

			<div className="drag-handle">
				<WidgetHeader
					parentHover={hovered}
					title={widget?.title}
					widget={widget}
					onView={handleOnView}

@@ -99,6 +99,12 @@
		height: calc(100% - 30px);
	}
}

	&:hover {
		.widget-header-more-options {
			visibility: visible;
		}
	}
}

.widget-full-view {

@@ -51,10 +51,6 @@
	visibility: visible;
}

	.widget-header-hover {
		visibility: visible;
	}

	.widget-api-actions {
		padding-right: 0.25rem;
	}

@@ -181,7 +181,6 @@ describe('WidgetHeader', () => {
				title={TEST_WIDGET_TITLE}
				widget={mockWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -204,7 +203,6 @@ describe('WidgetHeader', () => {
				title="Empty Widget"
				widget={emptyWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -227,7 +225,6 @@ describe('WidgetHeader', () => {
				title={TABLE_WIDGET_TITLE}
				widget={tableWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -255,7 +252,6 @@ describe('WidgetHeader', () => {
				title={TABLE_WIDGET_TITLE}
				widget={tableWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -298,7 +294,6 @@ describe('WidgetHeader', () => {
				title={TEST_WIDGET_TITLE}
				widget={mockWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={errorResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -340,7 +335,6 @@ describe('WidgetHeader', () => {
				title={TEST_WIDGET_TITLE}
				widget={mockWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={warningResponse}
				isWarning
				isFetchingResponse={false}
@@ -370,7 +364,6 @@ describe('WidgetHeader', () => {
				title={TEST_WIDGET_TITLE}
				widget={mockWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={fetchingResponse}
				isWarning={false}
				isFetchingResponse
@@ -389,7 +382,6 @@ describe('WidgetHeader', () => {
				title={TEST_WIDGET_TITLE}
				widget={mockWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -414,7 +406,6 @@ describe('WidgetHeader', () => {
				title={TABLE_WIDGET_TITLE}
				widget={tableWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -433,7 +424,6 @@ describe('WidgetHeader', () => {
				title={TEST_WIDGET_TITLE}
				widget={mockWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}
@@ -454,7 +444,6 @@ describe('WidgetHeader', () => {
				title={TEST_WIDGET_TITLE}
				widget={mockWidget}
				onView={mockOnView}
				parentHover={false}
				queryResponse={mockQueryResponse}
				isWarning={false}
				isFetchingResponse={false}

@@ -48,7 +48,6 @@ interface IWidgetHeaderProps {
	onView: VoidFunction;
	onDelete?: VoidFunction;
	onClone?: VoidFunction;
	parentHover: boolean;
	queryResponse: UseQueryResult<
		SuccessResponse<MetricRangePayloadProps, unknown> & {
			warning?: Warning;
@@ -69,7 +68,6 @@ function WidgetHeader({
	onView,
	onDelete,
	onClone,
	parentHover,
	queryResponse,
	threshold,
	headerMenuList,
@@ -315,8 +313,6 @@ function WidgetHeader({
			<MoreOutlined
				data-testid="widget-header-options"
				className={`widget-header-more-options ${
					parentHover ? 'widget-header-hover' : ''
				} ${
					globalSearchAvailable ? 'widget-header-more-options-visible' : ''
				}`}
			/>

@@ -92,14 +92,14 @@ function BodyTitleRenderer({

	if (isObject) {
		// For objects/arrays, stringify the entire structure
		copyText = `"${cleanedKey}": ${JSON.stringify(value, null, 2)}`;
		copyText = JSON.stringify(value, null, 2);
	} else if (parentIsArray) {
		// For array elements, copy just the value
		copyText = `"${cleanedKey}": ${value}`;
		// array elements
		copyText = `${value}`;
	} else {
		// For primitive values, format as JSON key-value pair
		const valueStr = typeof value === 'string' ? `"${value}"` : String(value);
		copyText = `"${cleanedKey}": ${valueStr}`;
		// primitive values
		const valueStr = typeof value === 'string' ? value : String(value);
		copyText = valueStr;
	}

	setCopy(copyText);

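The hunk above changes what lands on the clipboard: objects and arrays are still copied as pretty-printed JSON, while array elements and primitives are now copied as the bare value, with no `"key": value` wrapper and no added quotes around strings. The same branching as a standalone sketch, with assumed parameter names:

// Sketch of the new copy semantics, mirroring the branch above.
function buildCopyText(value: unknown, parentIsArray: boolean): string {
	if (typeof value === 'object' && value !== null) {
		// objects/arrays: pretty-printed JSON
		return JSON.stringify(value, null, 2);
	}
	if (parentIsArray) {
		// array elements: bare value
		return `${value}`;
	}
	// primitives: bare value, without surrounding quotes for strings
	return typeof value === 'string' ? value : String(value);
}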

@@ -60,7 +60,8 @@ const BodyContent: React.FC<{
	fieldData: Record<string, string>;
	record: DataType;
	bodyHtml: { __html: string };
}> = React.memo(({ fieldData, record, bodyHtml }) => {
	textToCopy: string;
}> = React.memo(({ fieldData, record, bodyHtml, textToCopy }) => {
	const { isLoading, treeData, error } = useAsyncJSONProcessing(
		fieldData.value,
		record.field === 'body',
@@ -92,11 +93,13 @@ const BodyContent: React.FC<{

	if (record.field === 'body') {
		return (
			<span
				style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
			>
				<span dangerouslySetInnerHTML={bodyHtml} />
			</span>
			<CopyClipboardHOC entityKey="body" textToCopy={textToCopy}>
				<span
					style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
				>
					<span dangerouslySetInnerHTML={bodyHtml} />
				</span>
			</CopyClipboardHOC>
		);
	}

@@ -172,7 +175,12 @@ export default function TableViewActions(
		switch (record.field) {
			case 'body':
				return (
					<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
					<BodyContent
						fieldData={fieldData}
						record={record}
						bodyHtml={bodyHtml}
						textToCopy={textToCopy}
					/>
				);

			case 'timestamp':
@@ -194,6 +202,7 @@ export default function TableViewActions(
		record,
		fieldData,
		bodyHtml,
		textToCopy,
		formatTimezoneAdjustedTimestamp,
		cleanTimestamp,
	]);
@@ -202,7 +211,12 @@ export default function TableViewActions(
	if (record.field === 'body') {
		return (
			<div className={cx('value-field', isOpen ? 'open-popover' : '')}>
				<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
				<BodyContent
					fieldData={fieldData}
					record={record}
					bodyHtml={bodyHtml}
					textToCopy={textToCopy}
				/>
				{!isListViewPanel && !RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && (
					<span className="action-btn">
						<Tooltip title="Filter for value">

@@ -1,16 +1,54 @@
import { render, screen } from '@testing-library/react';
import { fireEvent, render, screen } from '@testing-library/react';
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';

import TableViewActions from '../TableViewActions';
import useAsyncJSONProcessing from '../useAsyncJSONProcessing';

// Mock data for tests
let mockCopyToClipboard: jest.Mock;
let mockNotificationsSuccess: jest.Mock;

// Mock the components and hooks
jest.mock('components/Logs/CopyClipboardHOC', () => ({
	__esModule: true,
	default: ({ children }: { children: React.ReactNode }): JSX.Element => (
		<div className="CopyClipboardHOC">{children}</div>
	default: ({
		children,
		textToCopy,
		entityKey,
	}: {
		children: React.ReactNode;
		textToCopy: string;
		entityKey: string;
	}): JSX.Element => (
		// eslint-disable-next-line jsx-a11y/click-events-have-key-events
		<div
			className="CopyClipboardHOC"
			data-testid={`copy-clipboard-${entityKey}`}
			data-text-to-copy={textToCopy}
			onClick={(): void => {
				if (mockCopyToClipboard) {
					mockCopyToClipboard(textToCopy);
				}
				if (mockNotificationsSuccess) {
					mockNotificationsSuccess({
						message: `${entityKey} copied to clipboard`,
						key: `${entityKey} copied to clipboard`,
					});
				}
			}}
			role="button"
			tabIndex={0}
		>
			{children}
		</div>
	),
}));

jest.mock('../useAsyncJSONProcessing', () => ({
	__esModule: true,
	default: jest.fn(),
}));

jest.mock('providers/Timezone', () => ({
	useTimezone: (): {
		formatTimezoneAdjustedTimestamp: (timestamp: string) => string;
@@ -53,6 +91,19 @@ describe('TableViewActions', () => {
		onGroupByAttribute: jest.fn(),
	};

	beforeEach(() => {
		mockCopyToClipboard = jest.fn();
		mockNotificationsSuccess = jest.fn();

		// Default mock for useAsyncJSONProcessing
		const mockUseAsyncJSONProcessing = jest.mocked(useAsyncJSONProcessing);
		mockUseAsyncJSONProcessing.mockReturnValue({
			isLoading: false,
			treeData: null,
			error: null,
		});
	});

	it('should render without crashing', () => {
		render(
			<TableViewActions
@@ -127,4 +178,60 @@ describe('TableViewActions', () => {
			container.querySelector(ACTION_BUTTON_TEST_ID),
		).not.toBeInTheDocument();
	});

	it('should copy non-JSON body text without quotes when user clicks on body', () => {
		// Setup: body field with surrounding quotes
		const bodyValueWithQuotes =
			'"FeatureFlag \'kafkaQueueProblems\' is enabled, sleeping 1 second"';
		const expectedCopiedText =
			"FeatureFlag 'kafkaQueueProblems' is enabled, sleeping 1 second";

		const bodyProps = {
			fieldData: {
				field: 'body',
				value: bodyValueWithQuotes,
			},
			record: {
				key: 'body-key',
				field: 'body',
				value: bodyValueWithQuotes,
			},
			isListViewPanel: false,
			isfilterInLoading: false,
			isfilterOutLoading: false,
			onClickHandler: jest.fn(),
			onGroupByAttribute: jest.fn(),
		};

		// Render component with body field
		render(
			<TableViewActions
				fieldData={bodyProps.fieldData}
				record={bodyProps.record}
				isListViewPanel={bodyProps.isListViewPanel}
				isfilterInLoading={bodyProps.isfilterInLoading}
				isfilterOutLoading={bodyProps.isfilterOutLoading}
				onClickHandler={bodyProps.onClickHandler}
				onGroupByAttribute={bodyProps.onGroupByAttribute}
			/>,
		);

		// Find the clickable copy area for body
		const copyArea = screen.getByTestId('copy-clipboard-body');

		// Verify it has the correct text to copy (without quotes)
		expect(copyArea).toHaveAttribute('data-text-to-copy', expectedCopiedText);

		// Action: User clicks on body content
		fireEvent.click(copyArea);

		// Assert: Text was copied without surrounding quotes
		expect(mockCopyToClipboard).toHaveBeenCalledWith(expectedCopiedText);

		// Assert: Success notification shown
		expect(mockNotificationsSuccess).toHaveBeenCalledWith({
			message: 'body copied to clipboard',
			key: 'body copied to clipboard',
		});
	});
});

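The mock above stands in for the real CopyClipboardHOC, whose contract the test exercises: on click it copies textToCopy and fires a success notification keyed by entityKey. A minimal sketch of that contract using the standard Clipboard API; the notification callback shape is an assumption:

// Sketch only: the behavior the mock above emulates.
async function copyWithNotification(
	entityKey: string,
	textToCopy: string,
	notifySuccess: (args: { message: string; key: string }) => void,
): Promise<void> {
	await navigator.clipboard.writeText(textToCopy); // standard Clipboard API
	notifySuccess({
		message: `${entityKey} copied to clipboard`,
		key: `${entityKey} copied to clipboard`,
	});
}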

@@ -51,7 +51,7 @@ describe('BodyTitleRenderer', () => {
		await user.click(screen.getByText('name'));

		await waitFor(() => {
			expect(mockSetCopy).toHaveBeenCalledWith('"user.name": "John"');
			expect(mockSetCopy).toHaveBeenCalledWith('John');
			expect(mockNotification).toHaveBeenCalledWith(
				expect.objectContaining({
					message: expect.stringContaining('user.name'),
@@ -75,7 +75,7 @@ describe('BodyTitleRenderer', () => {
		await user.click(screen.getByText('0'));

		await waitFor(() => {
			expect(mockSetCopy).toHaveBeenCalledWith('"items[*].0": arrayElement');
			expect(mockSetCopy).toHaveBeenCalledWith('arrayElement');
		});
	});

@@ -96,9 +96,8 @@ describe('BodyTitleRenderer', () => {

		await waitFor(() => {
			const callArg = mockSetCopy.mock.calls[0][0];
			expect(callArg).toContain('"user.metadata":');
			expect(callArg).toContain('"id": 123');
			expect(callArg).toContain('"active": true');
			const expectedJson = JSON.stringify(testObject, null, 2);
			expect(callArg).toBe(expectedJson);
			expect(mockNotification).toHaveBeenCalledWith(
				expect.objectContaining({
					message: expect.stringContaining('object copied'),

@@ -1,8 +1,17 @@
import { Button, Collapse, Input, Menu, Popover, Typography } from 'antd';
import {
	Button,
	Collapse,
	Input,
	Menu,
	Popover,
	Skeleton,
	Typography,
} from 'antd';
import { ColumnsType } from 'antd/es/table';
import logEvent from 'api/common/logEvent';
import { ResizeTable } from 'components/ResizeTable';
import { DataType } from 'container/LogDetailedView/TableView';
import { useGetMetricAttributes } from 'hooks/metricsExplorer/v2/useGetMetricAttributes';
import { useNotifications } from 'hooks/useNotifications';
import { Compass, Copy, Search } from 'lucide-react';
import { useCallback, useMemo, useState } from 'react';
@@ -13,7 +22,9 @@ import ROUTES from '../../../constants/routes';
import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
import { AllAttributesProps, AllAttributesValueProps } from './types';
import { getMetricDetailsQuery } from './utils';
import { getMetricDetailsQuery, transformMetricAttributes } from './utils';

const ALL_ATTRIBUTES_KEY = 'all-attributes';

export function AllAttributesValue({
	filterKey,
@@ -110,13 +121,20 @@ export function AllAttributesValue({

function AllAttributes({
	metricName,
	attributes,
	metricType,
}: AllAttributesProps): JSX.Element {
	const [searchString, setSearchString] = useState('');
	const [activeKey, setActiveKey] = useState<string | string[]>(
		'all-attributes',
	);
	const [activeKey, setActiveKey] = useState<string[]>([ALL_ATTRIBUTES_KEY]);

	const {
		data: attributesData,
		isLoading: isLoadingAttributes,
		isError: isErrorAttributes,
	} = useGetMetricAttributes({
		metricName,
	});

	const { attributes } = transformMetricAttributes(attributesData);

	const { handleExplorerTabChange } = useHandleExplorerTabChange();

@@ -178,7 +196,7 @@ function AllAttributes({
			attributes.filter(
				(attribute) =>
					attribute.key.toLowerCase().includes(searchString.toLowerCase()) ||
					attribute.value.some((value) =>
					attribute.values.some((value) =>
						value.toLowerCase().includes(searchString.toLowerCase()),
					),
			),
@@ -195,7 +213,7 @@ function AllAttributes({
					},
					value: {
						key: attribute.key,
						value: attribute.value,
						value: attribute.values,
					},
				}))
			: [],
@@ -252,8 +270,38 @@ function AllAttributes({
		],
	);

	const items = useMemo(
		() => [
	const emptyText = useMemo(
		() =>
			isErrorAttributes ? 'Error fetching attributes' : 'No attributes found',
		[isErrorAttributes],
	);

	const items = useMemo(() => {
		let children;
		if (isLoadingAttributes) {
			children = (
				<div className="all-attributes-skeleton-container">
					<Skeleton active title={false} paragraph={{ rows: 8 }} />
				</div>
			);
		} else {
			children = (
				<ResizeTable
					columns={columns}
					loading={isLoadingAttributes}
					tableLayout="fixed"
					dataSource={tableData}
					pagination={false}
					showHeader={false}
					className="metrics-accordion-content all-attributes-content"
					scroll={{ y: 600 }}
					locale={{
						emptyText,
					}}
				/>
			);
		}
		return [
			{
				label: (
					<div className="metrics-accordion-header">
@@ -270,32 +318,22 @@ function AllAttributes({
							onClick={(e): void => {
								e.stopPropagation();
							}}
							disabled={isLoadingAttributes}
						/>
					</div>
				),
				key: 'all-attributes',
				children: (
					<ResizeTable
						columns={columns}
						tableLayout="fixed"
						dataSource={tableData}
						pagination={false}
						showHeader={false}
						className="metrics-accordion-content all-attributes-content"
						scroll={{ y: 600 }}
					/>
				),
				children,
			},
		],
		[columns, tableData, searchString],
	);
		];
	}, [searchString, columns, isLoadingAttributes, tableData, emptyText]);

	return (
		<Collapse
			bordered
			className="metrics-accordion metrics-metadata-accordion"
			className="metrics-accordion metrics-all-attributes-accordion"
			activeKey={activeKey}
			onChange={(keys): void => setActiveKey(keys)}
			onChange={(keys): void => setActiveKey(keys as string[])}
			items={items}
		/>
	);

@@ -1,37 +1,56 @@
import { Color } from '@signozhq/design-tokens';
import { Dropdown, Typography } from 'antd';
import { Skeleton } from 'antd/lib';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { useGetMetricAlerts } from 'hooks/metricsExplorer/v2/useGetMetricAlerts';
import { useGetMetricDashboards } from 'hooks/metricsExplorer/v2/useGetMetricDashboards';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { Bell, Grid } from 'lucide-react';
import { useMemo } from 'react';
import { generatePath } from 'react-router-dom';
import { pluralize } from 'utils/pluralize';

import { DashboardsAndAlertsPopoverProps } from './types';
import { transformMetricAlerts, transformMetricDashboards } from './utils';

function DashboardsAndAlertsPopover({
	alerts,
	dashboards,
	metricName,
}: DashboardsAndAlertsPopoverProps): JSX.Element | null {
	const { safeNavigate } = useSafeNavigate();
	const params = useUrlQuery();

	const {
		data: alertsData,
		isLoading: isLoadingAlerts,
		isError: isErrorAlerts,
	} = useGetMetricAlerts(metricName);

	const {
		data: dashboardsData,
		isLoading: isLoadingDashboards,
		isError: isErrorDashboards,
	} = useGetMetricDashboards(metricName);

	const alerts = transformMetricAlerts(alertsData);
	const dashboards = transformMetricDashboards(dashboardsData);

	const alertsPopoverContent = useMemo(() => {
		if (alerts && alerts.length > 0) {
			return alerts.map((alert) => ({
				key: alert.alert_id,
				key: alert.alertId,
				label: (
					<Typography.Link
						key={alert.alert_id}
						key={alert.alertId}
						onClick={(): void => {
							params.set(QueryParams.ruleId, alert.alert_id);
							params.set(QueryParams.ruleId, alert.alertId);
							history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`);
						}}
						className="dashboards-popover-content-item"
					>
						{alert.alert_name || alert.alert_id}
						{alert.alertName || alert.alertId}
					</Typography.Link>
				),
			}));
@@ -39,41 +58,44 @@ function DashboardsAndAlertsPopover({
		return null;
	}, [alerts, params]);

	const uniqueDashboards = useMemo(
		() =>
			dashboards?.filter(
				(item, index, self) =>
					index === self.findIndex((t) => t.dashboard_id === item.dashboard_id),
			),
		[dashboards],
	);

	const dashboardsPopoverContent = useMemo(() => {
		if (uniqueDashboards && uniqueDashboards.length > 0) {
			return uniqueDashboards.map((dashboard) => ({
				key: dashboard.dashboard_id,
		if (dashboards && dashboards.length > 0) {
			return dashboards.map((dashboard) => ({
				key: dashboard.dashboardId,
				label: (
					<Typography.Link
						key={dashboard.dashboard_id}
						key={dashboard.dashboardId}
						onClick={(): void => {
							safeNavigate(
								generatePath(ROUTES.DASHBOARD, {
									dashboardId: dashboard.dashboard_id,
									dashboardId: dashboard.dashboardId,
								}),
							);
						}}
						className="dashboards-popover-content-item"
					>
						{dashboard.dashboard_name || dashboard.dashboard_id}
						{dashboard.dashboardName || dashboard.dashboardId}
					</Typography.Link>
				),
			}));
		}
		return null;
	}, [uniqueDashboards, safeNavigate]);
	}, [dashboards, safeNavigate]);

	if (!dashboardsPopoverContent && !alertsPopoverContent) {
		return null;
	if (isLoadingAlerts || isLoadingDashboards) {
		return (
			<div className="dashboards-and-alerts-popover-container">
				<Skeleton title={false} paragraph={{ rows: 1 }} active />
			</div>
		);
	}

	// If there are no dashboards or alerts or both have errors, don't show the popover
	const hidePopover =
		(!dashboardsPopoverContent && !alertsPopoverContent) ||
		(isErrorAlerts && isErrorDashboards);
	if (hidePopover) {
		return <div className="dashboards-and-alerts-popover-container" />;
	}

	return (
@@ -92,8 +114,7 @@ function DashboardsAndAlertsPopover({
				>
					<Grid size={12} color={Color.BG_SIENNA_500} />
					<Typography.Text>
						{uniqueDashboards?.length} dashboard
						{uniqueDashboards?.length === 1 ? '' : 's'}
						{pluralize(dashboards.length, 'dashboard', 'dashboards')}
					</Typography.Text>
				</div>
			</Dropdown>
@@ -112,7 +133,7 @@ function DashboardsAndAlertsPopover({
				>
					<Bell size={12} color={Color.BG_SAKURA_500} />
					<Typography.Text>
						{alerts?.length} alert {alerts?.length === 1 ? 'rule' : 'rules'}
						{pluralize(alerts.length, 'alert rule', 'alert rules')}
					</Typography.Text>
				</div>
			</Dropdown>

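The pluralize(count, singular, plural) call replaces the hand-rolled `length === 1 ? ... : ...` checks above. A sketch of a helper matching that call signature; the count-prefixed output format is an assumption inferred from the strings it replaces:

// Sketch of a pluralize helper compatible with the calls above.
export function pluralize(
	count: number,
	singular: string,
	plural: string,
): string {
	return `${count} ${count === 1 ? singular : plural}`;
}

// pluralize(1, 'dashboard', 'dashboards') -> '1 dashboard'
// pluralize(3, 'alert rule', 'alert rules') -> '3 alert rules'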
@@ -0,0 +1,115 @@
import { Skeleton, Tooltip, Typography } from 'antd';
import { useGetMetricHighlights } from 'hooks/metricsExplorer/v2/useGetMetricHighlights';
import { useMemo } from 'react';

import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
import { HighlightsProps } from './types';
import {
	formatNumberToCompactFormat,
	formatTimestampToReadableDate,
	transformMetricHighlights,
} from './utils';

function Highlights({ metricName }: HighlightsProps): JSX.Element {
	const {
		data: metricHighlightsData,
		isLoading: isLoadingMetricHighlights,
		isError: isErrorMetricHighlights,
	} = useGetMetricHighlights(metricName ?? '', {
		enabled: !!metricName,
	});

	const metricHighlights = transformMetricHighlights(metricHighlightsData);

	const dataPoints = useMemo(() => {
		if (!metricHighlights) return null;
		if (isErrorMetricHighlights) {
			return (
				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
			);
		}
		return (
			<Typography.Text className="metric-details-grid-value">
				<Tooltip title={metricHighlights?.dataPoints.toLocaleString()}>
					{formatNumberIntoHumanReadableFormat(metricHighlights?.dataPoints ?? 0)}
				</Tooltip>
			</Typography.Text>
		);
	}, [metricHighlights, isErrorMetricHighlights]);

	const timeSeries = useMemo(() => {
		if (!metricHighlights) return null;
		if (isErrorMetricHighlights) {
			return (
				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
			);
		}

		const timeSeriesActive = formatNumberToCompactFormat(
			metricHighlights.activeTimeSeries,
		);
		const timeSeriesTotal = formatNumberToCompactFormat(
			metricHighlights.totalTimeSeries,
		);

		return (
			<Typography.Text className="metric-details-grid-value">
				<Tooltip
					title="Active time series are those that have received data points in the last 1
					hour."
					placement="top"
				>
					<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
				</Tooltip>
			</Typography.Text>
		);
	}, [metricHighlights, isErrorMetricHighlights]);

	const lastReceived = useMemo(() => {
		if (!metricHighlights) return null;
		if (isErrorMetricHighlights) {
			return (
				<Typography.Text className="metric-details-grid-value">-</Typography.Text>
			);
		}
		const displayText = formatTimestampToReadableDate(
			metricHighlights.lastReceived,
		);
		return (
			<Typography.Text className="metric-details-grid-value">
				<Tooltip title={displayText}>{displayText}</Tooltip>
			</Typography.Text>
		);
	}, [metricHighlights, isErrorMetricHighlights]);

	if (isLoadingMetricHighlights) {
		return (
			<div className="metric-details-content-grid">
				<Skeleton title={false} paragraph={{ rows: 2 }} active />
			</div>
		);
	}

	return (
		<div className="metric-details-content-grid">
			<div className="labels-row">
				<Typography.Text type="secondary" className="metric-details-grid-label">
					SAMPLES
				</Typography.Text>
				<Typography.Text type="secondary" className="metric-details-grid-label">
					TIME SERIES
				</Typography.Text>
				<Typography.Text type="secondary" className="metric-details-grid-label">
					LAST RECEIVED
				</Typography.Text>
			</div>
			<div className="values-row">
				{dataPoints}
				{timeSeries}
				{lastReceived}
			</div>
		</div>
	);
}

export default Highlights;
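The new component is self-contained: it takes a single prop and fetches, transforms, and renders its own data. A hypothetical usage, with an illustrative metric name:

// Hypothetical usage; 'http_server_duration' is an example metric name.
<Highlights metricName="http_server_duration" />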
@@ -1,19 +1,19 @@
import { Button, Collapse, Input, Select, Typography } from 'antd';
import { Button, Collapse, Input, Select, Skeleton, Typography } from 'antd';
import { ColumnsType } from 'antd/es/table';
import logEvent from 'api/common/logEvent';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { UpdateMetricMetadataProps } from 'api/metricsExplorer/updateMetricMetadata';
import { ResizeTable } from 'components/ResizeTable';
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import FieldRenderer from 'container/LogDetailedView/FieldRenderer';
import { DataType } from 'container/LogDetailedView/TableView';
import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/v2/useUpdateMetricMetadata';
import { useNotifications } from 'hooks/useNotifications';
import { Edit2, Save, X } from 'lucide-react';
import { useCallback, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';

import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
@@ -23,23 +23,22 @@ import {
} from '../Summary/constants';
import { MetricTypeRenderer } from '../Summary/utils';
import { METRIC_METADATA_KEYS } from './constants';
import { MetadataProps } from './types';
import { determineIsMonotonic } from './utils';
import { MetadataProps, MetricMetadataState, TableFields } from './types';
import { transformUpdateMetricMetadataRequest } from './utils';

function Metadata({
	metricName,
	metadata,
	refetchMetricDetails,
	isErrorMetricMetadata,
	isLoadingMetricMetadata,
}: MetadataProps): JSX.Element {
	const [isEditing, setIsEditing] = useState(false);
	const [
		metricMetadata,
		setMetricMetadata,
	] = useState<UpdateMetricMetadataProps>({
		metricType: metadata?.metric_type || MetricType.SUM,
		description: metadata?.description || '',
		temporality: metadata?.temporality,
		unit: metadata?.unit,

	const [metricMetadata, setMetricMetadata] = useState<MetricMetadataState>({
		metricType: MetricType.SUM,
		description: '',
		temporality: undefined,
		unit: undefined,
	});
	const { notifications } = useNotifications();
	const {
@@ -51,6 +50,18 @@ function Metadata({
	);
	const queryClient = useQueryClient();

	// Initialize state from metadata api data
	useEffect(() => {
		if (metadata) {
			setMetricMetadata({
				metricType: metadata.metricType,
				description: metadata.description,
				temporality: metadata.temporality,
				unit: metadata.unit,
			});
		}
	}, [metadata]);

	const tableData = useMemo(
		() =>
			metadata
@@ -59,7 +70,7 @@ function Metadata({
					temporality: metadata?.temporality,
				})
					// Filter out monotonic as user input is not required
					.filter((key) => key !== 'monotonic')
					.filter((key) => key !== TableFields.IS_MONOTONIC)
					.map((key) => ({
						key,
						value: {
@@ -72,30 +83,37 @@ function Metadata({
	);

	// Render un-editable field value
	const renderUneditableField = useCallback((key: string, value: string) => {
		if (key === 'metric_type') {
			return <MetricTypeRenderer type={value as MetricType} />;
		}
		let fieldValue = value;
		if (key === 'unit') {
			fieldValue = getUniversalNameFromMetricUnit(value);
		}
		return <FieldRenderer field={fieldValue || '-'} />;
	}, []);
	const renderUneditableField = useCallback(
		(key: keyof MetricMetadataState, value: string) => {
			if (isErrorMetricMetadata) {
				return <FieldRenderer field="-" />;
			}
			if (key === TableFields.METRIC_TYPE) {
				return <MetricTypeRenderer type={value as MetricType} />;
			}
			let fieldValue = value;
			if (key === TableFields.UNIT) {
				fieldValue = getUniversalNameFromMetricUnit(value);
			}
			return <FieldRenderer field={fieldValue || '-'} />;
		},
		[isErrorMetricMetadata],
	);

	const renderColumnValue = useCallback(
		(field: { value: string; key: string }): JSX.Element => {
		(field: { value: string; key: keyof MetricMetadataState }): JSX.Element => {
			if (!isEditing) {
				return renderUneditableField(field.key, field.value);
			}

			// Don't allow editing of unit if it's already set
			const metricUnitAlreadySet = field.key === 'unit' && Boolean(metadata?.unit);
			const metricUnitAlreadySet =
				field.key === TableFields.UNIT && Boolean(metadata?.unit);
			if (metricUnitAlreadySet) {
				return renderUneditableField(field.key, field.value);
			}

			if (field.key === 'metric_type') {
			if (field.key === TableFields.METRIC_TYPE) {
				return (
					<Select
						data-testid="metric-type-select"
@@ -113,7 +131,7 @@ function Metadata({
					/>
				);
			}
			if (field.key === 'unit') {
			if (field.key === TableFields.UNIT) {
				return (
					<YAxisUnitSelector
						value={metricMetadata.unit}
@@ -125,7 +143,7 @@ function Metadata({
					/>
				);
			}
			if (field.key === 'temporality') {
			if (field.key === TableFields.Temporality) {
				return (
					<Select
						data-testid="temporality-select"
@@ -143,16 +161,12 @@ function Metadata({
					/>
				);
			}
			if (field.key === 'description') {
			if (field.key === TableFields.DESCRIPTION) {
				return (
					<Input
						data-testid="description-input"
						name={field.key}
						defaultValue={
							metricMetadata[
								field.key as Exclude<keyof UpdateMetricMetadataProps, 'isMonotonic'>
							]
						}
						defaultValue={metricMetadata.description}
						onChange={(e): void => {
							setMetricMetadata((prev) => ({
								...prev,
@@ -202,17 +216,11 @@ function Metadata({
		updateMetricMetadata(
			{
				metricName,
				payload: {
					...metricMetadata,
					isMonotonic: determineIsMonotonic(
						metricMetadata.metricType,
						metricMetadata.temporality,
					),
				},
				payload: transformUpdateMetricMetadataRequest(metricMetadata),
			},
			{
				onSuccess: (response): void => {
					if (response?.statusCode === 200) {
					if (response?.httpStatusCode === 200) {
						logEvent(MetricsExplorerEvents.MetricMetadataUpdated, {
							[MetricsExplorerEventKeys.MetricName]: metricName,
							[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -221,9 +229,12 @@ function Metadata({
						notifications.success({
							message: 'Metadata updated successfully',
						});
						refetchMetricDetails();
						setIsEditing(false);
						queryClient.invalidateQueries(['metricsList']);
						queryClient.invalidateQueries([REACT_QUERY_KEY.GET_METRICS_LIST]);
						queryClient.invalidateQueries([
							REACT_QUERY_KEY.GET_METRIC_METADATA,
							metricName,
						]);
					} else {
						notifications.error({
							message:
@@ -243,21 +254,36 @@ function Metadata({
		metricName,
		metricMetadata,
		notifications,
		refetchMetricDetails,
		queryClient,
	]);

	const cancelEdit = useCallback(
		(e: React.MouseEvent<HTMLElement, MouseEvent>): void => {
			e.stopPropagation();
			if (metadata) {
				setMetricMetadata({
					metricType: metadata.metricType,
					description: metadata.description,
					temporality: metadata.temporality,
					unit: metadata.unit,
				});
			}
			setIsEditing(false);
		},
		[metadata],
	);

	const actionButton = useMemo(() => {
		if (isLoadingMetricMetadata) {
			return null;
		}
		if (isEditing) {
			return (
				<div className="action-menu">
					<Button
						className="action-button"
						type="text"
						onClick={(e): void => {
							e.stopPropagation();
							setIsEditing(false);
						}}
						onClick={cancelEdit}
						disabled={isUpdatingMetricsMetadata}
					>
						<X size={14} />
@@ -294,10 +320,35 @@ function Metadata({
					</Button>
				</div>
			);
	}, [handleSave, isEditing, isUpdatingMetricsMetadata]);
	}, [
		isLoadingMetricMetadata,
		isEditing,
		isUpdatingMetricsMetadata,
		cancelEdit,
		handleSave,
	]);

	const items = useMemo(
		() => [
	const items = useMemo(() => {
		let children;
		if (isLoadingMetricMetadata) {
			children = (
				<div className="metrics-metadata-skeleton-container">
					<Skeleton active title={false} paragraph={{ rows: 8 }} />
				</div>
			);
		} else {
			children = (
				<ResizeTable
					columns={columns}
					tableLayout="fixed"
					dataSource={tableData}
					pagination={false}
					showHeader={false}
					className="metrics-accordion-content metrics-metadata-container"
				/>
			);
		}
		return [
			{
				label: (
					<div className="metrics-accordion-header metrics-metadata-header">
@@ -306,20 +357,10 @@ function Metadata({
					</div>
				),
				key: 'metric-metadata',
				children: (
					<ResizeTable
						columns={columns}
						tableLayout="fixed"
						dataSource={tableData}
						pagination={false}
						showHeader={false}
						className="metrics-accordion-content metrics-metadata-container"
					/>
				),
				children,
			},
		],
		[actionButton, columns, tableData],
	);
		];
	}, [actionButton, columns, isLoadingMetricMetadata, tableData]);

	return (
		<Collapse

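On a successful save, the hunk above replaces the hard-coded 'metricsList' cache key with shared REACT_QUERY_KEY constants and also invalidates the per-metric metadata query, so dependent views re-fetch instead of relying on the removed refetchMetricDetails callback. The same pattern in isolation, as a sketch with assumed key values:

// Sketch of the cache-invalidation pattern above (react-query v3 style).
import { useQueryClient } from 'react-query';

function useInvalidateMetricCaches(metricName: string): () => void {
	const queryClient = useQueryClient();
	return (): void => {
		// Assumed key values; the real ones come from constants/reactQueryKeys.
		queryClient.invalidateQueries(['GET_METRICS_LIST']);
		queryClient.invalidateQueries(['GET_METRIC_METADATA', metricName]);
	};
}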
@@ -39,6 +39,7 @@
	gap: 12px;

	.metric-details-content-grid {
		height: 50px;
		.labels-row,
		.values-row {
			display: grid;
@@ -72,6 +73,7 @@
	.dashboards-and-alerts-popover-container {
		display: flex;
		gap: 16px;
		height: 32px;

		.dashboards-and-alerts-popover {
			border-radius: 20px;
@@ -148,7 +150,6 @@

	.all-attributes-search-input {
		width: 300px;
		border: 1px solid var(--bg-slate-300);
	}
}

@@ -161,6 +162,7 @@
		.ant-typography:first-child {
			font-family: 'Geist Mono';
			color: var(--bg-robin-400);
			background-color: transparent;
		}
	}
	.all-attributes-contribution {
@@ -237,6 +239,7 @@
	}

	.metric-metadata-value {
		height: 67px;
		background: rgba(22, 25, 34, 0.4);
		overflow-x: scroll;
		.field-renderer-container {
@@ -266,6 +269,33 @@
			border-top-width: 0.5px !important;
		}
	}

	.metrics-metadata-accordion {
		.ant-collapse-item {
			.ant-collapse-content {
				height: 268px;

				.ant-collapse-content-box {
					.metrics-metadata-skeleton-container {
						margin: 12px;
					}
				}
			}
		}
	}

	.metrics-all-attributes-accordion {
		.ant-collapse-item {
			.ant-collapse-content {
				height: 600px;
				.ant-collapse-content-box {
					.all-attributes-skeleton-container {
						margin: 12px;
					}
				}
			}
		}
	}
}

	.ant-select {
@@ -330,18 +360,26 @@
	.metric-details-content {
		.metrics-accordion {
			.metrics-accordion-header {
				.action-button {
					.ant-typography {
						color: var(--bg-slate-400);
				.action-menu {
					.action-button {
						.ant-typography {
							color: var(--bg-slate-400);
						}
					}
				}
			}

			.metrics-accordion-content {
				.metric-metadata-key {
					.field-renderer-container {
						.label {
							color: var(--bg-slate-300);
						}
					}

				.all-attributes-key {
					.ant-typography:last-child {
						color: var(--bg-slate-400);
						color: var(--bg-vanilla-200);
						background-color: var(--bg-robin-300);
					}
				}

@@ -2,17 +2,9 @@ import './MetricDetails.styles.scss';
import '../Summary/Summary.styles.scss';

import { Color } from '@signozhq/design-tokens';
import {
	Button,
	Divider,
	Drawer,
	Empty,
	Skeleton,
	Tooltip,
	Typography,
} from 'antd';
import { Button, Divider, Drawer, Empty, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetMetricDetails } from 'hooks/metricsExplorer/useGetMetricDetails';
import { useGetMetricMetadata } from 'hooks/metricsExplorer/v2/useGetMetricMetadata';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { Compass, Crosshair, X } from 'lucide-react';
import { useCallback, useEffect, useMemo } from 'react';
@@ -22,16 +14,12 @@ import ROUTES from '../../../constants/routes';
import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
import { isInspectEnabled } from '../Inspect/utils';
import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
import AllAttributes from './AllAttributes';
import DashboardsAndAlertsPopover from './DashboardsAndAlertsPopover';
import Highlights from './Highlights';
import Metadata from './Metadata';
import { MetricDetailsProps } from './types';
import {
	formatNumberToCompactFormat,
	formatTimestampToReadableDate,
	getMetricDetailsQuery,
} from './utils';
import { getMetricDetailsQuery, transformMetricMetadata } from './utils';

function MetricDetails({
	onClose,
@@ -43,50 +31,25 @@ function MetricDetails({
	const { handleExplorerTabChange } = useHandleExplorerTabChange();

	const {
		data,
		isLoading,
		isFetching,
		error: metricDetailsError,
		refetch: refetchMetricDetails,
	} = useGetMetricDetails(metricName ?? '', {
		data: metricMetadataResponse,
		isLoading: isLoadingMetricMetadata,
		isError: isErrorMetricMetadata,
	} = useGetMetricMetadata(metricName ?? '', {
		enabled: !!metricName,
	});

	const metric = data?.payload?.data;

	const lastReceived = useMemo(() => {
		if (!metric) return null;
		return formatTimestampToReadableDate(metric.lastReceived);
	}, [metric]);
	const metadata = transformMetricMetadata(metricMetadataResponse);

	const showInspectFeature = useMemo(
		() => isInspectEnabled(metric?.metadata?.metric_type),
		[metric],
		() => isInspectEnabled(metadata?.metricType),
		[metadata],
	);

	const isMetricDetailsLoading = isLoading || isFetching;

	const timeSeries = useMemo(() => {
		if (!metric) return null;
		const timeSeriesActive = formatNumberToCompactFormat(metric.timeSeriesActive);
		const timeSeriesTotal = formatNumberToCompactFormat(metric.timeSeriesTotal);

		return (
			<Tooltip
				title="Active time series are those that have received data points in the last 1
				hour."
				placement="top"
			>
				<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
			</Tooltip>
		);
	}, [metric]);

	const goToMetricsExplorerwithSelectedMetric = useCallback(() => {
		if (metricName) {
			const compositeQuery = getMetricDetailsQuery(
				metricName,
				metric?.metadata?.metric_type,
				metadata?.metricType,
			);
			handleExplorerTabChange(
				PANEL_TYPES.TIME_SERIES,
@@ -103,9 +66,7 @@ function MetricDetails({
				[MetricsExplorerEventKeys.Modal]: 'metric-details',
			});
		}
	}, [metricName, handleExplorerTabChange, metric?.metadata?.metric_type]);

	const isMetricDetailsError = metricDetailsError || !metric;
	}, [metricName, handleExplorerTabChange, metadata?.metricType]);

	useEffect(() => {
		logEvent(MetricsExplorerEvents.ModalOpened, {
@@ -113,6 +74,10 @@ function MetricDetails({
		});
	}, []);

	if (!metricName) {
		return <Empty description="Metric not found" />;
	}

	return (
		<Drawer
			width="60%"
@@ -120,7 +85,7 @@ function MetricDetails({
			<div className="metric-details-header">
				<div className="metric-details-title">
					<Divider type="vertical" />
					<Typography.Text>{metric?.name}</Typography.Text>
					<Typography.Text>{metricName}</Typography.Text>
				</div>
				<div className="metric-details-header-buttons">
					<Button
@@ -138,8 +103,8 @@ function MetricDetails({
						aria-label="Inspect Metric"
						icon={<Crosshair size={18} />}
						onClick={(): void => {
							if (metric?.name) {
								openInspectModal(metric.name);
							if (metricName) {
								openInspectModal(metricName);
							}
						}}
						data-testid="inspect-metric-button"
@@ -159,60 +124,17 @@ function MetricDetails({
			destroyOnClose
			closeIcon={<X size={16} />}
		>
			{isMetricDetailsLoading && (
				<div data-testid="metric-details-skeleton">
					<Skeleton active />
				</div>
			)}
			{isMetricDetailsError && !isMetricDetailsLoading && (
				<Empty description="Error fetching metric details" />
			)}
			{!isMetricDetailsLoading && !isMetricDetailsError && (
				<div className="metric-details-content">
					<div className="metric-details-content-grid">
						<div className="labels-row">
							<Typography.Text type="secondary" className="metric-details-grid-label">
								SAMPLES
							</Typography.Text>
							<Typography.Text type="secondary" className="metric-details-grid-label">
								TIME SERIES
							</Typography.Text>
							<Typography.Text type="secondary" className="metric-details-grid-label">
								LAST RECEIVED
							</Typography.Text>
						</div>
						<div className="values-row">
							<Typography.Text className="metric-details-grid-value">
								<Tooltip title={metric?.samples.toLocaleString()}>
									{formatNumberIntoHumanReadableFormat(metric?.samples)}
								</Tooltip>
							</Typography.Text>
							<Typography.Text className="metric-details-grid-value">
								<Tooltip title={timeSeries}>{timeSeries}</Tooltip>
							</Typography.Text>
							<Typography.Text className="metric-details-grid-value">
								<Tooltip title={lastReceived}>{lastReceived}</Tooltip>
							</Typography.Text>
						</div>
					</div>
					<DashboardsAndAlertsPopover
						dashboards={metric.dashboards}
						alerts={metric.alerts}
					/>
					<Metadata
						metricName={metric?.name}
						metadata={metric.metadata}
						refetchMetricDetails={refetchMetricDetails}
					/>
					{metric.attributes && (
						<AllAttributes
							metricName={metric?.name}
							attributes={metric.attributes}
							metricType={metric?.metadata?.metric_type}
						/>
					)}
				</div>
			)}
			<div className="metric-details-content">
				<Highlights metricName={metricName} />
				<DashboardsAndAlertsPopover metricName={metricName} />
				<Metadata
					metricName={metricName}
					metadata={metadata}
					isErrorMetricMetadata={isErrorMetricMetadata}
					isLoadingMetricMetadata={isLoadingMetricMetadata}
				/>
				<AllAttributes metricName={metricName} metricType={metadata?.metricType} />
			</div>
		</Drawer>
	);
}

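The refactor above swaps the aggregate useGetMetricDetails call for focused v2 hooks (useGetMetricMetadata here, plus the highlights, alerts, dashboards, and attributes hooks used by the child components), each returning the usual { data, isLoading, isError } triple. A sketch of what such a hook typically looks like as a thin react-query wrapper; the cache key shape, endpoint path, and fetcher are assumptions for illustration only:

// Sketch only: a focused v2 hook as a thin useQuery wrapper (react-query v3).
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';

function useGetMetricMetadataSketch(
	metricName: string,
	options?: UseQueryOptions<unknown, Error>,
): UseQueryResult<unknown, Error> {
	return useQuery(
		['GET_METRIC_METADATA', metricName], // assumed cache key shape
		() =>
			// assumed endpoint; the real fetcher lives in the api layer
			fetch(`/api/v2/metrics/${encodeURIComponent(metricName)}/metadata`).then(
				(res) => res.json(),
			),
		options,
	);
}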

@@ -1,6 +1,6 @@
export const METRIC_METADATA_KEYS = {
	description: 'Description',
	unit: 'Unit',
	metric_type: 'Metric Type',
	metricType: 'Metric Type',
	temporality: 'Temporality',
};

@@ -1,9 +1,4 @@
import {
	MetricDetails,
	MetricDetailsAlert,
	MetricDetailsAttribute,
	MetricDetailsDashboard,
} from 'api/metricsExplorer/getMetricDetails';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';

export interface MetricDetailsProps {
@@ -14,19 +9,21 @@ export interface MetricDetailsProps {
	openInspectModal: (metricName: string) => void;
}

export interface HighlightsProps {
	metricName: string;
}
export interface DashboardsAndAlertsPopoverProps {
	dashboards: MetricDetailsDashboard[] | null;
	alerts: MetricDetailsAlert[] | null;
	metricName: string;
}

export interface MetadataProps {
	metricName: string;
	metadata: MetricDetails['metadata'] | undefined;
	refetchMetricDetails: () => void;
	metadata: MetricMetadata | null;
	isErrorMetricMetadata: boolean;
	isLoadingMetricMetadata: boolean;
}

export interface AllAttributesProps {
	attributes: MetricDetailsAttribute[];
	metricName: string;
	metricType: MetricType | undefined;
}
@@ -36,3 +33,51 @@ export interface AllAttributesValueProps {
	filterValue: string[];
	goToMetricsExploreWithAppliedAttribute: (key: string, value: string) => void;
}

export interface MetricHighlight {
	dataPoints: number;
	lastReceived: number;
	totalTimeSeries: number;
	activeTimeSeries: number;
}

export interface MetricAlert {
	alertName: string;
	alertId: string;
}

export interface MetricDashboard {
	dashboardName: string;
	dashboardId: string;
	widgetId: string;
	widgetName: string;
}

export interface MetricMetadata {
	metricType: MetricType;
	description: string;
	unit: string;
	temporality: Temporality;
	isMonotonic: boolean;
}

export interface MetricMetadataState {
	metricType: MetricType;
	description: string;
	temporality: Temporality | undefined;
	unit: string | undefined;
}

export interface MetricAttribute {
	key: string;
	values: string[];
	valueCount: number;
}

export enum TableFields {
	DESCRIPTION = 'description',
	UNIT = 'unit',
	METRIC_TYPE = 'metricType',
	Temporality = 'temporality',
	IS_MONOTONIC = 'isMonotonic',
}

|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { SuccessResponseV2 } from 'types/api';
|
||||
import {
|
||||
GetMetricAlertsResponse,
|
||||
GetMetricAttributesResponse,
|
||||
GetMetricDashboardsResponse,
|
||||
GetMetricHighlightsResponse,
|
||||
GetMetricMetadataResponse,
|
||||
UpdateMetricMetadataRequest,
|
||||
} from 'types/api/metricsExplorer/v2';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
export function formatTimestampToReadableDate(timestamp: string): string {
|
||||
import {
|
||||
MetricAlert,
|
||||
MetricAttribute,
|
||||
MetricDashboard,
|
||||
MetricHighlight,
|
||||
MetricMetadata,
|
||||
MetricMetadataState,
|
||||
} from './types';
|
||||
|
||||
export function formatTimestampToReadableDate(timestamp: number): string {
|
||||
const date = new Date(timestamp);
|
||||
const now = new Date();
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000);
|
||||
@@ -154,3 +172,149 @@ export function getMetricDetailsQuery(
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function transformMetricHighlights(
|
||||
apiData: SuccessResponseV2<GetMetricHighlightsResponse> | undefined,
|
||||
): MetricHighlight | null {
|
||||
if (!apiData || !apiData.data || !apiData.data.data) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const {
|
||||
dataPoints,
|
||||
lastReceived,
|
||||
totalTimeSeries,
|
||||
activeTimeSeries,
|
||||
} = apiData.data.data;
|
||||
|
||||
return {
|
||||
dataPoints,
|
||||
lastReceived,
|
||||
totalTimeSeries,
|
||||
activeTimeSeries,
|
||||
};
|
||||
}
|
||||
|
||||
export function transformMetricAlerts(
|
||||
apiData: SuccessResponseV2<GetMetricAlertsResponse> | undefined,
|
||||
): MetricAlert[] {
|
||||
if (
|
||||
!apiData ||
|
||||
!apiData.data ||
|
||||
!apiData.data.data ||
|
||||
!apiData.data.data.alerts
|
||||
) {
|
||||
return [];
|
||||
}
|
||||
return apiData.data.data.alerts.map((alert) => ({
|
||||
alertName: alert.alertName,
|
||||
alertId: alert.alertId,
|
||||
}));
|
||||
}
|
||||
|
||||
export function transformMetricDashboards(
|
||||
apiData: SuccessResponseV2<GetMetricDashboardsResponse> | undefined,
|
||||
): MetricDashboard[] {
|
||||
if (
|
||||
!apiData ||
|
||||
!apiData.data ||
|
||||
!apiData.data.data ||
|
||||
!apiData.data.data.dashboards
|
||||
) {
|
||||
return [];
|
||||
}
|
||||
const dashboards = apiData.data.data.dashboards.map((dashboard) => ({
|
||||
dashboardName: dashboard.dashboardName,
|
||||
dashboardId: dashboard.dashboardId,
|
||||
widgetId: dashboard.widgetId,
|
||||
widgetName: dashboard.widgetName,
|
||||
}));
|
||||
// Remove duplicate dashboards
|
||||
return dashboards.filter(
|
||||
(dashboard, index, self) =>
|
||||
index === self.findIndex((t) => t.dashboardId === dashboard.dashboardId),
|
||||
);
|
||||
}
|
||||
|
||||
export function transformTemporality(temporality: string): Temporality {
|
||||
switch (temporality) {
|
||||
case 'delta':
|
||||
return Temporality.DELTA;
|
||||
case 'cumulative':
|
||||
return Temporality.CUMULATIVE;
|
||||
default:
|
||||
return Temporality.DELTA;
|
||||
}
|
||||
}
|
||||
|
||||
export function transformMetricType(type: string): MetricType {
|
||||
switch (type) {
|
||||
case 'sum':
|
||||
return MetricType.SUM;
|
||||
case 'gauge':
|
||||
return MetricType.GAUGE;
|
||||
case 'summary':
|
||||
return MetricType.SUMMARY;
|
||||
case 'histogram':
|
||||
return MetricType.HISTOGRAM;
|
||||
case 'exponential_histogram':
|
||||
return MetricType.EXPONENTIAL_HISTOGRAM;
|
||||
default:
|
||||
return MetricType.SUM;
|
||||
}
|
||||
}
|
||||
|
||||
export function transformMetricMetadata(
|
||||
apiData: SuccessResponseV2<GetMetricMetadataResponse> | undefined,
|
||||
): MetricMetadata | null {
|
||||
if (!apiData || !apiData.data || !apiData.data.data) {
|
||||
return null;
|
||||
}
|
||||
const {
|
||||
type,
|
||||
description,
|
||||
unit,
|
||||
temporality,
|
||||
isMonotonic,
|
||||
} = apiData.data.data;
|
||||
|
||||
return {
|
||||
metricType: transformMetricType(type),
|
||||
description,
|
||||
unit,
|
||||
temporality: transformTemporality(temporality),
|
||||
isMonotonic,
|
||||
};
|
||||
}
|
||||
|
||||
export function transformUpdateMetricMetadataRequest(
|
||||
metricMetadata: MetricMetadataState,
|
||||
): UpdateMetricMetadataRequest {
|
||||
return {
|
||||
type: metricMetadata.metricType,
|
||||
description: metricMetadata.description,
|
||||
unit: metricMetadata.unit || '',
|
||||
temporality: metricMetadata.temporality || '',
|
||||
isMonotonic: determineIsMonotonic(
|
||||
metricMetadata.metricType,
|
||||
metricMetadata.temporality,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
export function transformMetricAttributes(
|
||||
apiData: SuccessResponseV2<GetMetricAttributesResponse> | undefined,
|
||||
): { attributes: MetricAttribute[]; totalKeys: number } {
|
||||
if (!apiData || !apiData.data || !apiData.data.data) {
|
||||
return { attributes: [], totalKeys: 0 };
|
||||
}
|
||||
const { attributes, totalKeys } = apiData.data.data;
|
||||
return {
|
||||
attributes: attributes.map((attribute) => ({
|
||||
key: attribute.key,
|
||||
values: attribute.values,
|
||||
valueCount: attribute.valueCount,
|
||||
})),
|
||||
totalKeys,
|
||||
};
|
||||
}
|
||||
|
||||
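
transformMetricDashboards deduplicates by dashboardId with a first-occurrence-wins filter, so a dashboard that references the metric from several widgets is reported once. A self-contained TypeScript sketch of that behaviour (the inline type stands in for the real import):

interface MetricDashboard {
	dashboardName: string;
	dashboardId: string;
	widgetId: string;
	widgetName: string;
}

// Same first-occurrence-wins dedupe as transformMetricDashboards.
function dedupeDashboards(dashboards: MetricDashboard[]): MetricDashboard[] {
	return dashboards.filter(
		(dashboard, index, self) =>
			index === self.findIndex((t) => t.dashboardId === dashboard.dashboardId),
	);
}

const result = dedupeDashboards([
	{ dashboardName: 'API', dashboardId: 'd1', widgetId: 'w1', widgetName: 'p99' },
	{ dashboardName: 'API', dashboardId: 'd1', widgetId: 'w2', widgetName: 'rps' },
	{ dashboardName: 'DB', dashboardId: 'd2', widgetId: 'w3', widgetName: 'qps' },
]);
console.log(result.map((d) => d.widgetId)); // ['w1', 'w3'] — the second d1 row is dropped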
@@ -363,7 +363,6 @@ export const WidgetHeaderProps: any = {
		title: 'Table - Panel',
		yAxisUnit: 'none',
	},
	parentHover: false,
	queryResponse: {
		status: 'success',
		isLoading: false,
@@ -679,7 +679,42 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
		registerShortcut(GlobalShortcuts.NavigateToExceptions, () =>
			onClickHandler(ROUTES.ALL_ERROR, null),
		);

		registerShortcut(GlobalShortcuts.NavigateToTracesFunnel, () =>
			onClickHandler(ROUTES.TRACES_FUNNELS, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToTracesViews, () =>
			onClickHandler(ROUTES.TRACES_SAVE_VIEWS, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToMetricsSummary, () =>
			onClickHandler(ROUTES.METRICS_EXPLORER, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToMetricsExplorer, () =>
			onClickHandler(ROUTES.METRICS_EXPLORER_EXPLORER, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToMetricsViews, () =>
			onClickHandler(ROUTES.METRICS_EXPLORER_VIEWS, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToSettings, () =>
			onClickHandler(ROUTES.SETTINGS, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToSettingsIngestion, () =>
			onClickHandler(ROUTES.INGESTION_SETTINGS, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToSettingsBilling, () =>
			onClickHandler(ROUTES.BILLING, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToSettingsAPIKeys, () =>
			onClickHandler(ROUTES.API_KEYS, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToSettingsNotificationChannels, () =>
			onClickHandler(ROUTES.ALL_CHANNELS, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToLogsPipelines, () =>
			onClickHandler(ROUTES.LOGS_PIPELINES, null),
		);
		registerShortcut(GlobalShortcuts.NavigateToLogsViews, () =>
			onClickHandler(ROUTES.LOGS_SAVE_VIEWS, null),
		);
		return (): void => {
			deregisterShortcut(GlobalShortcuts.NavigateToHome);
			deregisterShortcut(GlobalShortcuts.NavigateToServices);
@@ -689,6 +724,18 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
			deregisterShortcut(GlobalShortcuts.NavigateToAlerts);
			deregisterShortcut(GlobalShortcuts.NavigateToExceptions);
			deregisterShortcut(GlobalShortcuts.NavigateToMessagingQueues);
			deregisterShortcut(GlobalShortcuts.NavigateToTracesFunnel);
			deregisterShortcut(GlobalShortcuts.NavigateToMetricsSummary);
			deregisterShortcut(GlobalShortcuts.NavigateToMetricsExplorer);
			deregisterShortcut(GlobalShortcuts.NavigateToMetricsViews);
			deregisterShortcut(GlobalShortcuts.NavigateToSettings);
			deregisterShortcut(GlobalShortcuts.NavigateToSettingsIngestion);
			deregisterShortcut(GlobalShortcuts.NavigateToSettingsBilling);
			deregisterShortcut(GlobalShortcuts.NavigateToSettingsAPIKeys);
			deregisterShortcut(GlobalShortcuts.NavigateToSettingsNotificationChannels);
			deregisterShortcut(GlobalShortcuts.NavigateToLogsPipelines);
			deregisterShortcut(GlobalShortcuts.NavigateToLogsViews);
			deregisterShortcut(GlobalShortcuts.NavigateToTracesViews);
		};
	}, [deregisterShortcut, onClickHandler, registerShortcut]);
@@ -5,16 +5,20 @@
	&-virtuoso {
		background: rgba(171, 189, 255, 0.04);
	}
	&-list-container .logs-loading-skeleton {
	&-list-container {
		height: 100%;
		border: 1px solid var(--bg-slate-500);
		border-top: none;
		color: var(--bg-vanilla-400);
		display: flex;
		flex-direction: column;
		align-items: center;
		justify-content: center;
		padding: 8px 0;

		.logs-loading-skeleton {
			height: 100%;
			border: 1px solid var(--bg-slate-500);
			border-top: none;
			color: var(--bg-vanilla-400);
			display: flex;
			flex-direction: column;
			align-items: center;
			justify-content: center;
			padding: 8px 0;
		}
	}

	&-empty-content {
@@ -1,11 +1,18 @@
import { render } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { useEffect } from 'react';

import {
	KeyboardHotkeysProvider,
	useKeyboardHotkeys,
} from '../useKeyboardHotkeys';

jest.mock('../../../providers/cmdKProvider', () => ({
	useCmdK: (): { open: boolean } => ({
		open: false,
	}),
}));

function TestComponentWithRegister({
	handleShortcut,
}: {
@@ -13,14 +20,13 @@ function TestComponentWithRegister({
}): JSX.Element {
	const { registerShortcut } = useKeyboardHotkeys();

	registerShortcut('a', handleShortcut);
	useEffect(() => {
		registerShortcut('a', handleShortcut);
	}, [registerShortcut, handleShortcut]);

	return (
		<div>
			<span>Test Component</span>
		</div>
	);
	return <span>Test Component</span>;
}

function TestComponentWithDeRegister({
	handleShortcut,
}: {
@@ -28,21 +34,18 @@ function TestComponentWithDeRegister({
}): JSX.Element {
	const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();

	registerShortcut('b', handleShortcut);
	useEffect(() => {
		registerShortcut('b', handleShortcut);
		deregisterShortcut('b');
	}, [registerShortcut, deregisterShortcut, handleShortcut]);

	// Deregister the shortcut before triggering it
	deregisterShortcut('b');

	return (
		<div>
			<span>Test Component</span>
		</div>
	);
	return <span>Test Component</span>;
}

describe('KeyboardHotkeysProvider', () => {
	it('registers and triggers shortcuts correctly', async () => {
		const handleShortcut = jest.fn();
		const user = userEvent.setup();

		render(
			<KeyboardHotkeysProvider>
@@ -50,15 +53,15 @@ describe('KeyboardHotkeysProvider', () => {
			</KeyboardHotkeysProvider>,
		);

		// Trigger the registered shortcut
		await userEvent.keyboard('a');
		// fires on keyup
		await user.keyboard('{a}');

		// Assert that the handleShortcut function has been called
		expect(handleShortcut).toHaveBeenCalled();
		expect(handleShortcut).toHaveBeenCalledTimes(1);
	});

	it('deregisters shortcuts correctly', () => {
	it('does not trigger deregistered shortcuts', async () => {
		const handleShortcut = jest.fn();
		const user = userEvent.setup();

		render(
			<KeyboardHotkeysProvider>
@@ -66,10 +69,8 @@ describe('KeyboardHotkeysProvider', () => {
			</KeyboardHotkeysProvider>,
		);

		// Try to trigger the deregistered shortcut
		userEvent.keyboard('b');
		await user.keyboard('{b}');

		// Assert that the handleShortcut function has NOT been called
		expect(handleShortcut).not.toHaveBeenCalled();
	});
});
@@ -8,20 +8,21 @@ import {
	useRef,
} from 'react';

import { useCmdK } from '../../providers/cmdKProvider';

interface KeyboardHotkeysContextReturnValue {
	/**
	 * @param keyCombination provide the string for which the subsequent callback should be triggered. Example 'ctrl+a'
	 * @param keyCombo provide the string for which the subsequent callback should be triggered. Example 'ctrl+a'
	 * @param callback the callback that should be triggered when the above key combination is being pressed
	 * @returns void
	 */
	registerShortcut: (keyCombination: string, callback: () => void) => void;

	registerShortcut: (keyCombo: string, callback: () => void) => void;
	/**
	 *
	 * @param keyCombination provide the string for which we want to deregister the callback
	 * @param keyCombo provide the string for which we want to deregister the callback
	 * @returns void
	 */
	deregisterShortcut: (keyCombination: string) => void;
	deregisterShortcut: (keyCombo: string) => void;
}

const KeyboardHotkeysContext = createContext<KeyboardHotkeysContextReturnValue>(
@@ -33,7 +34,7 @@ const KeyboardHotkeysContext = createContext<KeyboardHotkeysContextReturnValue>(

const IGNORE_INPUTS = ['input', 'textarea', 'cm-editor']; // Inputs in which hotkey events will be ignored

const useKeyboardHotkeys = (): KeyboardHotkeysContextReturnValue => {
export function useKeyboardHotkeys(): KeyboardHotkeysContextReturnValue {
	const context = useContext(KeyboardHotkeysContext);
	if (!context) {
		throw new Error(
@@ -42,21 +43,45 @@ const useKeyboardHotkeys = (): KeyboardHotkeysContextReturnValue => {
	}

	return context;
};
}

function KeyboardHotkeysProvider({
/**
 * Normalize a set of keys into a stable combo
 * { shift, m, e } → "e+m+shift"
 */
function normalizeChord(keys: Set<string>): string {
	return Array.from(keys).sort().join('+');
}

/**
 * Normalize registration strings
 * "shift+m+e" → "e+m+shift"
 */
function normalizeComboString(combo: string): string {
	return normalizeChord(new Set(combo.split('+')));
}

export function KeyboardHotkeysProvider({
	children,
}: {
	children: JSX.Element;
}): JSX.Element {
	const { open: cmdKOpen } = useCmdK();
	const shortcuts = useRef<Record<string, () => void>>({});
	const pressedKeys = useRef<Set<string>>(new Set());

	const handleKeyPress = (event: KeyboardEvent): void => {
		const { key, ctrlKey, altKey, shiftKey, metaKey, target } = event;
	// A detected valid shortcut waiting to fire
	const pendingCombo = useRef<string | null>(null);

	// Tracks whether user extended the combo
	const wasExtended = useRef(false);

	const handleKeyDown = (event: KeyboardEvent): void => {
		if (event.repeat) return;

		const target = event.target as HTMLElement;
		const isCodeMirrorEditor =
			(target as HTMLElement).closest('.cm-editor') !== null;

		if (
			IGNORE_INPUTS.includes((target as HTMLElement).tagName.toLowerCase()) ||
			isCodeMirrorEditor
@@ -64,61 +89,110 @@ function KeyboardHotkeysProvider({
			return;
		}

		// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/metaKey
		const modifiers = { ctrlKey, altKey, shiftKey, metaKey };
		const key = event.key?.toLowerCase();
		if (!key) return; // Skip if key is undefined

		let shortcutKey = `${key.toLowerCase()}`;
		// If a pending combo exists and a new key is pressed → extension
		if (pendingCombo.current && !pressedKeys.current.has(key)) {
			wasExtended.current = true;
		}

		const isAltKey = `${modifiers.altKey ? '+alt' : ''}`;
		const isShiftKey = `${modifiers.shiftKey ? '+shift' : ''}`;
		pressedKeys.current.add(key);

		// ctrl and cmd have the same functionality for mac and windows parity
		const isMetaKey = `${modifiers.metaKey || modifiers.ctrlKey ? '+meta' : ''}`;
		if (event.shiftKey) pressedKeys.current.add('shift');
		if (event.metaKey || event.ctrlKey) pressedKeys.current.add('meta');
		if (event.altKey) pressedKeys.current.add('alt');

		shortcutKey = shortcutKey + isAltKey + isShiftKey + isMetaKey;
		const combo = normalizeChord(pressedKeys.current);

		if (shortcuts.current[shortcutKey]) {
		if (shortcuts.current[combo]) {
			event.preventDefault();
			event.stopImmediatePropagation();

			shortcuts.current[shortcutKey]();
			event.stopPropagation();
			pendingCombo.current = combo;
			wasExtended.current = false;
		}
	};

	useEffect(() => {
		document.addEventListener('keydown', handleKeyPress);
	const handleKeyUp = (event: KeyboardEvent): void => {
		const key = event.key?.toLowerCase();
		if (!key) return; // Skip if key is undefined

		pressedKeys.current.delete(key);

		if (!event.shiftKey) pressedKeys.current.delete('shift');
		if (!event.metaKey && !event.ctrlKey) pressedKeys.current.delete('meta');
		if (!event.altKey) pressedKeys.current.delete('alt');

		if (!pendingCombo.current) return;

		// Fire only if user did NOT extend the combo
		if (!wasExtended.current) {
			event.preventDefault();
			try {
				shortcuts.current[pendingCombo.current]?.();
			} catch (error) {
				console.error('Error executing hotkey callback:', error);
			}
		}

		pendingCombo.current = null;
		wasExtended.current = false;
	};

	useEffect((): (() => void) => {
		document.addEventListener('keydown', handleKeyDown);
		document.addEventListener('keyup', handleKeyUp);

		const reset = (): void => {
			pressedKeys.current.clear();
			pendingCombo.current = null;
			wasExtended.current = false;
		};

		window.addEventListener('blur', reset);

		return (): void => {
			document.removeEventListener('keydown', handleKeyPress);
			document.removeEventListener('keydown', handleKeyDown);
			document.removeEventListener('keyup', handleKeyUp);
			window.removeEventListener('blur', reset);
		};
	}, []);

	useEffect(() => {
		if (!cmdKOpen) {
			// Reset when palette closes
			pressedKeys.current.clear();
			pendingCombo.current = null;
			wasExtended.current = false;
		}
	}, [cmdKOpen]);

	const registerShortcut = useCallback(
		(keyCombination: string, callback: () => void): void => {
			if (!shortcuts.current[keyCombination]) {
				shortcuts.current[keyCombination] = callback;
			} else if (process.env.NODE_ENV === 'development') {
				throw new Error(
					`This shortcut is already present in current scope :- ${keyCombination}`,
				);
		(keyCombo: string, callback: () => void): void => {
			const normalized = normalizeComboString(keyCombo);

			if (!shortcuts.current[normalized]) {
				shortcuts.current[normalized] = callback;
				return;
			}

			const message = `This shortcut is already present in current scope :- ${keyCombo}`;

			if (process.env.NODE_ENV === 'development') {
				throw new Error(message);
			} else {
				console.error(
					`This shortcut is already present in current scope :- ${keyCombination}`,
				);
				console.error(message);
			}
		},
		[shortcuts],
		[],
	);

	const deregisterShortcut = useCallback(
		(keyCombination: string): void => {
			if (shortcuts.current[keyCombination]) {
				unset(shortcuts.current, keyCombination);
			}
		},
		[shortcuts],
	);
	const deregisterShortcut = useCallback((keyCombo: string) => {
		const normalized = normalizeComboString(keyCombo);
		unset(shortcuts.current, normalized);
	}, []);

	const contextValue = useMemo(
	const ctxValue = useMemo(
		() => ({
			registerShortcut,
			deregisterShortcut,
@@ -127,10 +201,8 @@ function KeyboardHotkeysProvider({
	);

	return (
		<KeyboardHotkeysContext.Provider value={contextValue}>
		<KeyboardHotkeysContext.Provider value={ctxValue}>
			{children}
		</KeyboardHotkeysContext.Provider>
	);
}

export { KeyboardHotkeysProvider, useKeyboardHotkeys };
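
The two normalization helpers are what make registration order-insensitive: every spelling of a chord, whether a registration string or the live set of pressed keys, collapses to the same sorted lookup key. A quick self-contained sketch:

function normalizeChord(keys: Set<string>): string {
	return Array.from(keys).sort().join('+');
}
function normalizeComboString(combo: string): string {
	return normalizeChord(new Set(combo.split('+')));
}

// All spellings of the same chord map to one lookup key.
console.log(normalizeComboString('shift+m+e')); // 'e+m+shift'
console.log(normalizeComboString('e+shift+m')); // 'e+m+shift'
console.log(normalizeChord(new Set(['m', 'shift', 'e']))); // 'e+m+shift'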
22	frontend/src/hooks/metricsExplorer/v2/useGetMetricAlerts.ts	Normal file
@@ -0,0 +1,22 @@
import { getMetricAlerts } from 'api/metricsExplorer/v2/getMetricAlerts';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricAlertsResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricAlerts = (
	metricName: string,
	options?: UseQueryOptions<SuccessResponseV2<GetMetricAlertsResponse>, Error>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricAlertsResponse>, Error>;

export const useGetMetricAlerts: UseGetMetricAlerts = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricAlertsResponse>, Error>({
		queryFn: ({ signal }) => getMetricAlerts(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_ALERTS, metricName],
	});
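
A sketch of consuming this hook from a component, paired with transformMetricAlerts from the utils earlier; the component name and the transform's import path are illustrative, not part of the diff. The sibling hooks below follow the same pattern:

import { useGetMetricAlerts } from 'hooks/metricsExplorer/v2/useGetMetricAlerts';
// transformMetricAlerts is defined in the metric-details utils shown above;
// the exact import path is an assumption.
import { transformMetricAlerts } from './utils';

function LinkedAlerts({ metricName }: { metricName: string }): JSX.Element {
	const { data, isLoading } = useGetMetricAlerts(metricName, {
		enabled: metricName.length > 0, // skip the request until a metric is chosen
	});

	if (isLoading) return <span>Loading…</span>;

	const alerts = transformMetricAlerts(data); // [] on empty/missing response
	return <span>{alerts.length} linked alerts</span>;
}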
@@ -0,0 +1,36 @@
import { getMetricAttributes } from 'api/metricsExplorer/v2/getMetricAttributes';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
	GetMetricAttributesRequest,
	GetMetricAttributesResponse,
} from 'types/api/metricsExplorer/v2';

type UseGetMetricAttributes = (
	requestData: GetMetricAttributesRequest,
	options?: UseQueryOptions<
		SuccessResponseV2<GetMetricAttributesResponse>,
		Error
	>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricAttributesResponse>, Error>;

export const useGetMetricAttributes: UseGetMetricAttributes = (
	requestData,
	options,
	headers,
) => {
	const queryKey = [
		REACT_QUERY_KEY.GET_METRIC_ATTRIBUTES,
		requestData.metricName,
		requestData.start,
		requestData.end,
	];

	return useQuery<SuccessResponseV2<GetMetricAttributesResponse>, Error>({
		queryFn: ({ signal }) => getMetricAttributes(requestData, signal, headers),
		...options,
		queryKey,
	});
};
@@ -0,0 +1,25 @@
import { getMetricDashboards } from 'api/metricsExplorer/v2/getMetricDashboards';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricDashboardsResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricDashboards = (
	metricName: string,
	options?: UseQueryOptions<
		SuccessResponseV2<GetMetricDashboardsResponse>,
		Error
	>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricDashboardsResponse>, Error>;

export const useGetMetricDashboards: UseGetMetricDashboards = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricDashboardsResponse>, Error>({
		queryFn: ({ signal }) => getMetricDashboards(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_DASHBOARDS, metricName],
	});
@@ -0,0 +1,25 @@
import { getMetricHighlights } from 'api/metricsExplorer/v2/getMetricHighlights';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricHighlightsResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricHighlights = (
	metricName: string,
	options?: UseQueryOptions<
		SuccessResponseV2<GetMetricHighlightsResponse>,
		Error
	>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricHighlightsResponse>, Error>;

export const useGetMetricHighlights: UseGetMetricHighlights = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricHighlightsResponse>, Error>({
		queryFn: ({ signal }) => getMetricHighlights(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_HIGHLIGHTS, metricName],
	});
@@ -0,0 +1,22 @@
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import { GetMetricMetadataResponse } from 'types/api/metricsExplorer/v2';

type UseGetMetricMetadata = (
	metricName: string,
	options?: UseQueryOptions<SuccessResponseV2<GetMetricMetadataResponse>, Error>,
	headers?: Record<string, string>,
) => UseQueryResult<SuccessResponseV2<GetMetricMetadataResponse>, Error>;

export const useGetMetricMetadata: UseGetMetricMetadata = (
	metricName,
	options,
	headers,
) =>
	useQuery<SuccessResponseV2<GetMetricMetadataResponse>, Error>({
		queryFn: ({ signal }) => getMetricMetadata(metricName, signal, headers),
		...options,
		queryKey: [REACT_QUERY_KEY.GET_METRIC_METADATA, metricName],
	});
@@ -0,0 +1,22 @@
import updateMetricMetadata from 'api/metricsExplorer/v2/updateMetricMetadata';
import { useMutation, UseMutationResult } from 'react-query';
import { SuccessResponseV2 } from 'types/api';
import {
	UpdateMetricMetadataResponse,
	UseUpdateMetricMetadataProps,
} from 'types/api/metricsExplorer/v2';

export function useUpdateMetricMetadata(): UseMutationResult<
	SuccessResponseV2<UpdateMetricMetadataResponse>,
	Error,
	UseUpdateMetricMetadataProps
> {
	return useMutation<
		SuccessResponseV2<UpdateMetricMetadataResponse>,
		Error,
		UseUpdateMetricMetadataProps
	>({
		mutationFn: ({ metricName, payload }) =>
			updateMetricMetadata(metricName, payload),
	});
}
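
The mutation hook pairs with transformUpdateMetricMetadataRequest from the utils earlier; a hypothetical save handler inside a component (names outside the diff are assumptions):

// Inside a React component. MetricMetadataState comes from the types above,
// transformUpdateMetricMetadataRequest from the utils; the handler name is illustrative.
const updateMetricMetadata = useUpdateMetricMetadata();

function handleSave(metricName: string, state: MetricMetadataState): void {
	updateMetricMetadata.mutate(
		{ metricName, payload: transformUpdateMetricMetadataRequest(state) },
		{ onSuccess: (): void => console.log('metadata updated') },
	);
}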
77	frontend/src/types/api/metricsExplorer/v2.ts	Normal file
@@ -0,0 +1,77 @@
export interface GetMetricMetadataResponse {
	status: string;
	data: {
		description: string;
		type: string;
		unit: string;
		temporality: string;
		isMonotonic: boolean;
	};
}

export interface GetMetricHighlightsResponse {
	status: string;
	data: {
		dataPoints: number;
		lastReceived: number;
		totalTimeSeries: number;
		activeTimeSeries: number;
	};
}

export interface GetMetricAttributesRequest {
	metricName: string;
	start?: number;
	end?: number;
}

export interface GetMetricAttributesResponse {
	status: string;
	data: {
		attributes: {
			key: string;
			values: string[];
			valueCount: number;
		}[];
		totalKeys: number;
	};
}

export interface GetMetricAlertsResponse {
	status: string;
	data: {
		alerts: {
			alertName: string;
			alertId: string;
		}[];
	};
}

export interface GetMetricDashboardsResponse {
	status: string;
	data: {
		dashboards: {
			dashboardName: string;
			dashboardId: string;
			widgetId: string;
			widgetName: string;
		}[];
	};
}

export interface UpdateMetricMetadataRequest {
	type: string;
	description: string;
	temporality: string;
	unit: string;
	isMonotonic: boolean;
}

export interface UpdateMetricMetadataResponse {
	status: string;
}

export interface UseUpdateMetricMetadataProps {
	metricName: string;
	payload: UpdateMetricMetadataRequest;
}
10	frontend/src/utils/pluralize.ts	Normal file
@@ -0,0 +1,10 @@
export function pluralize(
	count: number,
	singular: string,
	plural: string,
): string {
	if (count === 1) {
		return `${count} ${singular}`;
	}
	return `${count} ${plural}`;
}
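
Usage is a one-liner; note that zero takes the plural form:

import { pluralize } from 'utils/pluralize';

pluralize(1, 'alert', 'alerts'); // '1 alert'
pluralize(4, 'alert', 'alerts'); // '4 alerts'
pluralize(0, 'alert', 'alerts'); // '0 alerts'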
43	pkg/apiserver/signozapiserver/promote.go	Normal file
@@ -0,0 +1,43 @@
package signozapiserver

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/gorilla/mux"
)

func (provider *provider) addPromoteRoutes(router *mux.Router) error {
	if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.EditAccess(provider.promoteHandler.HandlePromoteAndIndexPaths), handler.OpenAPIDef{
		ID:                  "PromotePaths",
		Tags:                []string{"promoted_paths", "logs", "json_logs"},
		Summary:             "Promote and index paths",
		Description:         "This endpoint promotes and indexes paths",
		Request:             new([]*promotetypes.PromotePath),
		RequestContentType:  "application/json",
		Response:            nil,
		ResponseContentType: "",
		SuccessStatusCode:   http.StatusCreated,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}

	if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.ViewAccess(provider.promoteHandler.ListPromotedAndIndexedPaths), handler.OpenAPIDef{
		ID:                  "PromotePaths",
		Tags:                []string{"promoted_paths", "logs", "json_logs"},
		Summary:             "Promote and index paths",
		Description:         "This endpoint promotes and indexes paths",
		Request:             nil,
		RequestContentType:  "",
		Response:            new([]*promotetypes.PromotePath),
		ResponseContentType: "",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
	})).Methods(http.MethodGet).GetError(); err != nil {
		return err
	}

	return nil
}
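
Exercised from a client, the two routes above behave roughly as in the sketch below. This is hypothetical: the JSON field names (path, promote, indexes) are assumptions read off how promotetypes.PromotePath is used in the module code further down, not a documented contract. The POST requires edit access and answers 201; the GET requires view access and answers 200:

// Hypothetical client round-trip for the two routes above.
async function promotePaths(baseUrl: string, token: string): Promise<void> {
	const res = await fetch(`${baseUrl}/api/v1/logs/promote_paths`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`,
		},
		// Field names assumed from promotetypes.PromotePath.
		body: JSON.stringify([{ path: 'body.user.id', promote: true, indexes: [] }]),
	});
	if (res.status !== 201) throw new Error(`promote failed: ${res.status}`);

	// List what is currently promoted and/or indexed.
	const list = await fetch(`${baseUrl}/api/v1/logs/promote_paths`, {
		headers: { Authorization: `Bearer ${token}` },
	});
	console.log(await list.json());
}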
@@ -12,6 +12,7 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/authdomain"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/preference"
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/modules/session"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/types"
@@ -30,6 +31,7 @@ type provider struct {
	authDomainHandler authdomain.Handler
	preferenceHandler preference.Handler
	globalHandler     global.Handler
	promoteHandler    promote.Handler
}

func NewFactory(
@@ -41,9 +43,10 @@ func NewFactory(
	authDomainHandler authdomain.Handler,
	preferenceHandler preference.Handler,
	globalHandler global.Handler,
	promoteHandler promote.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
	return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
		return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler)
		return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler)
	})
}

@@ -59,6 +62,7 @@ func newProvider(
	authDomainHandler authdomain.Handler,
	preferenceHandler preference.Handler,
	globalHandler global.Handler,
	promoteHandler promote.Handler,
) (apiserver.APIServer, error) {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
	router := mux.NewRouter().UseEncodedPath()
@@ -73,6 +77,7 @@ func newProvider(
		authDomainHandler: authDomainHandler,
		preferenceHandler: preferenceHandler,
		globalHandler:     globalHandler,
		promoteHandler:    promoteHandler,
	}

	provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -113,6 +118,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
		return err
	}

	if err := provider.addPromoteRoutes(router); err != nil {
		return err
	}

	return nil
}
@@ -209,6 +209,11 @@ func NewUnexpectedf(code Code, format string, args ...any) *base {
	return Newf(TypeInvalidInput, code, format, args...)
}

// NewMethodNotAllowedf is a wrapper around Newf with TypeMethodNotAllowed.
func NewMethodNotAllowedf(code Code, format string, args ...any) *base {
	return Newf(TypeMethodNotAllowed, code, format, args...)
}

// WrapTimeoutf is a wrapper around Wrapf with TypeTimeout.
func WrapTimeoutf(cause error, code Code, format string, args ...any) *base {
	return Wrapf(cause, TypeTimeout, code, format, args...)
60	pkg/modules/promote/implpromote/handler.go	Normal file
@@ -0,0 +1,60 @@
package implpromote

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/binding"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type handler struct {
	module promote.Module
}

func NewHandler(module promote.Module) promote.Handler {
	return &handler{module: module}
}

func (h *handler) HandlePromoteAndIndexPaths(w http.ResponseWriter, r *http.Request) {
	// TODO(Nitya): Use in multi tenant setup
	_, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
		return
	}

	var req []*promotetypes.PromotePath
	if err := binding.JSON.BindBody(r.Body, &req); err != nil {
		render.Error(w, err)
		return
	}

	err = h.module.PromoteAndIndexPaths(r.Context(), req...)
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusCreated, nil)
}

func (h *handler) ListPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request) {
	// TODO(Nitya): Use in multi tenant setup
	_, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
		return
	}

	paths, err := h.module.ListPromotedAndIndexedPaths(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, paths)
}
201	pkg/modules/promote/implpromote/module.go	Normal file
@@ -0,0 +1,201 @@
package implpromote

import (
	"context"
	"maps"
	"slices"
	"strings"

	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

var (
	CodeFailedToCreateIndex        = errors.MustNewCode("failed_to_create_index_promoted_paths")
	CodeFailedToQueryPromotedPaths = errors.MustNewCode("failed_to_query_promoted_paths")
)

type module struct {
	metadataStore  telemetrytypes.MetadataStore
	telemetryStore telemetrystore.TelemetryStore
}

func NewModule(metadataStore telemetrytypes.MetadataStore, telemetrystore telemetrystore.TelemetryStore) promote.Module {
	return &module{metadataStore: metadataStore, telemetryStore: telemetrystore}
}

func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetypes.PromotePath, error) {
	logsIndexes, err := m.metadataStore.ListLogsJSONIndexes(ctx)
	if err != nil {
		return nil, err
	}
	// Flatten the map values (which are slices) into a single slice
	indexes := slices.Concat(slices.Collect(maps.Values(logsIndexes))...)

	aggr := map[string][]promotetypes.WrappedIndex{}
	for _, index := range indexes {
		path, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
		if err != nil {
			return nil, err
		}

		// clean backticks from the path
		path = strings.ReplaceAll(path, "`", "")

		aggr[path] = append(aggr[path], promotetypes.WrappedIndex{
			ColumnType:  columnType,
			Type:        index.Type,
			Granularity: index.Granularity,
		})
	}
	promotedPaths, err := m.listPromotedPaths(ctx)
	if err != nil {
		return nil, err
	}

	response := []promotetypes.PromotePath{}
	for _, path := range promotedPaths {
		fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
		path = telemetrylogs.BodyJSONStringSearchPrefix + path
		item := promotetypes.PromotePath{
			Path:    path,
			Promote: true,
		}
		indexes, ok := aggr[fullPath]
		if ok {
			item.Indexes = indexes
			delete(aggr, fullPath)
		}
		response = append(response, item)
	}

	// add the paths that are not promoted but have indexes
	for path, indexes := range aggr {
		path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
		path = telemetrylogs.BodyJSONStringSearchPrefix + path
		response = append(response, promotetypes.PromotePath{
			Path:    path,
			Indexes: indexes,
		})
	}
	return response, nil
}

func (m *module) listPromotedPaths(ctx context.Context) ([]string, error) {
	paths, err := m.metadataStore.ListPromotedPaths(ctx)
	if err != nil {
		return nil, err
	}
	return slices.Collect(maps.Keys(paths)), nil
}

// PromotePaths inserts provided JSON paths into the promoted paths table for logs queries.
func (m *module) PromotePaths(ctx context.Context, paths []string) error {
	if len(paths) == 0 {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
	}

	return m.metadataStore.PromotePaths(ctx, paths...)
}

// createIndexes creates string ngram + token filter indexes on JSON path subcolumns for LIKE queries.
func (m *module) createIndexes(ctx context.Context, indexes []schemamigrator.Index) error {
	if len(indexes) == 0 {
		return nil
	}

	for _, index := range indexes {
		alterStmt := schemamigrator.AlterTableAddIndex{
			Database: telemetrylogs.DBName,
			Table:    telemetrylogs.LogsV2LocalTableName,
			Index:    index,
		}
		op := alterStmt.OnCluster(m.telemetryStore.Cluster())
		if err := m.telemetryStore.ClickhouseDB().Exec(ctx, op.ToSQL()); err != nil {
			return errors.WrapInternalf(err, CodeFailedToCreateIndex, "failed to create index")
		}
	}

	return nil
}

// PromoteAndIndexPaths handles promoting paths and creating indexes in one call.
func (m *module) PromoteAndIndexPaths(
	ctx context.Context,
	paths ...*promotetypes.PromotePath,
) error {
	if len(paths) == 0 {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
	}

	pathsStr := []string{}
	// validate the paths
	for _, path := range paths {
		if err := path.ValidateAndSetDefaults(); err != nil {
			return err
		}
		pathsStr = append(pathsStr, path.Path)
	}

	existingPromotedPaths, err := m.metadataStore.ListPromotedPaths(ctx, pathsStr...)
	if err != nil {
		return err
	}

	var toInsert []string
	indexes := []schemamigrator.Index{}
	for _, it := range paths {
		if it.Promote {
			if _, promoted := existingPromotedPaths[it.Path]; !promoted {
				toInsert = append(toInsert, it.Path)
			}
		}
		if len(it.Indexes) > 0 {
			parentColumn := telemetrylogs.LogsV2BodyJSONColumn
			// if the path is already promoted or is being promoted, add it to the promoted column
			if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
				parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
			}

			for _, index := range it.Indexes {
				var typeIndex schemamigrator.IndexType
				switch {
				case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeNGramBF)):
					typeIndex = schemamigrator.IndexTypeNGramBF
				case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeTokenBF)):
					typeIndex = schemamigrator.IndexTypeTokenBF
				case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeMinMax)):
					typeIndex = schemamigrator.IndexTypeMinMax
				default:
					return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid index type: %s", index.Type)
				}
				indexes = append(indexes, schemamigrator.Index{
					Name:        schemamigrator.JSONSubColumnIndexName(parentColumn, it.Path, index.JSONDataType.StringValue(), typeIndex),
					Expression:  schemamigrator.JSONSubColumnIndexExpr(parentColumn, it.Path, index.JSONDataType.StringValue()),
					Type:        index.Type,
					Granularity: index.Granularity,
				})
			}
		}
	}

	if len(toInsert) > 0 {
		err := m.PromotePaths(ctx, toInsert)
		if err != nil {
			return err
		}
	}

	if len(indexes) > 0 {
		if err := m.createIndexes(ctx, indexes); err != nil {
			return err
		}
	}

	return nil
}
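
In ListPromotedAndIndexedPaths above, index metadata is grouped by unfolded path and then joined against the promoted-paths set, with leftover index-only paths appended at the end. Stripped of the ClickHouse prefix bookkeeping, the merge reduces to this TypeScript sketch (names illustrative):

interface WrappedIndex {
	type: string;
	granularity: number;
}

function mergePromotedAndIndexed(
	promoted: string[],
	indexesByPath: Map<string, WrappedIndex[]>,
): { path: string; promote: boolean; indexes: WrappedIndex[] }[] {
	// Promoted paths come first, carrying any indexes found for them.
	const out = promoted.map((path) => ({
		path,
		promote: true,
		indexes: indexesByPath.get(path) ?? [],
	}));
	// Paths that carry indexes but were never promoted are still reported.
	for (const [path, indexes] of indexesByPath) {
		if (!promoted.includes(path)) out.push({ path, promote: false, indexes });
	}
	return out;
}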
18	pkg/modules/promote/promote.go	Normal file
@@ -0,0 +1,18 @@
package promote

import (
	"context"
	"net/http"

	"github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type Module interface {
	ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetypes.PromotePath, error)
	PromoteAndIndexPaths(ctx context.Context, paths ...*promotetypes.PromotePath) error
}

type Handler interface {
	HandlePromoteAndIndexPaths(w http.ResponseWriter, r *http.Request)
	ListPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request)
}
@@ -43,6 +43,7 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/app/traces/tracedetail"
	"github.com/SigNoz/signoz/pkg/query-service/common"
	"github.com/SigNoz/signoz/pkg/query-service/constants"

	chErrors "github.com/SigNoz/signoz/pkg/query-service/errors"
	"github.com/SigNoz/signoz/pkg/query-service/metrics"
	"github.com/SigNoz/signoz/pkg/query-service/model"
@@ -95,7 +96,6 @@ const (
	signozLocalTableAttributesMetadata = "attributes_metadata"

	signozUpdatedMetricsMetadataLocalTable = "updated_metadata"
	signozMetricsMetadataLocalTable        = "metadata"
	signozUpdatedMetricsMetadataTable      = "distributed_updated_metadata"
	minTimespanForProgressiveSearch        = time.Hour
	minTimespanForProgressiveSearchMargin  = time.Minute
@@ -6440,73 +6440,6 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID
	return cachedMetadata, nil
}

// GetFirstSeenFromMetricMetadata queries the metadata table to get the first_seen timestamp
// for each metric-attribute-value combination.
// Returns a map where key is `model.MetricMetadataLookupKey` and value is first_seen in milliseconds.
func (r *ClickHouseReader) GetFirstSeenFromMetricMetadata(ctx context.Context, lookupKeys []model.MetricMetadataLookupKey) (map[model.MetricMetadataLookupKey]int64, error) {
	// Chunk the lookup keys to avoid overly large queries (max 300 tuples per query)
	const chunkSize = 300
	result := make(map[model.MetricMetadataLookupKey]int64)

	for i := 0; i < len(lookupKeys); i += chunkSize {
		end := i + chunkSize
		if end > len(lookupKeys) {
			end = len(lookupKeys)
		}
		chunk := lookupKeys[i:end]

		// Build the IN clause values - ClickHouse uses tuple syntax with placeholders
		var valueStrings []string
		var args []interface{}

		for _, key := range chunk {
			valueStrings = append(valueStrings, "(?, ?, ?)")
			args = append(args, key.MetricName, key.AttributeName, key.AttributeValue)
		}

		query := fmt.Sprintf(`
			SELECT
				m.metric_name,
				m.attr_name,
				m.attr_string_value,
				min(m.last_reported_unix_milli) AS first_seen
			FROM %s.%s AS m
			WHERE (m.metric_name, m.attr_name, m.attr_string_value) IN (%s)
			GROUP BY m.metric_name, m.attr_name, m.attr_string_value
			ORDER BY first_seen`,
			signozMetricDBName, signozMetricsMetadataLocalTable, strings.Join(valueStrings, ", "))

		valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
		rows, err := r.db.Query(valueCtx, query, args...)
		if err != nil {
			zap.L().Error("Error querying metadata for first_seen", zap.Error(err))
			return nil, &model.ApiError{Typ: "ClickhouseErr", Err: fmt.Errorf("error querying metadata for first_seen: %v", err)}
		}

		for rows.Next() {
			var metricName, attrName, attrValue string
			var firstSeen uint64
			if err := rows.Scan(&metricName, &attrName, &attrValue, &firstSeen); err != nil {
				rows.Close()
				return nil, &model.ApiError{Typ: "ClickhouseErr", Err: fmt.Errorf("error scanning metadata first_seen result: %v", err)}
			}
			result[model.MetricMetadataLookupKey{
				MetricName:     metricName,
				AttributeName:  attrName,
				AttributeValue: attrValue,
			}] = int64(firstSeen)
		}

		if err := rows.Err(); err != nil {
			rows.Close()
			return nil, &model.ApiError{Typ: "ClickhouseErr", Err: fmt.Errorf("error iterating metadata first_seen results: %v", err)}
		}
		rows.Close()
	}

	return result, nil
}

func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.SearchTracesParams) (*[]model.SearchSpansResult, error) {
	searchSpansResult := []model.SearchSpansResult{
		{
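
The removed GetFirstSeenFromMetricMetadata batches lookup keys into chunks of at most 300 and expands each chunk into (?, ?, ?) tuples for the IN clause. The placeholder and argument bookkeeping, shown as a TypeScript sketch purely for clarity:

interface LookupKey {
	metricName: string;
	attributeName: string;
	attributeValue: string;
}

// One entry per query that would be issued against ClickHouse.
function buildChunkedQueries(
	keys: LookupKey[],
	chunkSize = 300,
): { placeholders: string; args: string[] }[] {
	const out: { placeholders: string; args: string[] }[] = [];
	for (let i = 0; i < keys.length; i += chunkSize) {
		const chunk = keys.slice(i, i + chunkSize);
		out.push({
			placeholders: chunk.map(() => '(?, ?, ?)').join(', '), // tuple per key
			args: chunk.flatMap((k) => [k.metricName, k.attributeName, k.attributeValue]),
		});
	}
	return out;
}
// 650 keys → 3 queries of 300, 300 and 50 tuples.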
@@ -555,6 +555,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
	router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
	router.HandleFunc("/api/v2/settings/ttl", am.AdminAccess(aH.setCustomRetentionTTL)).Methods(http.MethodPost)
	router.HandleFunc("/api/v2/settings/ttl", am.ViewAccess(aH.getCustomRetentionTTL)).Methods(http.MethodGet)

	router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.Signoz.Handlers.Apdex.Set)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/settings/apdex", am.ViewAccess(aH.Signoz.Handlers.Apdex.Get)).Methods(http.MethodGet)
@@ -5,6 +5,7 @@ import (

	"github.com/SigNoz/signoz/pkg/query-service/constants"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/stretchr/testify/assert"
)

func Test_getClickhouseKey(t *testing.T) {
@@ -1210,9 +1211,8 @@ func TestPrepareLogsQuery(t *testing.T) {
				t.Errorf("PrepareLogsQuery() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("PrepareLogsQuery() = %v, want %v", got, tt.want)
			}

			assert.Equal(t, tt.want, got)
		})
	}
}
@@ -361,7 +361,6 @@ func makeRulesManager(
		RuleStore:        ruleStore,
		MaintenanceStore: maintenanceStore,
		SqlStore:         sqlstore,
		QueryParser:      queryParser,
	}

	// create Manager
@@ -1,17 +1,8 @@
package converter

import "github.com/SigNoz/signoz/pkg/errors"

// Unit represents a unit of measurement
type Unit string

func (u Unit) Validate() error {
	if !IsValidUnit(u) {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid unit: %s", u)
	}
	return nil
}

// Value represents a value with a unit of measurement
type Value struct {
	F float64
@@ -69,27 +60,6 @@ func FromUnit(u Unit) Converter {
	}
}

// IsValidUnit returns true if the given unit is valid
func IsValidUnit(u Unit) bool {
	switch u {
	// Duration unit
	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min",
		// Data unit
		"bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy",
		// Data rate unit
		"binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s",
		// Percent unit
		"percent", "percentunit", "%",
		// Bool unit
		"bool", "bool_yes_no", "bool_true_false", "bool_1_0",
		// Throughput unit
		"cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm", "{count}/s", "{ops}/s", "{req}/s", "{read}/s", "{write}/s", "{iops}/s", "{count}/min", "{ops}/min", "{read}/min", "{write}/min":
		return true
	default:
		return false
	}
}

func UnitToName(u string) string {
	switch u {
	case "ns":
@@ -81,7 +81,6 @@ type Reader interface {
	CheckClickHouse(ctx context.Context) error

	GetMetricMetadata(context.Context, valuer.UUID, string, string) (*v3.MetricMetadataResponse, error)
	GetFirstSeenFromMetricMetadata(ctx context.Context, lookupKeys []model.MetricMetadataLookupKey) (map[model.MetricMetadataLookupKey]int64, error)

	AddRuleStateHistory(ctx context.Context, ruleStateHistory []model.RuleStateHistory) error
	GetOverallStateTransitions(ctx context.Context, ruleID string, params *model.QueryRuleStateHistory) ([]model.ReleStateItem, error)
@@ -516,9 +516,3 @@ type LogsAggregateParams struct {
	Function    string `json:"function"`
	StepSeconds int    `json:"step"`
}

type MetricMetadataLookupKey struct {
	MetricName     string
	AttributeName  string
	AttributeValue string
}
@@ -9,7 +9,6 @@ import (
	"strings"
	"time"

	"github.com/SigNoz/signoz/pkg/query-service/converter"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/pkg/errors"
	"go.uber.org/zap"
@@ -641,13 +640,6 @@ func (c *CompositeQuery) Validate() error {
		return fmt.Errorf("query type is invalid: %w", err)
	}

	// Validate Unit - if provided (non-empty), it should be a valid unit string
	if c.Unit != "" {
		if err := converter.Unit(c.Unit).Validate(); err != nil {
			return err
		}
	}

	return nil
}
@@ -13,9 +13,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
@@ -90,11 +88,6 @@ type BaseRule struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
|
||||
evaluation ruletypes.Evaluation
|
||||
|
||||
// newGroupEvalDelay is the grace period for new alert groups
|
||||
newGroupEvalDelay *time.Duration
|
||||
|
||||
queryParser queryparser.QueryParser
|
||||
}
|
||||
|
||||
type RuleOption func(*BaseRule)
|
||||
@@ -129,12 +122,6 @@ func WithSQLStore(sqlstore sqlstore.SQLStore) RuleOption {
|
||||
}
|
||||
}
|
||||
|
||||
func WithQueryParser(queryParser queryparser.QueryParser) RuleOption {
|
||||
return func(r *BaseRule) {
|
||||
r.queryParser = queryParser
|
||||
}
|
||||
}
|
||||
|
||||
func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader interfaces.Reader, opts ...RuleOption) (*BaseRule, error) {
|
||||
if p.RuleCondition == nil || !p.RuleCondition.IsValid() {
|
||||
return nil, fmt.Errorf("invalid rule condition")
|
||||
@@ -167,12 +154,6 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
|
||||
evaluation: evaluation,
|
||||
}
|
||||
|
||||
// Store newGroupEvalDelay and groupBy keys from NotificationSettings
|
||||
if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
|
||||
newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
|
||||
baseRule.newGroupEvalDelay = &newGroupEvalDelay
|
||||
}
|
||||
|
||||
if baseRule.evalWindow == 0 {
|
||||
baseRule.evalWindow = 5 * time.Minute
|
||||
}
|
||||
@@ -547,166 +528,3 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ShouldSkipNewGroups returns true if new group filtering should be applied
|
||||
func (r *BaseRule) ShouldSkipNewGroups() bool {
|
||||
return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
|
||||
}
|
||||
|
||||
// isFilterNewSeriesSupported checks if the query is supported for new series filtering
|
||||
func (r *BaseRule) isFilterNewSeriesSupported() bool {
|
||||
if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
for _, query := range r.ruleCondition.CompositeQuery.Queries {
|
||||
if query.Type != qbtypes.QueryTypeBuilder {
|
||||
continue
|
||||
}
|
||||
switch query.Spec.(type) {
|
||||
// query spec is for Logs or Traces, return with blank metric names and group by fields
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// extractMetricAndGroupBys extracts metric names and groupBy keys from the rule's query.
|
||||
// TODO: implement caching for query parsing results to avoid re-parsing the query + cache invalidation
|
||||
func (r *BaseRule) extractMetricAndGroupBys(ctx context.Context) ([]string, []string, error) {
|
||||
var metricNames []string
|
||||
var groupedFields []string
|
||||
|
||||
// check to avoid processing the query for Logs and Traces
|
||||
// as excluding new series is not supported for Logs and Traces for now
|
||||
if !r.isFilterNewSeriesSupported() {
|
||||
return metricNames, groupedFields, nil
|
||||
}
|
||||
|
||||
result, err := r.queryParser.AnalyzeCompositeQuery(ctx, r.ruleCondition.CompositeQuery)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
metricNames = result.MetricNames
|
||||
for _, col := range result.GroupByColumns {
|
||||
groupedFields = append(groupedFields, col.OriginField)
|
||||
}
|
||||
|
||||
return metricNames, groupedFields, nil
|
||||
}
|
||||
|
||||
// FilterNewSeriesIndexes filters out items that are too new based on metadata first_seen timestamps.
|
||||
// Returns the indexes that should be skipped (not included in the result).
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []v3.Series) ([]int, error) {
    // Extract metric names and groupBy keys
    metricNames, groupedFields, err := r.extractMetricAndGroupBys(ctx)
    if err != nil {
        return nil, err
    }

    if len(metricNames) == 0 || len(groupedFields) == 0 {
        // No metrics or groupBy keys, nothing to filter (non-ideal case, return early)
        return []int{}, nil
    }

    // Build lookup keys from series which will be used to query metadata from CH
    lookupKeys := make([]model.MetricMetadataLookupKey, 0)
    seriesIdxToLookupKeys := make(map[int][]model.MetricMetadataLookupKey) // series index -> lookup keys

    for i := 0; i < len(series); i++ {
        metricLabelMap := series[i].Labels

        // Collect groupBy attribute-value pairs for this series
        seriesKeys := make([]model.MetricMetadataLookupKey, 0)

        for _, metricName := range metricNames {
            for _, groupByKey := range groupedFields {
                if attrValue, ok := metricLabelMap[groupByKey]; ok {
                    lookupKey := model.MetricMetadataLookupKey{
                        MetricName: metricName,
                        AttributeName: groupByKey,
                        AttributeValue: attrValue,
                    }
                    lookupKeys = append(lookupKeys, lookupKey)
                    seriesKeys = append(seriesKeys, lookupKey)
                }
            }
        }

        if len(seriesKeys) > 0 {
            seriesIdxToLookupKeys[i] = seriesKeys
        }
    }

    if len(lookupKeys) == 0 {
        // No lookup keys to query, return empty skip list
        // this can happen when the series has no labels at all
        // in this case, we include all series as we don't know if it is new or old series
        return []int{}, nil
    }

    // unique lookup keys
    uniqueLookupKeysMap := make(map[model.MetricMetadataLookupKey]struct{})
    uniqueLookupKeys := make([]model.MetricMetadataLookupKey, 0)
    for _, key := range lookupKeys {
        if _, ok := uniqueLookupKeysMap[key]; !ok {
            uniqueLookupKeysMap[key] = struct{}{}
            uniqueLookupKeys = append(uniqueLookupKeys, key)
        }
    }
    // Query metadata for first_seen timestamps
    firstSeenMap, err := r.reader.GetFirstSeenFromMetricMetadata(ctx, uniqueLookupKeys)
    if err != nil {
        return nil, err
    }

    // Filter series based on first_seen + delay
    skipIndexes := make([]int, 0)
    evalTimeMs := ts.UnixMilli()
    newGroupEvalDelayMs := r.newGroupEvalDelay.Milliseconds()

    for i := 0; i < len(series); i++ {
        seriesKeys, ok := seriesIdxToLookupKeys[i]
        if !ok {
            // No matching labels used in groupBy from this series, don't exclude it
            // as we can't decide if it is new or old series
            continue
        }

        // Find the maximum first_seen across all groupBy attributes for this series
        // if the latest is old enough we're good, if latest is new we need to skip it
        maxFirstSeen := int64(0)
        // metadataFound tracks if we have metadata for any of the lookup keys
        metadataFound := false

        for _, lookupKey := range seriesKeys {
            if firstSeen, exists := firstSeenMap[lookupKey]; exists {
                metadataFound = true
                if firstSeen > maxFirstSeen {
                    maxFirstSeen = firstSeen
                }
            }
        }

        // if we don't have metadata for any of the lookup keys, we can't decide if it is new or old series
        // in that case, we don't add it to the skip indexes
        if !metadataFound {
            continue
        }

        // Check if first_seen + delay has passed
        if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
            // Still within grace period, skip this series
            skipIndexes = append(skipIndexes, i)
            continue
        }

        // Old enough, don't skip this series
    }

    if r.logger != nil && len(skipIndexes) > 0 {
        r.logger.InfoContext(ctx, "Filtered new series", "rule_name", r.Name(), "skipped_count", len(skipIndexes), "total_count", len(series), "delay_ms", newGroupEvalDelayMs)
    }

    return skipIndexes, nil
}
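
A minimal generic sketch of how callers apply the skip indexes returned by FilterNewSeries (the PromRule and ThresholdRule helpers further down inline the same logic; the helper name here is hypothetical):

// applySkipIndexes drops the items whose indexes were flagged as too new.
func applySkipIndexes[T any](items []T, skipIndexes []int) []T {
    skip := make(map[int]struct{}, len(skipIndexes))
    for _, idx := range skipIndexes {
        skip[idx] = struct{}{}
    }
    kept := make([]T, 0, len(items)-len(skipIndexes))
    for i, item := range items {
        if _, ok := skip[i]; !ok {
            kept = append(kept, item)
        }
    }
    return kept
}
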
@@ -1,31 +1,12 @@
package rules

import (
    "context"
    "fmt"
    "testing"
    "time"

    "github.com/stretchr/testify/require"

    "github.com/SigNoz/signoz/pkg/cache"
    "github.com/SigNoz/signoz/pkg/cache/cachetest"
    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    "github.com/SigNoz/signoz/pkg/prometheus"
    "github.com/SigNoz/signoz/pkg/prometheus/prometheustest"
    "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    "github.com/SigNoz/signoz/pkg/queryparser"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
    "github.com/SigNoz/signoz/pkg/types/metrictypes"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"

    cmock "github.com/srikanthccv/ClickHouse-go-mock"
)

func TestBaseRule_RequireMinPoints(t *testing.T) {
@@ -100,704 +81,3 @@ func TestBaseRule_RequireMinPoints(t *testing.T) {
        })
    }
}

// createTestSeries creates a v3.Series with the given labels and optional points.
// The points are not strictly required: only the labels matter here, since they
// are used to build the metadata lookup keys whose first_seen timestamps decide
// whether a series is new or old.
func createTestSeries(labels map[string]string, points []v3.Point) v3.Series {
    if points == nil {
        points = []v3.Point{}
    }
    return v3.Series{
        Labels: labels,
        Points: points,
    }
}

// calculateFirstSeen calculates first_seen timestamp based on evalTime, delay, and isOld flag
func calculateFirstSeen(evalTime time.Time, delay time.Duration, isOld bool) int64 {
    if isOld {
        // Old: evalTime - (2 * delay)
        return evalTime.Add(-2 * delay).UnixMilli()
    }
    // New: evalTime - (delay / 2)
    return evalTime.Add(-delay / 2).UnixMilli()
}
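
A worked example of the helper above, with illustrative values: with a two-minute delay, the "old" stamp lands outside the grace window and the "new" stamp inside it.

// Illustrative only: assumes calculateFirstSeen as defined above.
evalTime := time.Unix(1700000000, 0)
delay := 2 * time.Minute
oldTS := calculateFirstSeen(evalTime, delay, true)  // evalTime - 4m: first_seen + delay <= evalTime, so the series is kept
newTS := calculateFirstSeen(evalTime, delay, false) // evalTime - 1m: first_seen + delay >  evalTime, so the series is skipped
_, _ = oldTS, newTS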

// createFirstSeenMap creates a first_seen map for a series with given attributes
// metricName: the metric name
// groupByFields: list of groupBy field names
// evalTime: evaluation time
// delay: newGroupEvalDelay
// isOld: whether the series is old (true) or new (false)
// attributeValues: values for each groupBy field in order
func createFirstSeenMap(metricName string, groupByFields []string, evalTime time.Time, delay time.Duration, isOld bool, attributeValues ...string) map[model.MetricMetadataLookupKey]int64 {
    result := make(map[model.MetricMetadataLookupKey]int64)
    firstSeen := calculateFirstSeen(evalTime, delay, isOld)

    for i, field := range groupByFields {
        if i < len(attributeValues) {
            key := model.MetricMetadataLookupKey{
                MetricName: metricName,
                AttributeName: field,
                AttributeValue: attributeValues[i],
            }
            result[key] = firstSeen
        }
    }

    return result
}

// mergeFirstSeenMaps merges multiple first_seen maps into one.
// When the same key exists in multiple maps, it keeps the lowest value,
// which simulates the ClickHouse query's behavior of finding the minimum
// first_seen timestamp across all groupBy attributes for a single series.
func mergeFirstSeenMaps(maps ...map[model.MetricMetadataLookupKey]int64) map[model.MetricMetadataLookupKey]int64 {
    result := make(map[model.MetricMetadataLookupKey]int64)
    for _, m := range maps {
        for k, v := range m {
            if existingValue, exists := result[k]; exists {
                // Keep the lowest value
                if v < existingValue {
                    result[k] = v
                }
            } else {
                result[k] = v
            }
        }
    }
    return result
}
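
A quick illustration of the merge rule, with made-up values: when two maps carry the same key, the earlier first_seen wins.

// Illustrative only: assumes mergeFirstSeenMaps and the model package above.
k := model.MetricMetadataLookupKey{MetricName: "request_total", AttributeName: "env", AttributeValue: "prod"}
a := map[model.MetricMetadataLookupKey]int64{k: 100}
b := map[model.MetricMetadataLookupKey]int64{k: 50}
merged := mergeFirstSeenMaps(a, b) // merged[k] == 50
_ = merged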

// createPostableRule creates a PostableRule with the given CompositeQuery
func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRule {
    return ruletypes.PostableRule{
        AlertName: "Test Rule",
        AlertType: ruletypes.AlertTypeMetric,
        RuleType: ruletypes.RuleTypeThreshold,
        Evaluation: &ruletypes.EvaluationEnvelope{
            Kind: ruletypes.RollingEvaluation,
            Spec: ruletypes.RollingWindow{
                EvalWindow: ruletypes.Duration(5 * time.Minute),
                Frequency: ruletypes.Duration(1 * time.Minute),
            },
        },
        RuleCondition: &ruletypes.RuleCondition{
            CompositeQuery: compositeQuery,
            Thresholds: &ruletypes.RuleThresholdData{
                Kind: ruletypes.BasicThresholdKind,
                Spec: ruletypes.BasicRuleThresholds{
                    {
                        Name: "test-threshold",
                        TargetValue: func() *float64 { v := 1.0; return &v }(),
                        CompareOp: ruletypes.ValueIsAbove,
                        MatchType: ruletypes.AtleastOnce,
                    },
                },
            },
        },
    }
}

// setupMetadataQueryMock sets up the ClickHouse mock for GetFirstSeenFromMetricMetadata query
func setupMetadataQueryMock(telemetryStore *telemetrystoretest.Provider, metricNames []string, groupedFields []string, series []v3.Series, firstSeenMap map[model.MetricMetadataLookupKey]int64) {
    if len(firstSeenMap) == 0 || len(series) == 0 {
        return
    }

    // Build args from series the same way we build lookup keys in FilterNewSeries
    var args []any
    uniqueArgsMap := make(map[string]struct{})
    for _, s := range series {
        labelMap := s.Labels
        for _, metricName := range metricNames {
            for _, groupByKey := range groupedFields {
                if attrValue, ok := labelMap[groupByKey]; ok {
                    argKey := fmt.Sprintf("%s,%s,%s", metricName, groupByKey, attrValue)
                    if _, ok := uniqueArgsMap[argKey]; ok {
                        continue
                    }
                    uniqueArgsMap[argKey] = struct{}{}
                    args = append(args, metricName, groupByKey, attrValue)
                }
            }
        }
    }

    // Build the query pattern - it uses IN clause with tuples
    // We'll match any query that contains the metadata table pattern
    metadataCols := []cmock.ColumnType{
        {Name: "metric_name", Type: "String"},
        {Name: "attr_name", Type: "String"},
        {Name: "attr_string_value", Type: "String"},
        {Name: "first_seen", Type: "UInt64"},
    }

    var values [][]interface{}
    for key, firstSeen := range firstSeenMap {
        values = append(values, []interface{}{
            key.MetricName,
            key.AttributeName,
            key.AttributeValue,
            uint64(firstSeen),
        })
    }

    rows := cmock.NewRows(metadataCols, values)
    telemetryStore.Mock().
        ExpectQuery("SELECT any").
        WithArgs(args...).
        WillReturnRows(rows)
}

// filterNewSeriesTestCase represents a test case for FilterNewSeries
type filterNewSeriesTestCase struct {
    name string
    compositeQuery *v3.CompositeQuery
    series []v3.Series
    firstSeenMap map[model.MetricMetadataLookupKey]int64
    newGroupEvalDelay *time.Duration
    evalTime time.Time
    expectedSkipIndexes []int
    expectError bool
}

func TestBaseRule_FilterNewSeries(t *testing.T) {
    defaultEvalTime := time.Unix(1700000000, 0)
    defaultDelay := 2 * time.Minute
    defaultGroupByFields := []string{"service_name", "env"}

    logger := instrumentationtest.New().Logger()
    settings := instrumentationtest.New().ToProviderSettings()

    tests := []filterNewSeriesTestCase{
        {
            name: "mixed old and new series - Builder query",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "request_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "env"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
                createTestSeries(map[string]string{"service_name": "svc-new", "env": "prod"}, nil),
                createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
            },
            firstSeenMap: mergeFirstSeenMaps(
                createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
                createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
                // svc-missing has no metadata, so it will not be skipped
            ),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{1}, // svc-new is still within the grace window; svc-missing has no metadata, so it is kept
        },
        {
            name: "all new series - PromQL query",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypePromQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name: "P1",
                            Query: "sum by (service_name,env) (rate(request_total[5m]))",
                            Disabled: false,
                            Step: qbtypes.Step{Duration: 0},
                            Stats: false,
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc-new1", "env": "prod"}, nil),
                createTestSeries(map[string]string{"service_name": "svc-new2", "env": "stage"}, nil),
            },
            firstSeenMap: mergeFirstSeenMaps(
                createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
                createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
            ),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{0, 1}, // all should be skipped
        },
        {
            name: "all old series - ClickHouse query",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeClickHouseSQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeClickHouseSQL,
                        Spec: qbtypes.ClickHouseQuery{
                            Name: "CH1",
                            Query: "SELECT service_name, env FROM metrics WHERE metric_name='request_total' GROUP BY service_name, env",
                            Disabled: false,
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
                createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
            },
            firstSeenMap: mergeFirstSeenMaps(
                createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
                createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
            ),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // none should be skipped
        },
        {
            name: "no grouping in query - Builder",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "request_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{},
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
            },
            firstSeenMap: make(map[model.MetricMetadataLookupKey]int64),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // early return, no filtering
        },
        {
            name: "no metric names - Builder",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{},
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "env"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
            },
            firstSeenMap: make(map[model.MetricMetadataLookupKey]int64),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // early return, no filtering
        },
        {
            name: "series with no matching labels - Builder",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "request_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "env"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
            },
            firstSeenMap: make(map[model.MetricMetadataLookupKey]int64),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // series included as we can't decide if it's new or old
        },
        {
            name: "series with missing metadata - PromQL",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypePromQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name: "P1",
                            Query: "sum by (service_name,env) (rate(request_total[5m]))",
                            Disabled: false,
                            Step: qbtypes.Step{Duration: 0},
                            Stats: false,
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
                createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
            },
            firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
            // svc-no-metadata has no entry in firstSeenMap
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // svc-no-metadata should not be skipped as we can't decide if it is new or old series
        },
        {
            name: "series with partial metadata - ClickHouse",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeClickHouseSQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeClickHouseSQL,
                        Spec: qbtypes.ClickHouseQuery{
                            Name: "CH1",
                            Query: "SELECT service_name, env FROM metrics WHERE metric_name='request_total' GROUP BY service_name, env",
                            Disabled: false,
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
            },
            // Only provide metadata for service_name, not env
            firstSeenMap: map[model.MetricMetadataLookupKey]int64{
                {MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
                // env metadata is missing
            },
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // has some metadata, uses max first_seen which is old
        },
        {
            name: "empty series array - Builder",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "request_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "env"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{},
            firstSeenMap: make(map[model.MetricMetadataLookupKey]int64),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{},
        },
        {
            name: "zero delay - Builder",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "request_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "env"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
            },
            firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
            newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // with zero delay, all series pass
        },
        {
            name: "multiple metrics with same groupBy keys - Builder",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "request_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                                {
                                    MetricName: "error_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "env"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
            },
            firstSeenMap: mergeFirstSeenMaps(
                createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
                createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
            ),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{},
        },
        {
            name: "series with multiple groupBy attributes where one is new and one is old - Builder",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "request_total",
                                    TimeAggregation: metrictypes.TimeAggregationCount,
                                    SpaceAggregation: metrictypes.SpaceAggregationSum,
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "env"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
            },
            // service_name is old, env is new - should use max (new)
            firstSeenMap: mergeFirstSeenMaps(
                createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
                createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
            ),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{0}, // max first_seen is new, so should skip
        },
        {
            name: "Logs query - should skip filtering and return empty skip indexes",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalLogs,
                            Aggregations: []qbtypes.LogAggregation{
                                {
                                    Expression: "count()",
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc1"}, nil),
                createTestSeries(map[string]string{"service_name": "svc2"}, nil),
            },
            firstSeenMap: make(map[model.MetricMetadataLookupKey]int64),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // Logs queries should return early, no filtering
        },
        {
            name: "Traces query - should skip filtering and return empty skip indexes",
            compositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypeBuilder,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                            Name: "A",
                            StepInterval: qbtypes.Step{Duration: 60 * time.Second},
                            Signal: telemetrytypes.SignalTraces,
                            Aggregations: []qbtypes.TraceAggregation{
                                {
                                    Expression: "count()",
                                },
                            },
                            GroupBy: []qbtypes.GroupByKey{
                                {TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service_name"}},
                            },
                        },
                    },
                },
            },
            series: []v3.Series{
                createTestSeries(map[string]string{"service_name": "svc1"}, nil),
                createTestSeries(map[string]string{"service_name": "svc2"}, nil),
            },
            firstSeenMap: make(map[model.MetricMetadataLookupKey]int64),
            newGroupEvalDelay: &defaultDelay,
            evalTime: defaultEvalTime,
            expectedSkipIndexes: []int{}, // Traces queries should return early, no filtering
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            // Create postableRule from compositeQuery
            postableRule := createPostableRule(tt.compositeQuery)

            // Setup telemetry store mock
            telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})

            // Create query parser
            queryParser := queryparser.New(settings)

            // Use query parser to extract metric names and groupBy fields
            analyzeResult, err := queryParser.AnalyzeCompositeQuery(context.Background(), tt.compositeQuery)
            require.NoError(t, err)

            metricNames := analyzeResult.MetricNames
            groupedFields := []string{}
            for _, col := range analyzeResult.GroupByColumns {
                groupedFields = append(groupedFields, col.OriginField)
            }

            // Setup metadata query mock
            setupMetadataQueryMock(telemetryStore, metricNames, groupedFields, tt.series, tt.firstSeenMap)

            // Create reader with mocked telemetry store
            readerCache, err := cachetest.New(
                cache.Config{
                    Provider: "memory",
                    Memory: cache.Memory{
                        NumCounters: 10 * 1000,
                        MaxCost: 1 << 26,
                    },
                },
            )
            require.NoError(t, err)

            options := clickhouseReader.NewOptions("", "", "archiveNamespace")
            reader := clickhouseReader.NewReader(
                nil,
                telemetryStore,
                prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
                "",
                time.Duration(time.Second),
                nil,
                readerCache,
                options,
            )

            // Set newGroupEvalDelay in NotificationSettings if provided
            if tt.newGroupEvalDelay != nil {
                postableRule.NotificationSettings = &ruletypes.NotificationSettings{
                    NewGroupEvalDelay: func() *ruletypes.Duration {
                        d := ruletypes.Duration(*tt.newGroupEvalDelay)
                        return &d
                    }(),
                }
            }

            // Create BaseRule using NewBaseRule
            rule, err := NewBaseRule("test-rule", valuer.GenerateUUID(), &postableRule, reader, WithQueryParser(queryParser), WithLogger(logger))
            require.NoError(t, err)

            skipIndexes, err := rule.FilterNewSeries(context.Background(), tt.evalTime, tt.series)

            if tt.expectError {
                require.Error(t, err)
                return
            }

            require.NoError(t, err)
            require.ElementsMatch(t, tt.expectedSkipIndexes, skipIndexes, "skip indexes should match")
        })
    }
}

@@ -11,7 +11,6 @@ import (
    "time"

    "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
    "github.com/SigNoz/signoz/pkg/queryparser"

    "go.uber.org/zap"

@@ -104,7 +103,6 @@ type ManagerOptions struct {
    RuleStore ruletypes.RuleStore
    MaintenanceStore ruletypes.MaintenanceStore
    SqlStore sqlstore.SQLStore
    QueryParser queryparser.QueryParser
}

// The Manager manages recording and alerting rules.
@@ -127,8 +125,6 @@ type Manager struct {
    alertmanager alertmanager.Alertmanager
    sqlstore sqlstore.SQLStore
    orgGetter organization.Getter
    // queryParser is used for parsing queries for rules
    queryParser queryparser.QueryParser
}

func defaultOptions(o *ManagerOptions) *ManagerOptions {
@@ -170,7 +166,6 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
        opts.SLogger,
        WithEvalDelay(opts.ManagerOpts.EvalDelay),
        WithSQLStore(opts.SQLStore),
        WithQueryParser(opts.ManagerOpts.QueryParser),
    )

    if err != nil {
@@ -193,7 +188,6 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
        opts.Reader,
        opts.ManagerOpts.Prometheus,
        WithSQLStore(opts.SQLStore),
        WithQueryParser(opts.ManagerOpts.QueryParser),
    )

    if err != nil {
@@ -232,7 +226,6 @@ func NewManager(o *ManagerOptions) (*Manager, error) {
        alertmanager: o.Alertmanager,
        orgGetter: o.OrgGetter,
        sqlstore: o.SqlStore,
        queryParser: o.QueryParser,
    }

    zap.L().Debug("Manager created successfully with NotificationGroup")

@@ -119,42 +119,6 @@ func (r *PromRule) getPqlQuery() (string, error) {
    return "", fmt.Errorf("invalid promql rule query")
}

// filterNewSeries filters out new series based on the first_seen timestamp.
func (r *PromRule) filterNewSeries(ctx context.Context, ts time.Time, res promql.Matrix) (promql.Matrix, error) {
    // Convert promql.Matrix to []v3.Series
    v3Series := make([]v3.Series, 0, len(res))
    for _, series := range res {
        v3Series = append(v3Series, toCommonSeries(series))
    }

    // Get indexes to skip
    skipIndexes, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, v3Series)
    if filterErr != nil {
        r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
        return nil, filterErr
    }

    // if no series are skipped, return the original matrix
    if len(skipIndexes) == 0 {
        return res, nil
    }

    // Create a map of skip indexes for efficient lookup
    skippedIdxMap := make(map[int]struct{}, len(skipIndexes))
    for _, idx := range skipIndexes {
        skippedIdxMap[idx] = struct{}{}
    }

    // Filter out skipped series from promql.Matrix
    filteredMatrix := make(promql.Matrix, 0, len(res)-len(skipIndexes))
    for i, series := range res {
        if _, shouldSkip := skippedIdxMap[i]; !shouldSkip {
            filteredMatrix = append(filteredMatrix, series)
        }
    }
    return filteredMatrix, nil
}

func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
    start, end := r.Timestamps(ts)
    interval := 60 * time.Second // TODO(srikanthccv): this should be configurable
@@ -171,19 +135,8 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
        return nil, err
    }

    matrixToProcess := res
    // Filter out new series if newGroupEvalDelay is configured
    if r.ShouldSkipNewGroups() {
        filteredSeries, filterErr := r.filterNewSeries(ctx, ts, matrixToProcess)
        if filterErr != nil {
            r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
            return nil, filterErr
        }
        matrixToProcess = filteredSeries
    }

    var resultVector ruletypes.Vector
    for _, series := range matrixToProcess {
    for _, series := range res {
        resultSeries, err := r.Threshold.Eval(toCommonSeries(series), r.Unit(), ruletypes.EvalData{
            ActiveAlerts: r.ActiveAlertsLabelFP(),
        })

@@ -52,7 +52,6 @@ func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError)
        WithSendAlways(),
        WithSendUnmatched(),
        WithSQLStore(opts.SQLStore),
        WithQueryParser(opts.ManagerOpts.QueryParser),
    )

    if err != nil {
@@ -73,7 +72,6 @@ func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError)
        WithSendAlways(),
        WithSendUnmatched(),
        WithSQLStore(opts.SQLStore),
        WithQueryParser(opts.ManagerOpts.QueryParser),
    )

    if err != nil {

@@ -378,42 +378,6 @@ func (r *ThresholdRule) GetSelectedQuery() string {
    return r.ruleCondition.GetSelectedQueryName()
}

// filterNewSeries filters out new series based on the first_seen timestamp.
func (r *ThresholdRule) filterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
    // Convert []*v3.Series to []v3.Series for filtering
    v3Series := make([]v3.Series, 0, len(series))
    for _, s := range series {
        v3Series = append(v3Series, *s)
    }

    // Get indexes to skip
    skipIndexes, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, v3Series)
    if filterErr != nil {
        r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
        return nil, filterErr
    }

    // if no series are skipped, return the original series
    if len(skipIndexes) == 0 {
        return series, nil
    }

    // Create a map of skip indexes for efficient lookup
    skippedIdxMap := make(map[int]struct{}, len(skipIndexes))
    for _, idx := range skipIndexes {
        skippedIdxMap[idx] = struct{}{}
    }

    // Filter out skipped series
    oldSeries := make([]*v3.Series, 0, len(series)-len(skipIndexes))
    for i, s := range series {
        if _, shouldSkip := skippedIdxMap[i]; !shouldSkip {
            oldSeries = append(oldSeries, s)
        }
    }
    return oldSeries, nil
}

func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {

    params, err := r.prepareQueryRange(ctx, ts)
@@ -517,18 +481,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
        return resultVector, nil
    }

    // Filter out new series if newGroupEvalDelay is configured
    seriesToProcess := queryResult.Series
    if r.ShouldSkipNewGroups() {
        filteredSeries, filterErr := r.filterNewSeries(ctx, ts, seriesToProcess)
        if filterErr != nil {
            r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
            return nil, filterErr
        }
        seriesToProcess = filteredSeries
    }

    for _, series := range seriesToProcess {
    for _, series := range queryResult.Series {
        if r.Condition() != nil && r.Condition().RequireMinPoints {
            if len(series.Points) < r.ruleCondition.RequiredNumPoints {
                r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
@@ -607,17 +560,7 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
        return resultVector, nil
    }

    // Filter out new series if newGroupEvalDelay is configured
    seriesToProcess := queryResult.Series
    if r.ShouldSkipNewGroups() {
        filteredSeries, filterErr := r.filterNewSeries(ctx, ts, seriesToProcess)
        if filterErr != nil {
            r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
            return nil, filterErr
        }
        seriesToProcess = filteredSeries
    }
    for _, series := range seriesToProcess {
    for _, series := range queryResult.Series {
        if r.Condition() != nil && r.Condition().RequireMinPoints {
            if len(series.Points) < r.Condition().RequiredNumPoints {
                r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)

@@ -9,22 +9,19 @@ import (
// AssignReservedVars assigns values for Go template vars. It assumes that
// model.QueryRangeParamsV3.Start and End are Unix millisecond timestamps.
func AssignReservedVarsV3(queryRangeParams *v3.QueryRangeParamsV3) {
    AssignReservedVars(queryRangeParams.Variables, queryRangeParams.Start, queryRangeParams.End)
}

func AssignReservedVars(variables map[string]interface{}, start int64, end int64) {
    variables["start_timestamp"] = start / 1000
    variables["end_timestamp"] = end / 1000

    variables["start_timestamp_ms"] = start
    variables["end_timestamp_ms"] = end

    variables["SIGNOZ_START_TIME"] = start
    variables["SIGNOZ_END_TIME"] = end

    variables["start_timestamp_nano"] = start * 1e6
    variables["end_timestamp_nano"] = end * 1e6

    variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", start/1000)
    variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", end/1000)
    queryRangeParams.Variables["start_timestamp"] = queryRangeParams.Start / 1000
    queryRangeParams.Variables["end_timestamp"] = queryRangeParams.End / 1000

    queryRangeParams.Variables["start_timestamp_ms"] = queryRangeParams.Start
    queryRangeParams.Variables["end_timestamp_ms"] = queryRangeParams.End

    queryRangeParams.Variables["SIGNOZ_START_TIME"] = queryRangeParams.Start
    queryRangeParams.Variables["SIGNOZ_END_TIME"] = queryRangeParams.End

    queryRangeParams.Variables["start_timestamp_nano"] = queryRangeParams.Start * 1e6
    queryRangeParams.Variables["end_timestamp_nano"] = queryRangeParams.End * 1e6

    queryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.Start/1000)
    queryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.End/1000)

}
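
For reference, what the refactored helper populates, with illustrative millisecond inputs (the values follow directly from the assignments above):

// Illustrative only: start and end are Unix millisecond timestamps.
vars := map[string]interface{}{}
AssignReservedVars(vars, 1700000000000, 1700000060000)
// vars["start_timestamp"]      == int64(1700000000)          (seconds)
// vars["start_timestamp_ms"]   == int64(1700000000000)       (milliseconds)
// vars["start_timestamp_nano"] == int64(1700000000000000000) (nanoseconds)
// vars["start_datetime"]       == "toDateTime(1700000000)"
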
@@ -2,7 +2,6 @@ package queryfilterextractor

import (
    "fmt"
    "sort"
    "strings"

    clickhouse "github.com/AfterShip/clickhouse-sql-parser/parser"
@@ -88,12 +87,6 @@ func (e *ClickHouseFilterExtractor) Extract(query string) (*FilterResult, error)
        result.GroupByColumns = append(result.GroupByColumns, colInfo)
    }

    // Sort the metric names and group by columns to return deterministic results
    sort.Strings(result.MetricNames)
    sort.Slice(result.GroupByColumns, func(i, j int) bool {
        return result.GroupByColumns[i].Name < result.GroupByColumns[j].Name
    })

    return result, nil
}


@@ -1,8 +1,6 @@
package queryfilterextractor

import (
    "sort"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/prometheus/prometheus/model/labels"
    "github.com/prometheus/prometheus/promql/parser"
@@ -47,12 +45,6 @@ func (e *PromQLFilterExtractor) Extract(query string) (*FilterResult, error) {
        result.GroupByColumns = append(result.GroupByColumns, ColumnInfo{Name: groupKey, OriginExpr: groupKey, OriginField: groupKey})
    }

    // Sort the metric names and group by columns to return deterministic results
    sort.Strings(result.MetricNames)
    sort.Slice(result.GroupByColumns, func(i, j int) bool {
        return result.GroupByColumns[i].Name < result.GroupByColumns[j].Name
    })

    return result, nil
}


@@ -2,9 +2,7 @@ package queryparser

import (
    "context"
    "fmt"

    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    "github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
    "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
@@ -13,22 +11,4 @@
type QueryParser interface {
    // AnalyzeQueryFilter extracts filter conditions from a given query string.
    AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error)
    // AnalyzeCompositeQuery extracts filter conditions from a composite query.
    AnalyzeCompositeQuery(ctx context.Context, compositeQuery *v3.CompositeQuery) (*queryfilterextractor.FilterResult, error)
    // ValidateCompositeQuery validates a composite query and returns an error if validation fails.
    ValidateCompositeQuery(ctx context.Context, compositeQuery *v3.CompositeQuery) error
}

type QueryParseError struct {
    StartPosition *int
    EndPosition *int
    ErrorMessage string
    Query string
}

func (e *QueryParseError) Error() string {
    if e.StartPosition != nil && e.EndPosition != nil {
        return fmt.Sprintf("query parse error: %s at position %d:%d", e.ErrorMessage, *e.StartPosition, *e.EndPosition)
    }
    return fmt.Sprintf("query parse error: %s", e.ErrorMessage)
}
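
A small usage sketch of the error type above, with made-up positions and message:

// Illustrative only.
start, end := 10, 18
perr := &QueryParseError{
    StartPosition: &start,
    EndPosition:   &end,
    ErrorMessage:  "unexpected token",
    Query:         "sum by (service_name) (rate(request_total[5m]",
}
fmt.Println(perr.Error()) // query parse error: unexpected token at position 10:18
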
40
pkg/queryparser/queryparser_impl.go
Normal file
@@ -0,0 +1,40 @@
package queryparser

import (
    "context"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
    "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

type queryParserImpl struct {
    settings factory.ProviderSettings
}

// New creates a new implementation of the QueryParser service.
func New(settings factory.ProviderSettings) QueryParser {
    return &queryParserImpl{
        settings: settings,
    }
}

func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error) {
    var extractorType queryfilterextractor.ExtractorType
    switch queryType {
    case querybuildertypesv5.QueryTypePromQL:
        extractorType = queryfilterextractor.ExtractorTypePromQL
    case querybuildertypesv5.QueryTypeClickHouseSQL:
        extractorType = queryfilterextractor.ExtractorTypeClickHouseSQL
    default:
        return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %s. Supported values are '%s' and '%s'", queryType, querybuildertypesv5.QueryTypePromQL, querybuildertypesv5.QueryTypeClickHouseSQL)
    }

    // Create extractor
    extractor, err := queryfilterextractor.NewExtractor(extractorType)
    if err != nil {
        return nil, err
    }
    return extractor.Extract(query)
}
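
A brief usage sketch of the constructor and method in this new file (illustrative; the provider settings come from the test helper used elsewhere in this change):

// Illustrative only.
parser := queryparser.New(instrumentationtest.New().ToProviderSettings())
res, err := parser.AnalyzeQueryFilter(
    context.Background(),
    querybuildertypesv5.QueryTypePromQL,
    "sum by (service_name) (rate(request_total[5m]))",
)
if err != nil {
    panic(err) // illustrative error handling
}
fmt.Println(res.MetricNames) // [request_total]
// res.GroupByColumns holds a ColumnInfo entry for "service_name"
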
@@ -1,256 +0,0 @@
|
||||
package queryparser
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
type queryParserImpl struct {
|
||||
settings factory.ProviderSettings
|
||||
}
|
||||
|
||||
// New creates a new implementation of the QueryParser service.
|
||||
func New(settings factory.ProviderSettings) QueryParser {
|
||||
return &queryParserImpl{
|
||||
settings: settings,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType qbtypes.QueryType, query string) (*queryfilterextractor.FilterResult, error) {
|
||||
var extractorType queryfilterextractor.ExtractorType
|
||||
switch queryType {
|
||||
case qbtypes.QueryTypePromQL:
|
||||
extractorType = queryfilterextractor.ExtractorTypePromQL
|
||||
case qbtypes.QueryTypeClickHouseSQL:
|
||||
extractorType = queryfilterextractor.ExtractorTypeClickHouseSQL
|
||||
default:
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %s. Supported values are '%s' and '%s'", queryType, qbtypes.QueryTypePromQL, qbtypes.QueryTypeClickHouseSQL)
|
||||
}
|
||||
|
||||
// Create extractor
|
||||
extractor, err := queryfilterextractor.NewExtractor(extractorType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return extractor.Extract(query)
|
||||
}
|
||||
|
||||
func (p *queryParserImpl) AnalyzeCompositeQuery(ctx context.Context, compositeQuery *v3.CompositeQuery) (*queryfilterextractor.FilterResult, error) {
|
||||
var result = &queryfilterextractor.FilterResult{
|
||||
MetricNames: []string{},
|
||||
GroupByColumns: []queryfilterextractor.ColumnInfo{},
|
||||
}
|
||||
|
||||
for _, query := range compositeQuery.Queries {
|
||||
switch query.Type {
|
||||
case qbtypes.QueryTypeBuilder:
|
||||
switch spec := query.Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
// extract group by fields
|
||||
for _, groupBy := range spec.GroupBy {
|
||||
if groupBy.Name != "" {
|
||||
result.GroupByColumns = append(result.GroupByColumns, queryfilterextractor.ColumnInfo{Name: groupBy.Name, OriginExpr: groupBy.Name, OriginField: groupBy.Name})
|
||||
}
|
||||
}
|
||||
// extract metric names
|
||||
for _, aggregation := range spec.Aggregations {
|
||||
if aggregation.MetricName != "" {
|
||||
result.MetricNames = append(result.MetricNames, aggregation.MetricName)
|
||||
}
|
||||
}
|
||||
default:
|
||||
// TODO: add support for Traces and Logs Aggregation types
|
||||
if p.settings.Logger != nil {
|
||||
p.settings.Logger.WarnContext(ctx, "unsupported QueryBuilderQuery type: %T", spec)
|
||||
}
|
||||
continue
|
||||
}
|
||||
case qbtypes.QueryTypePromQL:
|
||||
spec, ok := query.Spec.(qbtypes.PromQuery)
|
||||
if !ok || spec.Query == "" {
|
||||
continue
|
||||
}
|
||||
res, err := p.AnalyzeQueryFilter(ctx, qbtypes.QueryTypePromQL, spec.Query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result.MetricNames = append(result.MetricNames, res.MetricNames...)
|
||||
result.GroupByColumns = append(result.GroupByColumns, res.GroupByColumns...)
|
||||
case qbtypes.QueryTypeClickHouseSQL:
|
||||
spec, ok := query.Spec.(qbtypes.ClickHouseQuery)
|
||||
if !ok || spec.Query == "" {
|
||||
continue
|
||||
}
|
||||
res, err := p.AnalyzeQueryFilter(ctx, qbtypes.QueryTypeClickHouseSQL, spec.Query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result.MetricNames = append(result.MetricNames, res.MetricNames...)
|
||||
result.GroupByColumns = append(result.GroupByColumns, res.GroupByColumns...)
|
||||
default:
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported query type: %s", query.Type)
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// ValidateCompositeQuery validates a composite query by checking all queries in the queries array
|
||||
func (p *queryParserImpl) ValidateCompositeQuery(ctx context.Context, compositeQuery *v3.CompositeQuery) error {
|
||||
if compositeQuery == nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"composite query is required",
|
||||
)
|
||||
}
|
||||
|
||||
if len(compositeQuery.Queries) == 0 {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"at least one query is required",
|
||||
)
|
||||
}
|
||||
|
||||
// Validate each query
|
||||
for i, envelope := range compositeQuery.Queries {
|
||||
queryId := qbtypes.GetQueryIdentifier(envelope, i)
|
||||
|
||||
switch envelope.Type {
|
||||
case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
|
||||
switch spec := envelope.Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid %s: %s",
|
||||
queryId,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid %s: %s",
|
||||
queryId,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid %s: %s",
|
||||
queryId,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
default:
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"unknown query spec type for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
case qbtypes.QueryTypePromQL:
|
||||
spec, ok := envelope.Spec.(qbtypes.PromQuery)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if spec.Query == "" {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"query expression is required for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if err := validatePromQLQuery(spec.Query); err != nil {
|
||||
return err
|
||||
}
|
||||
case qbtypes.QueryTypeClickHouseSQL:
|
||||
spec, ok := envelope.Spec.(qbtypes.ClickHouseQuery)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if spec.Query == "" {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"query expression is required for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if err := validateClickHouseQuery(spec.Query); err != nil {
|
||||
return err
|
||||
}
|
||||
case qbtypes.QueryTypeFormula:
|
||||
spec, ok := envelope.Spec.(qbtypes.QueryBuilderFormula)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if err := spec.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
case qbtypes.QueryTypeJoin:
|
||||
spec, ok := envelope.Spec.(qbtypes.QueryBuilderJoin)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if err := spec.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
case qbtypes.QueryTypeTraceOperator:
|
||||
spec, ok := envelope.Spec.(qbtypes.QueryBuilderTraceOperator)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
err := spec.ValidateTraceOperator(compositeQuery.Queries)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
default:
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"unknown query type '%s' for %s",
|
||||
envelope.Type,
|
||||
queryId,
|
||||
).WithAdditional(
|
||||
"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql, trace_operator",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Check if all queries are disabled
|
||||
if allDisabled := checkQueriesDisabled(compositeQuery); allDisabled {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"all queries are disabled - at least one query must be enabled",
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,112 +0,0 @@
package queryparser

import (
    "context"
    "encoding/json"
    "testing"

    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    "github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
    "github.com/stretchr/testify/require"
)

func TestBaseRule_ExtractMetricAndGroupBys(t *testing.T) {
    ctx := context.Background()

    tests := []struct {
        name        string
        payload     string
        wantMetrics []string
        wantGroupBy []queryfilterextractor.ColumnInfo
    }{
        {
            name:        "builder multiple grouping",
            payload:     builderQueryWithGrouping,
            wantMetrics: []string{"test_metric_cardinality", "cpu_usage_total"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{
                {Name: "service_name", Alias: "", OriginExpr: "service_name", OriginField: "service_name"},
                {Name: "env", Alias: "", OriginExpr: "env", OriginField: "env"},
            },
        },
        {
            name:        "builder single grouping",
            payload:     builderQuerySingleGrouping,
            wantMetrics: []string{"latency_p50"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{
                {Name: "namespace", Alias: "", OriginExpr: "namespace", OriginField: "namespace"},
            },
        },
        {
            name:        "builder no grouping",
            payload:     builderQueryNoGrouping,
            wantMetrics: []string{"disk_usage_total"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{},
        },
        {
            name:        "promql multiple grouping",
            payload:     promQueryWithGrouping,
            wantMetrics: []string{"http_requests_total"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{
                {Name: "pod", Alias: "", OriginExpr: "pod", OriginField: "pod"},
                {Name: "region", Alias: "", OriginExpr: "region", OriginField: "region"},
            },
        },
        {
            name:        "promql single grouping",
            payload:     promQuerySingleGrouping,
            wantMetrics: []string{"cpu_usage_seconds_total"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{
                {Name: "env", Alias: "", OriginExpr: "env", OriginField: "env"},
            },
        },
        {
            name:        "promql no grouping",
            payload:     promQueryNoGrouping,
            wantMetrics: []string{"node_cpu_seconds_total"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{},
        },
        {
            name:        "clickhouse multiple grouping",
            payload:     clickHouseQueryWithGrouping,
            wantMetrics: []string{"cpu"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{
                {Name: "region", Alias: "r", OriginExpr: "region", OriginField: "region"},
                {Name: "zone", Alias: "", OriginExpr: "zone", OriginField: "zone"},
            },
        },
        {
            name:        "clickhouse single grouping",
            payload:     clickHouseQuerySingleGrouping,
            wantMetrics: []string{"cpu_usage"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{
                {Name: "region", Alias: "r", OriginExpr: "region", OriginField: "region"},
            },
        },
        {
            name:        "clickhouse no grouping",
            payload:     clickHouseQueryNoGrouping,
            wantMetrics: []string{"memory_usage"},
            wantGroupBy: []queryfilterextractor.ColumnInfo{},
        },
    }

    queryParser := New(instrumentationtest.New().ToProviderSettings())

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            cq := mustCompositeQuery(t, tt.payload)
            res, err := queryParser.AnalyzeCompositeQuery(ctx, cq)
            require.NoError(t, err)
            require.ElementsMatch(t, tt.wantMetrics, res.MetricNames)
            require.ElementsMatch(t, tt.wantGroupBy, res.GroupByColumns)
        })
    }
}

func mustCompositeQuery(t *testing.T, payload string) *v3.CompositeQuery {
    t.Helper()
    var compositeQuery v3.CompositeQuery
    require.NoError(t, json.Unmarshal([]byte(payload), &compositeQuery))
    return &compositeQuery
}
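Worth noting from the table above: SQL aliases survive extraction, so `SELECT region as r ...` comes back as a ColumnInfo with Name "region" and Alias "r". The analyzer contract the test exercises, in miniature (same setup as above):

    // Sketch: what AnalyzeCompositeQuery reports for a decoded payload.
    res, err := queryParser.AnalyzeCompositeQuery(ctx, cq)
    require.NoError(t, err)
    // res.MetricNames    -> metric names referenced across all queries
    // res.GroupByColumns -> group-by columns, SQL aliases preserved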
@@ -1,184 +0,0 @@
package queryparser

var (
    builderQueryWithGrouping = `
    {
        "queryType":"builder",
        "panelType":"graph",
        "queries":[
            {
                "type":"builder_query",
                "spec":{
                    "name":"A",
                    "signal":"metrics",
                    "stepInterval":null,
                    "disabled":false,
                    "filter":{"expression":""},
                    "groupBy":[
                        {"name":"service_name","fieldDataType":"","fieldContext":""},
                        {"name":"env","fieldDataType":"","fieldContext":""}
                    ],
                    "aggregations":[
                        {"metricName":"test_metric_cardinality","timeAggregation":"count","spaceAggregation":"sum"},
                        {"metricName":"cpu_usage_total","timeAggregation":"avg","spaceAggregation":"avg"}
                    ]
                }
            }
        ]
    }
    `

    builderQuerySingleGrouping = `
    {
        "queryType":"builder",
        "panelType":"graph",
        "queries":[
            {
                "type":"builder_query",
                "spec":{
                    "name":"B",
                    "signal":"metrics",
                    "stepInterval":null,
                    "disabled":false,
                    "groupBy":[
                        {"name":"namespace","fieldDataType":"","fieldContext":""}
                    ],
                    "aggregations":[
                        {"metricName":"latency_p50","timeAggregation":"avg","spaceAggregation":"max"}
                    ]
                }
            }
        ]
    }
    `

    builderQueryNoGrouping = `
    {
        "queryType":"builder",
        "panelType":"graph",
        "queries":[
            {
                "type":"builder_query",
                "spec":{
                    "name":"C",
                    "signal":"metrics",
                    "stepInterval":null,
                    "disabled":false,
                    "groupBy":[],
                    "aggregations":[
                        {"metricName":"disk_usage_total","timeAggregation":"sum","spaceAggregation":"sum"}
                    ]
                }
            }
        ]
    }
    `

    promQueryWithGrouping = `
    {
        "queries":[
            {
                "type":"promql",
                "spec":{
                    "name":"P1",
                    "query":"sum by (pod,region) (rate(http_requests_total[5m]))",
                    "disabled":false,
                    "step":0,
                    "stats":false
                }
            }
        ],
        "panelType":"graph",
        "queryType":"promql"
    }
    `

    promQuerySingleGrouping = `
    {
        "queries":[
            {
                "type":"promql",
                "spec":{
                    "name":"P2",
                    "query":"sum by (env)(rate(cpu_usage_seconds_total{job=\"api\"}[5m]))",
                    "disabled":false,
                    "step":0,
                    "stats":false
                }
            }
        ],
        "panelType":"graph",
        "queryType":"promql"
    }
    `

    promQueryNoGrouping = `
    {
        "queries":[
            {
                "type":"promql",
                "spec":{
                    "name":"P3",
                    "query":"rate(node_cpu_seconds_total[1m])",
                    "disabled":false,
                    "step":0,
                    "stats":false
                }
            }
        ],
        "panelType":"graph",
        "queryType":"promql"
    }
    `

    clickHouseQueryWithGrouping = `
    {
        "queryType":"clickhouse_sql",
        "panelType":"graph",
        "queries":[
            {
                "type":"clickhouse_sql",
                "spec":{
                    "name":"CH1",
                    "query":"SELECT region as r, zone FROM metrics WHERE metric_name='cpu' GROUP BY region, zone",
                    "disabled":false
                }
            }
        ]
    }
    `

    clickHouseQuerySingleGrouping = `
    {
        "queryType":"clickhouse_sql",
        "panelType":"graph",
        "queries":[
            {
                "type":"clickhouse_sql",
                "spec":{
                    "name":"CH2",
                    "query":"SELECT region as r FROM metrics WHERE metric_name='cpu_usage' GROUP BY region",
                    "disabled":false
                }
            }
        ]
    }
    `

    clickHouseQueryNoGrouping = `
    {
        "queryType":"clickhouse_sql",
        "panelType":"graph",
        "queries":[
            {
                "type":"clickhouse_sql",
                "spec":{
                    "name":"CH3",
                    "query":"SELECT * FROM metrics WHERE metric_name = 'memory_usage'",
                    "disabled":false
                }
            }
        ]
    }
    `
)
@@ -1,466 +0,0 @@
package queryparser

import (
    "context"
    "testing"

    "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/stretchr/testify/require"
)

func TestValidateCompositeQuery(t *testing.T) {
    ctx := context.Background()
    queryParser := New(instrumentationtest.New().ToProviderSettings())

    tests := []struct {
        name           string
        compositeQuery *v3.CompositeQuery
        wantErr        bool
        errContains    string
    }{
        {
            name:           "nil composite query should return error",
            compositeQuery: nil,
            wantErr:        true,
            errContains:    "composite query is required",
        },
        {
            name: "empty queries array should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{},
            },
            wantErr:     true,
            errContains: "at least one query is required",
        },
        {
            name: "valid metric builder query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name:   "metric_query",
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "cpu_usage",
                                },
                            },
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "valid log builder query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
                            Name:   "log_query",
                            Signal: telemetrytypes.SignalLogs,
                            Aggregations: []qbtypes.LogAggregation{
                                {
                                    Expression: "count()",
                                },
                            },
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "valid trace builder query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                            Name:   "trace_query",
                            Signal: telemetrytypes.SignalTraces,
                            Aggregations: []qbtypes.TraceAggregation{
                                {
                                    Expression: "count()",
                                },
                            },
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "valid PromQL query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name:  "prom_query",
                            Query: "rate(http_requests_total[5m])",
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "valid ClickHouse query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeClickHouseSQL,
                        Spec: qbtypes.ClickHouseQuery{
                            Name:  "ch_query",
                            Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "valid formula query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeFormula,
                        Spec: qbtypes.QueryBuilderFormula{
                            Name:       "formula_query",
                            Expression: "A + B",
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "valid join query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeJoin,
                        Spec: qbtypes.QueryBuilderJoin{
                            Name:  "join_query",
                            Left:  qbtypes.QueryRef{Name: "A"},
                            Right: qbtypes.QueryRef{Name: "B"},
                            Type:  qbtypes.JoinTypeInner,
                            On:    "service_name",
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "valid trace operator query should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                            Name:   "A",
                            Signal: telemetrytypes.SignalTraces,
                            Aggregations: []qbtypes.TraceAggregation{
                                {
                                    Expression: "count()",
                                },
                            },
                        },
                    },
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
                            Name:   "B",
                            Signal: telemetrytypes.SignalTraces,
                            Aggregations: []qbtypes.TraceAggregation{
                                {
                                    Expression: "count()",
                                },
                            },
                        },
                    },
                    {
                        Type: qbtypes.QueryTypeTraceOperator,
                        Spec: qbtypes.QueryBuilderTraceOperator{
                            Name:       "trace_operator",
                            Expression: "A && B",
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "invalid metric builder query - missing aggregation should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name:         "metric_query",
                            Signal:       telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{},
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "invalid",
        },
        {
            name: "invalid PromQL query - empty query should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name:  "prom_query",
                            Query: "",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "query expression is required",
        },
        {
            name: "invalid PromQL query - syntax error should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name:  "prom_query",
                            Query: "rate(http_requests_total[5m",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "unclosed left parenthesis",
        },
        {
            name: "invalid ClickHouse query - empty query should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeClickHouseSQL,
                        Spec: qbtypes.ClickHouseQuery{
                            Name:  "ch_query",
                            Query: "",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "query expression is required",
        },
        {
            name: "invalid ClickHouse query - syntax error should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeClickHouseSQL,
                        Spec: qbtypes.ClickHouseQuery{
                            Name:  "ch_query",
                            Query: "SELECT * FROM metrics WHERE",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "query parse error",
        },
        {
            name: "invalid formula query - empty expression should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeFormula,
                        Spec: qbtypes.QueryBuilderFormula{
                            Name:       "formula_query",
                            Expression: "",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "formula expression cannot be blank",
        },
        {
            name: "invalid trace operator query - empty expression should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeTraceOperator,
                        Spec: qbtypes.QueryBuilderTraceOperator{
                            Name:       "trace_operator",
                            Expression: "",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "expression cannot be empty",
        },
        {
            name: "all queries disabled should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name:     "metric_query",
                            Disabled: true,
                            Signal:   telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "cpu_usage",
                                },
                            },
                        },
                    },
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name:     "prom_query",
                            Query:    "rate(http_requests_total[5m])",
                            Disabled: true,
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "all queries are disabled",
        },
        {
            name: "mixed disabled and enabled queries should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name:     "metric_query",
                            Disabled: true,
                            Signal:   telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "cpu_usage",
                                },
                            },
                        },
                    },
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name:     "prom_query",
                            Query:    "rate(http_requests_total[5m])",
                            Disabled: false,
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "multiple valid queries should pass",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name:   "metric_query",
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "cpu_usage",
                                },
                            },
                        },
                    },
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name:  "prom_query",
                            Query: "rate(http_requests_total[5m])",
                        },
                    },
                    {
                        Type: qbtypes.QueryTypeClickHouseSQL,
                        Spec: qbtypes.ClickHouseQuery{
                            Name:  "ch_query",
                            Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
                        },
                    },
                },
            },
            wantErr: false,
        },
        {
            name: "invalid query in multiple queries should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypeBuilder,
                        Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
                            Name:   "metric_query",
                            Signal: telemetrytypes.SignalMetrics,
                            Aggregations: []qbtypes.MetricAggregation{
                                {
                                    MetricName: "cpu_usage",
                                },
                            },
                        },
                    },
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name:  "prom_query",
                            Query: "invalid promql syntax [",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "query parse error",
        },
        {
            name: "unknown query type should return error",
            compositeQuery: &v3.CompositeQuery{
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryType{String: valuer.NewString("invalid_query_type")},
                        Spec: qbtypes.PromQuery{
                            Name:  "prom_query",
                            Query: "rate(http_requests_total[5m])",
                        },
                    },
                },
            },
            wantErr:     true,
            errContains: "unknown query type",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            err := queryParser.ValidateCompositeQuery(ctx, tt.compositeQuery)
            if tt.wantErr {
                require.Error(t, err)
                if tt.errContains != "" {
                    require.Contains(t, err.Error(), tt.errContains)
                }
            } else {
                require.NoError(t, err)
            }
        })
    }
}
@@ -1,123 +0,0 @@
package queryparser

import (
    "bytes"
    "text/template"
    "time"

    clickhouse "github.com/AfterShip/clickhouse-sql-parser/parser"
    "github.com/SigNoz/signoz/pkg/errors"
    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/prometheus/prometheus/promql/parser"
)

// validatePromQLQuery validates a PromQL query syntax using the Prometheus parser
func validatePromQLQuery(query string) error {
    _, err := parser.ParseExpr(query)
    if err != nil {
        if syntaxErrs, ok := err.(parser.ParseErrors); ok {
            syntaxErr := syntaxErrs[0]
            startPosition := int(syntaxErr.PositionRange.Start)
            endPosition := int(syntaxErr.PositionRange.End)
            return &QueryParseError{
                StartPosition: &startPosition,
                EndPosition:   &endPosition,
                ErrorMessage:  syntaxErr.Error(),
                Query:         query,
            }
        }
    }
    return err
}
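As the tests above pin down, a malformed expression comes back as a *QueryParseError carrying the Prometheus parser's position range. A small sketch:

    // Sketch: an unbalanced PromQL selector fails validation with positions.
    err := validatePromQLQuery("rate(http_requests_total[5m")
    if parseErr, ok := err.(*QueryParseError); ok {
        // parseErr.StartPosition / parseErr.EndPosition index into the query text
        _ = parseErr
    }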

// validateClickHouseQuery validates a ClickHouse SQL query syntax using the ClickHouse parser
func validateClickHouseQuery(query string) error {
    // Assign the default template variables with dummy values
    variables := make(map[string]interface{})
    start := time.Now().UnixMilli()
    end := start + 1000
    querytemplate.AssignReservedVars(variables, start, end)

    // Apply the values for default template variables before parsing the query
    tmpl := template.New("clickhouse-query")
    tmpl, err := tmpl.Parse(query)
    if err != nil {
        return errors.NewInvalidInputf(
            errors.CodeInvalidInput,
            "failed to parse clickhouse query: %s",
            err.Error(),
        )
    }
    var queryBuffer bytes.Buffer
    err = tmpl.Execute(&queryBuffer, variables)
    if err != nil {
        return errors.NewInvalidInputf(
            errors.CodeInvalidInput,
            "failed to execute clickhouse query template: %s",
            err.Error(),
        )
    }

    // Parse the ClickHouse query with the default template variables applied
    p := clickhouse.NewParser(queryBuffer.String())
    _, err = p.ParseStmts()
    if err != nil {
        // TODO: the errors returned here are errors.errorString; rather than using a regex to parse the error,
        // we should consider a library that parses the CH query more accurately, since the current CH parser
        // only does very minimal checks.
        // Sample Error: "line 0:36 expected table name or subquery, got ;\nSELECT department, avg(salary) FROM ;\n ^\n"
        return &QueryParseError{
            ErrorMessage: err.Error(),
            Query:        query,
        }
    }
    return nil
}
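Because the reserved template variables are filled with dummy values before parsing, queries that use them still reach the parser; a genuinely broken statement surfaces as a *QueryParseError, as the tests above expect:

    // Sketch: an incomplete WHERE clause fails ClickHouse parsing.
    err := validateClickHouseQuery("SELECT * FROM metrics WHERE")
    if parseErr, ok := err.(*QueryParseError); ok {
        _ = parseErr.ErrorMessage // the parser's "expected ..." style message
    }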

// checkQueriesDisabled checks if all queries are disabled. Returns true if all queries are disabled, false otherwise.
func checkQueriesDisabled(compositeQuery *v3.CompositeQuery) bool {
    for _, envelope := range compositeQuery.Queries {
        switch envelope.Type {
        case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
            switch spec := envelope.Spec.(type) {
            case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
                if !spec.Disabled {
                    return false
                }
            case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
                if !spec.Disabled {
                    return false
                }
            case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
                if !spec.Disabled {
                    return false
                }
            }
        case qbtypes.QueryTypeFormula:
            if spec, ok := envelope.Spec.(qbtypes.QueryBuilderFormula); ok && !spec.Disabled {
                return false
            }
        case qbtypes.QueryTypeTraceOperator:
            if spec, ok := envelope.Spec.(qbtypes.QueryBuilderTraceOperator); ok && !spec.Disabled {
                return false
            }
        case qbtypes.QueryTypeJoin:
            if spec, ok := envelope.Spec.(qbtypes.QueryBuilderJoin); ok && !spec.Disabled {
                return false
            }
        case qbtypes.QueryTypePromQL:
            if spec, ok := envelope.Spec.(qbtypes.PromQuery); ok && !spec.Disabled {
                return false
            }
        case qbtypes.QueryTypeClickHouseSQL:
            if spec, ok := envelope.Spec.(qbtypes.ClickHouseQuery); ok && !spec.Disabled {
                return false
            }
        }
    }

    // If we reach here, all queries are disabled
    return true
}
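The helper bails out with false at the first enabled query it sees, so composites mixing disabled and enabled entries pass (as asserted in the tests above):

    // Sketch: one enabled query keeps the composite acceptable.
    cq := &v3.CompositeQuery{Queries: []qbtypes.QueryEnvelope{
        {Type: qbtypes.QueryTypePromQL, Spec: qbtypes.PromQuery{Name: "A", Query: "rate(http_requests_total[5m])", Disabled: false}},
    }}
    _ = checkQueriesDisabled(cq) // false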
@@ -20,6 +20,8 @@ import (
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/modules/preference"
    "github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
    "github.com/SigNoz/signoz/pkg/modules/promote"
    "github.com/SigNoz/signoz/pkg/modules/promote/implpromote"
    "github.com/SigNoz/signoz/pkg/modules/quickfilter"
    "github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
    "github.com/SigNoz/signoz/pkg/modules/rawdataexport"
@@ -65,6 +67,7 @@ type Modules struct {
    Services        services.Module
    SpanPercentile  spanpercentile.Module
    MetricsExplorer metricsexplorer.Module
    Promote         promote.Module
}

func NewModules(
@@ -108,5 +111,6 @@ func NewModules(
        SpanPercentile:  implspanpercentile.NewModule(querier, providerSettings),
        Services:        implservices.NewModule(querier, telemetryStore),
        MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
        Promote:         implpromote.NewModule(telemetryMetadataStore, telemetryStore),
    }
}

@@ -14,6 +14,7 @@ import (
    "github.com/SigNoz/signoz/pkg/modules/authdomain"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/modules/preference"
    "github.com/SigNoz/signoz/pkg/modules/promote"
    "github.com/SigNoz/signoz/pkg/modules/session"
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/types/ctxtypes"
@@ -38,6 +39,7 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
        struct{ authdomain.Handler }{},
        struct{ preference.Handler }{},
        struct{ global.Handler }{},
        struct{ promote.Handler }{},
    ).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
    if err != nil {
        return nil, err

@@ -24,6 +24,7 @@ import (
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
    "github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
    "github.com/SigNoz/signoz/pkg/modules/promote/implpromote"
    "github.com/SigNoz/signoz/pkg/modules/session/implsession"
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/modules/user/impluser"
@@ -234,6 +235,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
            implauthdomain.NewHandler(modules.AuthDomain),
            implpreference.NewHandler(modules.Preference),
            signozglobal.NewHandler(global),
            implpromote.NewHandler(modules.Promote),
        ),
    )
}
@@ -7,7 +7,6 @@ import (
    "strings"
    "time"

    "github.com/ClickHouse/clickhouse-go/v2"
    "github.com/ClickHouse/clickhouse-go/v2/lib/chcol"
    schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
    "github.com/SigNoz/signoz-otel-collector/constants"
@@ -15,7 +14,6 @@ import (
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/querybuilder"
    "github.com/SigNoz/signoz/pkg/telemetrylogs"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/huandu/go-sqlbuilder"
)
@@ -34,6 +32,10 @@ var (
    CodeFailScanVariant         = errors.MustNewCode("fail_scan_variant")
    CodeFailBuildJSONPathsQuery = errors.MustNewCode("fail_build_json_paths_query")
    CodeNoPathsToQueryIndexes   = errors.MustNewCode("no_paths_to_query_indexes_provided")

    CodeFailedToPrepareBatch = errors.MustNewCode("failed_to_prepare_batch_promoted_paths")
    CodeFailedToSendBatch    = errors.MustNewCode("failed_to_send_batch_promoted_paths")
    CodeFailedToAppendPath   = errors.MustNewCode("failed_to_append_path_promoted_paths")
)

// GetBodyJSONPaths extracts body JSON paths from the path_types table
@@ -48,7 +50,7 @@ var (
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.TelemetryStore,
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
    fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {

    query, args, limit, err := buildGetBodyJSONPathsQuery(fieldKeySelectors)
@@ -56,7 +58,7 @@ func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.Telemet
        return nil, false, err
    }

    rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
        return nil, false, errors.WrapInternalf(err, CodeFailExtractBodyJSONKeys, "failed to extract body JSON keys")
    }
@@ -96,12 +98,12 @@ func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.Telemet
        return nil, false, errors.WrapInternalf(rows.Err(), CodeFailIterateBodyJSONKeys, "error iterating body JSON keys")
    }

    promoted, err := GetPromotedPaths(ctx, telemetryStore.ClickhouseDB(), paths...)
    promoted, err := t.GetPromotedPaths(ctx, paths...)
    if err != nil {
        return nil, false, err
    }

    indexes, err := getJSONPathIndexes(ctx, telemetryStore, paths...)
    indexes, err := t.getJSONPathIndexes(ctx, paths...)
    if err != nil {
        return nil, false, err
    }
@@ -163,7 +165,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
    filteredPaths := []string{}
    for _, path := range paths {
        if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
@@ -176,7 +178,7 @@ func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.Telem
    }

    // list indexes for the paths
    indexesMap, err := ListLogsJSONIndexes(ctx, telemetryStore, filteredPaths...)
    indexesMap, err := t.ListLogsJSONIndexes(ctx, filteredPaths...)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to list JSON path indexes")
    }
@@ -215,7 +217,6 @@ func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.Telem
}

func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, []any) {
    // This aggregates all types per path and gets the max last_seen, then applies LIMIT
    sb := sqlbuilder.Select(
        "name", "type_full", "expr", "granularity",
    ).From(fmt.Sprintf("clusterAllReplicas('%s', %s)", cluster, SkipIndexTableName))
@@ -236,9 +237,9 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
    return sb.BuildWithFlavor(sqlbuilder.ClickHouse)
}

func ListLogsJSONIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, filters ...string) (map[string][]schemamigrator.Index, error) {
    query, args := buildListLogsJSONIndexesQuery(telemetryStore.Cluster(), filters...)
    rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error) {
    query, args := buildListLogsJSONIndexesQuery(t.telemetrystore.Cluster(), filters...)
    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to load string indexed columns")
    }
@@ -264,9 +265,16 @@ func ListLogsJSONIndexes(ctx context.Context, telemetryStore telemetrystore.Tele
    return indexesMap, nil
}

func ListPromotedPaths(ctx context.Context, conn clickhouse.Conn) (map[string]struct{}, error) {
    query := fmt.Sprintf("SELECT path FROM %s.%s", DBName, PromotedPathsTableName)
    rows, err := conn.Query(ctx, query)
func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {
    sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
    pathConditions := []string{}
    for _, path := range paths {
        pathConditions = append(pathConditions, sb.Equal("path", path))
    }
    sb.Where(sb.Or(pathConditions...))
    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailLoadPromotedPaths, "failed to load promoted paths")
    }
@@ -285,14 +293,14 @@ func ListPromotedPaths(ctx context.Context, conn clickhouse.Conn) (map[string]st
}

// TODO(Piyush): Remove this if not used in future
func ListJSONValues(ctx context.Context, conn clickhouse.Conn, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
    path = CleanPathPrefixes(path)

    if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
        return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
    }

    promoted, err := IsPathPromoted(ctx, conn, path)
    promoted, err := t.IsPathPromoted(ctx, path)
    if err != nil {
        return nil, false, err
    }
@@ -325,7 +333,7 @@ func ListJSONValues(ctx context.Context, conn clickhouse.Conn, path string, limi
    contextWithTimeout, cancel := context.WithTimeout(ctx, 5*time.Second)
    defer cancel()
    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    rows, err := conn.Query(contextWithTimeout, query, args...)
    rows, err := t.telemetrystore.ClickhouseDB().Query(contextWithTimeout, query, args...)
    if err != nil {
        if errors.Is(err, context.DeadlineExceeded) {
            return nil, false, errors.WrapTimeoutf(err, errors.CodeTimeout, "query timed out").WithAdditional("failed to list JSON values")
@@ -447,10 +455,10 @@ func derefValue(v any) any {
}

// IsPathPromoted checks if a specific path is promoted
func IsPathPromoted(ctx context.Context, conn clickhouse.Conn, path string) (bool, error) {
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
    split := strings.Split(path, telemetrylogs.ArraySep)
    query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
    rows, err := conn.Query(ctx, query, split[0])
    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
    if err != nil {
        return false, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to check if path %s is promoted", path)
    }
@@ -460,7 +468,7 @@ func IsPathPromoted(ctx context.Context, conn clickhouse.Conn, path string) (boo
}

// GetPromotedPaths checks if a specific path is promoted
func GetPromotedPaths(ctx context.Context, conn clickhouse.Conn, paths ...string) (*utils.ConcurrentSet[string], error) {
func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...string) (*utils.ConcurrentSet[string], error) {
    sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
    pathConditions := []string{}
    for _, path := range paths {
@@ -469,7 +477,7 @@ func GetPromotedPaths(ctx context.Context, conn clickhouse.Conn, paths ...string
    sb.Where(sb.Or(pathConditions...))

    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    rows, err := conn.Query(ctx, query, args...)
    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to get promoted paths")
    }
@@ -494,3 +502,29 @@ func CleanPathPrefixes(path string) string {
    path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
    return path
}

func (t *telemetryMetaStore) PromotePaths(ctx context.Context, paths ...string) error {
    batch, err := t.telemetrystore.ClickhouseDB().PrepareBatch(ctx,
        fmt.Sprintf("INSERT INTO %s.%s (path, created_at) VALUES", DBName,
            PromotedPathsTableName))
    if err != nil {
        return errors.WrapInternalf(err, CodeFailedToPrepareBatch, "failed to prepare batch")
    }

    nowMs := uint64(time.Now().UnixMilli())
    for _, p := range paths {
        trimmed := strings.TrimSpace(p)
        if trimmed == "" {
            continue
        }
        if err := batch.Append(trimmed, nowMs); err != nil {
            _ = batch.Abort()
            return errors.WrapInternalf(err, CodeFailedToAppendPath, "failed to append path")
        }
    }

    if err := batch.Send(); err != nil {
        return errors.WrapInternalf(err, CodeFailedToSendBatch, "failed to send batch")
    }
    return nil
}
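PromotePaths trims each path, silently skips blank entries, and aborts the whole batch if an append fails; only a successful Send persists the rows. A hedged usage sketch (assuming a wired *telemetryMetaStore t):

    // Sketch: promote two body JSON paths in a single ClickHouse batch.
    if err := t.PromotePaths(ctx, "user.id", "order.total"); err != nil {
        return err // the batch was aborted or the send failed
    }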
@@ -349,7 +349,7 @@ func (dashboard *Dashboard) GetWidgetQuery(startTime, endTime uint64, widgetInde
        return nil, errors.Wrapf(err, errors.TypeInvalidInput, ErrCodeDashboardInvalidData, "invalid dashboard data")
    }

    if len(data.Widgets) < int(widgetIndex)+1 {
    if widgetIndex < 0 || int(widgetIndex) >= len(data.Widgets) {
        return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeDashboardInvalidInput, "widget with index %v doesn't exist", widgetIndex)
    }

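The widget-lookup change above is a bounds-check fix: for a non-empty slice, the old condition `len(data.Widgets) < int(widgetIndex)+1` is false for any negative index, so a negative value slipped through to the slice access. A tiny illustration (Widget stands in for the dashboard's widget type, and a signed index is assumed):

    // Sketch: len(w) < idx+1 never fires for a negative idx.
    w := make([]Widget, 3)
    idx := -1
    _ = len(w) < idx+1           // 3 < 0 -> false: old check accepts bad input
    _ = idx < 0 || idx >= len(w) // true: new check rejects it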
76
pkg/types/promotetypes/types.go
Normal file
@@ -0,0 +1,76 @@
package promotetypes

import (
    "strings"

    "github.com/SigNoz/signoz-otel-collector/constants"
    "github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/telemetrylogs"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type WrappedIndex struct {
    JSONDataType telemetrytypes.JSONDataType `json:"-"`
    ColumnType   string                      `json:"column_type"`
    Type         string                      `json:"type"`
    Granularity  int                         `json:"granularity"`
}

type PromotePath struct {
    Path    string `json:"path"`
    Promote bool   `json:"promote,omitempty"`

    Indexes []WrappedIndex `json:"indexes,omitempty"`
}

func (i *PromotePath) ValidateAndSetDefaults() error {
    if i.Path == "" {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path is required")
    }

    if strings.Contains(i.Path, " ") {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
    }

    if strings.Contains(i.Path, telemetrylogs.ArraySep) || strings.Contains(i.Path, telemetrylogs.ArrayAnyIndex) {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
    }

    if strings.HasPrefix(i.Path, constants.BodyJSONColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
    }

    if !strings.HasPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix) {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
    }

    // remove the "body." prefix from the path
    i.Path = strings.TrimPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix)

    isCardinal := keycheck.IsCardinal(i.Path)
    if isCardinal {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cardinal paths can not be promoted or indexed")
    }

    for idx, index := range i.Indexes {
        if index.Type == "" {
            return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index type is required")
        }
        if index.Granularity <= 0 {
            return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index granularity must be greater than 0")
        }

        jsonDataType, ok := telemetrytypes.MappingStringToJSONDataType[index.ColumnType]
        if !ok {
            return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid column type: %s", index.ColumnType)
        }
        if !jsonDataType.IndexSupported {
            return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index is not supported for column type: %s", index.ColumnType)
        }

        i.Indexes[idx].JSONDataType = jsonDataType
    }

    return nil
}
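In practice these rules mean the promote endpoint only accepts `body.`-prefixed, non-array, non-cardinal paths, with each index entry naming a supported column type and a positive granularity; stripping the `body.` prefix is the one mutation the method performs. A hedged sketch ("String" and "bloom_filter" are illustrative values only; the accepted set comes from MappingStringToJSONDataType and the configured skip-index types):

    // Sketch: a payload that passes ValidateAndSetDefaults.
    p := PromotePath{
        Path:    "body.user.id",
        Promote: true,
        Indexes: []WrappedIndex{{ColumnType: "String", Type: "bloom_filter", Granularity: 4}},
    }
    if err := p.ValidateAndSetDefaults(); err == nil {
        _ = p.Path // now "user.id": the "body." prefix has been stripped
    }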
@@ -539,11 +539,6 @@ func (f Function) Copy() Function {
    return c
}

// Validate validates the Function by calling Validate on its Name
func (f Function) Validate() error {
    return f.Name.Validate()
}

type LimitBy struct {
    // keys to limit by
    Keys []string `json:"keys"`

@@ -73,53 +73,6 @@ func (f *QueryBuilderFormula) UnmarshalJSON(data []byte) error {
    return nil
}

// Validate validates the QueryBuilderFormula
func (f QueryBuilderFormula) Validate() error {
    // Validate name is not blank
    if strings.TrimSpace(f.Name) == "" {
        return errors.NewInvalidInputf(
            errors.CodeInvalidInput,
            "formula name cannot be blank",
        )
    }

    // Validate expression is not blank
    if strings.TrimSpace(f.Expression) == "" {
        return errors.NewInvalidInputf(
            errors.CodeInvalidInput,
            "formula expression cannot be blank",
        )
    }

    // If having is not null, validate that expression is not blank
    if f.Having != nil {
        if strings.TrimSpace(f.Having.Expression) == "" {
            return errors.NewInvalidInputf(
                errors.CodeInvalidInput,
                "having expression cannot be blank when having clause is present",
            )
        }
    }

    // Validate functions if present
    for i, fn := range f.Functions {
        if err := fn.Validate(); err != nil {
            fnId := fmt.Sprintf("function #%d", i+1)
            if f.Name != "" {
                fnId = fmt.Sprintf("function #%d in formula '%s'", i+1, f.Name)
            }
            return errors.NewInvalidInputf(
                errors.CodeInvalidInput,
                "invalid %s: %s",
                fnId,
                err.Error(),
            )
        }
    }

    return nil
}

// small container to store the query name and index or alias reference
// for a variable in the formula expression
// read below for more details on aggregation references

@@ -5,7 +5,6 @@ import (
    "slices"
    "strconv"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/valuer"
)

@@ -34,37 +33,6 @@ var (
    FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)

// Validate validates that the FunctionName is one of the known types
func (fn FunctionName) Validate() error {
    switch fn {
    case FunctionNameCutOffMin,
        FunctionNameCutOffMax,
        FunctionNameClampMin,
        FunctionNameClampMax,
        FunctionNameAbsolute,
        FunctionNameRunningDiff,
        FunctionNameLog2,
        FunctionNameLog10,
        FunctionNameCumulativeSum,
        FunctionNameEWMA3,
        FunctionNameEWMA5,
        FunctionNameEWMA7,
        FunctionNameMedian3,
        FunctionNameMedian5,
        FunctionNameMedian7,
        FunctionNameTimeShift,
        FunctionNameAnomaly,
        FunctionNameFillZero:
        return nil
    default:
        return errors.NewInvalidInputf(
            errors.CodeInvalidInput,
            "invalid function name: %s",
            fn.StringValue(),
        )
    }
}

// ApplyFunction applies the given function to the result data
func ApplyFunction(fn Function, result *TimeSeries) *TimeSeries {
    // Extract the function name and arguments

@@ -1,9 +1,6 @@
package querybuildertypesv5

import (
    "strings"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)
@@ -19,15 +16,6 @@ var (
    JoinTypeCross = JoinType{valuer.NewString("cross")}
)

func (j JoinType) Validate() error {
    switch j {
    case JoinTypeInner, JoinTypeLeft, JoinTypeRight, JoinTypeFull, JoinTypeCross:
        return nil
    default:
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid join type: %s", j.StringValue())
    }
}

type QueryRef struct {
    Name string `json:"name"`
}
@@ -65,25 +53,6 @@
    Functions []Function `json:"functions,omitempty"`
}

func (q *QueryBuilderJoin) Validate() error {
    if strings.TrimSpace(q.Name) == "" {
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "name is required")
    }
    if strings.TrimSpace(q.Left.Name) == "" {
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "left name is required")
    }
    if strings.TrimSpace(q.Right.Name) == "" {
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "right name is required")
    }
    if err := q.Type.Validate(); err != nil {
        return err
    }
    if strings.TrimSpace(q.On) == "" {
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "on is required")
    }
    return nil
}

// Copy creates a deep copy of QueryBuilderJoin
func (q QueryBuilderJoin) Copy() QueryBuilderJoin {
    c := q
Some files were not shown because too many files have changed in this diff.