Compare commits
20 Commits
v0.58.0-cl...v0.58.0-gi
| Author | SHA1 | Date |
|---|---|---|
|  | 946f30b08a |  |
|  | 831540eaf0 |  |
|  | 22c10f9479 |  |
|  | e748fb0655 |  |
|  | fdc54a62a9 |  |
|  | abe0ab69b0 |  |
|  | e623c92615 |  |
|  | dc5917db01 |  |
|  | d6a7f0b6f4 |  |
|  | 471803115e |  |
|  | 8403a3362d |  |
|  | 64d46bc855 |  |
|  | c9fee27604 |  |
|  | f1b6b2d3d8 |  |
|  | 468f056530 |  |
|  | 7086470ce2 |  |
|  | 352296c6cd |  |
|  | 975307a8b8 |  |
|  | 12377be809 |  |
|  | 9d90b8d19c |  |
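The same commit range can be inspected locally with plain git. This is a small sketch, assuming both tags are present in your clone of the repository; it is not part of the compare view itself.

```bash
# List the 20 commits reachable from v0.58.0-gi but not from v0.58.0-cl,
# one line per commit (abbreviated SHA + subject).
git log --oneline v0.58.0-cl..v0.58.0-gi

# Summarise which files changed between the two tags.
git diff --stat v0.58.0-cl..v0.58.0-gi
```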
.github/workflows/docs.yml (vendored, new file, 83 lines added)

@@ -0,0 +1,83 @@
name: "Update PR labels and Block PR until related docs are shipped for the feature"

on:
  pull_request:
    branches:
      - develop
    types: [opened, edited, labeled, unlabeled]

permissions:
  pull-requests: write
  contents: read

jobs:
  docs_label_check:
    runs-on: ubuntu-latest
    steps:
      - name: Check PR Title and Manage Labels
        uses: actions/github-script@v6
        with:
          script: |
            const prTitle = context.payload.pull_request.title;
            const prNumber = context.payload.pull_request.number;
            const owner = context.repo.owner;
            const repo = context.repo.repo;

            // Fetch the current PR details to get labels
            const pr = await github.rest.pulls.get({
              owner,
              repo,
              pull_number: prNumber
            });

            const labels = pr.data.labels.map(label => label.name);

            if (prTitle.startsWith('feat:')) {
              const hasDocsRequired = labels.includes('docs required');
              const hasDocsShipped = labels.includes('docs shipped');
              const hasDocsNotRequired = labels.includes('docs not required');

              // If "docs not required" is present, skip the checks
              if (hasDocsNotRequired && !hasDocsRequired) {
                console.log("Skipping checks due to 'docs not required' label.");
                return; // Exit the script early
              }

              // If "docs shipped" is present, remove "docs required" if it exists
              if (hasDocsShipped && hasDocsRequired) {
                await github.rest.issues.removeLabel({
                  owner,
                  repo,
                  issue_number: prNumber,
                  name: 'docs required'
                });
                console.log("Removed 'docs required' label.");
              }

              // Add "docs required" label if neither "docs shipped" nor "docs required" are present
              if (!hasDocsRequired && !hasDocsShipped) {
                await github.rest.issues.addLabels({
                  owner,
                  repo,
                  issue_number: prNumber,
                  labels: ['docs required']
                });
                console.log("Added 'docs required' label.");
              }
            }

            // Fetch the updated labels after any changes
            const updatedPr = await github.rest.pulls.get({
              owner,
              repo,
              pull_number: prNumber
            });

            const updatedLabels = updatedPr.data.labels.map(label => label.name);
            const updatedHasDocsRequired = updatedLabels.includes('docs required');
            const updatedHasDocsShipped = updatedLabels.includes('docs shipped');

            // Block PR if "docs required" is still present and "docs shipped" is missing
            if (updatedHasDocsRequired && !updatedHasDocsShipped) {
              core.setFailed("This PR requires documentation. Please remove the 'docs required' label and add the 'docs shipped' label to proceed.");
            }
@@ -31,7 +31,6 @@ import (
    "go.signoz.io/signoz/ee/query-service/rules"
    baseauth "go.signoz.io/signoz/pkg/query-service/auth"
    "go.signoz.io/signoz/pkg/query-service/migrate"
    "go.signoz.io/signoz/pkg/query-service/model"
    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"

    licensepkg "go.signoz.io/signoz/ee/query-service/license"

@@ -348,7 +347,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
    }

    if user.User.OrgId == "" {
        return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims"))
        return nil, basemodel.UnauthorizedError(errors.New("orgId is missing in the claims"))
    }

    return user, nil

@@ -765,8 +764,9 @@ func makeRulesManager(
        Cache:     cache,
        EvalDelay: baseconst.GetEvalDelay(),

        PrepareTaskFunc:  rules.PrepareTaskFunc,
        UseLogsNewSchema: useLogsNewSchema,
        PrepareTaskFunc:     rules.PrepareTaskFunc,
        PrepareTestRuleFunc: rules.TestNotification,
        UseLogsNewSchema:    useLogsNewSchema,
    }

    // create Manager
@@ -1,10 +1,15 @@
package rules

import (
    "context"
    "fmt"
    "time"

    "github.com/google/uuid"
    basemodel "go.signoz.io/signoz/pkg/query-service/model"
    baserules "go.signoz.io/signoz/pkg/query-service/rules"
    "go.signoz.io/signoz/pkg/query-service/utils/labels"
    "go.uber.org/zap"
)

func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
@@ -79,6 +84,106 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
    return task, nil
}

// TestNotification prepares a dummy rule for given rule parameters and
// sends a test notification. returns alert count and error (if any)
func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.ApiError) {

    ctx := context.Background()

    if opts.Rule == nil {
        return 0, basemodel.BadRequest(fmt.Errorf("rule is required"))
    }

    parsedRule := opts.Rule
    var alertname = parsedRule.AlertName
    if alertname == "" {
        // alertname is not mandatory for testing, so picking
        // a random string here
        alertname = uuid.New().String()
    }

    // append name to indicate this is test alert
    parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, baserules.TestAlertPostFix)

    var rule baserules.Rule
    var err error

    if parsedRule.RuleType == baserules.RuleTypeThreshold {

        // add special labels for test alerts
        parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
        parsedRule.Labels[labels.RuleSourceLabel] = ""
        parsedRule.Labels[labels.AlertRuleIdLabel] = ""

        // create a threshold rule
        rule, err = baserules.NewThresholdRule(
            alertname,
            parsedRule,
            opts.FF,
            opts.Reader,
            opts.UseLogsNewSchema,
            baserules.WithSendAlways(),
            baserules.WithSendUnmatched(),
        )

        if err != nil {
            zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
            return 0, basemodel.BadRequest(err)
        }

    } else if parsedRule.RuleType == baserules.RuleTypeProm {

        // create promql rule
        rule, err = baserules.NewPromRule(
            alertname,
            parsedRule,
            opts.Logger,
            opts.Reader,
            opts.ManagerOpts.PqlEngine,
            baserules.WithSendAlways(),
            baserules.WithSendUnmatched(),
        )

        if err != nil {
            zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
            return 0, basemodel.BadRequest(err)
        }
    } else if parsedRule.RuleType == baserules.RuleTypeAnomaly {
        // create anomaly rule
        rule, err = NewAnomalyRule(
            alertname,
            parsedRule,
            opts.FF,
            opts.Reader,
            opts.Cache,
            baserules.WithSendAlways(),
            baserules.WithSendUnmatched(),
        )
        if err != nil {
            zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", rule.Name()), zap.Error(err))
            return 0, basemodel.BadRequest(err)
        }
    } else {
        return 0, basemodel.BadRequest(fmt.Errorf("failed to derive ruletype with given information"))
    }

    // set timestamp to current utc time
    ts := time.Now().UTC()

    count, err := rule.Eval(ctx, ts)
    if err != nil {
        zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
        return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
    }
    alertsFound, ok := count.(int)
    if !ok {
        return 0, basemodel.InternalError(fmt.Errorf("something went wrong"))
    }
    rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), opts.NotifyFunc)

    return alertsFound, nil
}

// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task {
@@ -87,6 +87,8 @@
    "lodash-es": "^4.17.21",
    "lucide-react": "0.379.0",
    "mini-css-extract-plugin": "2.4.5",
    "overlayscrollbars": "^2.8.1",
    "overlayscrollbars-react": "^0.5.6",
    "papaparse": "5.4.1",
    "posthog-js": "1.160.3",
    "rc-tween-one": "3.0.6",
@@ -107,11 +109,10 @@
    "react-query": "3.39.3",
    "react-redux": "^7.2.2",
    "react-router-dom": "^5.2.0",
    "react-router-dom-v5-compat": "6.27.0",
    "react-syntax-highlighter": "15.5.0",
    "react-use": "^17.3.2",
    "react-virtuoso": "4.0.3",
    "overlayscrollbars-react": "^0.5.6",
    "overlayscrollbars": "^2.8.1",
    "redux": "^4.0.5",
    "redux-thunk": "^2.3.0",
    "rehype-raw": "7.0.0",
@@ -43,11 +43,18 @@
|
||||
"option_15min": "15 mins",
|
||||
"option_30min": "30 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_4hours": "4 hours",
|
||||
"option_2hours": "2 hours",
|
||||
"option_3hours": "3 hours",
|
||||
"option_4hours": "4 hours",
|
||||
"option_5hours": "5 hours",
|
||||
"option_6hours": "6 hours",
|
||||
"option_8hours": "8 hours",
|
||||
"option_10hours": "10 hours",
|
||||
"option_12hours": "12 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"option_48hours": "48 hours",
|
||||
"option_72hours": "72 hours",
|
||||
"option_1week": "1 week",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
|
||||
@@ -29,12 +29,24 @@
|
||||
"text_condition1": "Send a notification when",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_1min": "1 min",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_30min": "30 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_2hours": "2 hours",
|
||||
"option_3hours": "3 hours",
|
||||
"option_4hours": "4 hours",
|
||||
"option_5hours": "5 hours",
|
||||
"option_6hours": "6 hours",
|
||||
"option_8hours": "8 hours",
|
||||
"option_10hours": "10 hours",
|
||||
"option_12hours": "12 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"option_48hours": "48 hours",
|
||||
"option_72hours": "72 hours",
|
||||
"option_1week": "1 week",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
|
||||
@@ -47,11 +47,18 @@
|
||||
"option_15min": "15 mins",
|
||||
"option_30min": "30 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_2hours": "2 hours",
|
||||
"option_3hours": "3 hours",
|
||||
"option_4hours": "4 hours",
|
||||
"option_5hours": "5 hours",
|
||||
"option_6hours": "6 hours",
|
||||
"option_8hours": "8 hours",
|
||||
"option_10hours": "10 hours",
|
||||
"option_12hours": "12 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"option_48hours": "48 hours",
|
||||
"option_72hours": "72 hours",
|
||||
"option_1week": "1 week",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
|
||||
@@ -1,30 +1,50 @@
|
||||
{
|
||||
"breadcrumb": "Messaging Queues",
|
||||
"header": "Kafka / Overview",
|
||||
"overview": {
|
||||
"title": "Start sending data in as little as 20 minutes",
|
||||
"subtitle": "Connect and Monitor Your Data Streams"
|
||||
},
|
||||
"configureConsumer": {
|
||||
"title": "Configure Consumer",
|
||||
"description": "Add consumer data sources to gain insights and enhance monitoring.",
|
||||
"button": "Get Started"
|
||||
},
|
||||
"configureProducer": {
|
||||
"title": "Configure Producer",
|
||||
"description": "Add producer data sources to gain insights and enhance monitoring.",
|
||||
"button": "Get Started"
|
||||
},
|
||||
"monitorKafka": {
|
||||
"title": "Monitor kafka",
|
||||
"description": "Add your Kafka source to gain insights and enhance activity tracking.",
|
||||
"button": "Get Started"
|
||||
},
|
||||
"summarySection": {
|
||||
"viewDetailsButton": "View Details"
|
||||
},
|
||||
"confirmModal": {
|
||||
"content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.",
|
||||
"okText": "Proceed"
|
||||
}
|
||||
}
|
||||
"breadcrumb": "Messaging Queues",
|
||||
"header": "Kafka / Overview",
|
||||
"overview": {
|
||||
"title": "Start sending data in as little as 20 minutes",
|
||||
"subtitle": "Connect and Monitor Your Data Streams"
|
||||
},
|
||||
"configureConsumer": {
|
||||
"title": "Configure Consumer",
|
||||
"description": "Add consumer data sources to gain insights and enhance monitoring.",
|
||||
"button": "Get Started"
|
||||
},
|
||||
"configureProducer": {
|
||||
"title": "Configure Producer",
|
||||
"description": "Add producer data sources to gain insights and enhance monitoring.",
|
||||
"button": "Get Started"
|
||||
},
|
||||
"monitorKafka": {
|
||||
"title": "Monitor kafka",
|
||||
"description": "Add your Kafka source to gain insights and enhance activity tracking.",
|
||||
"button": "Get Started"
|
||||
},
|
||||
"summarySection": {
|
||||
"viewDetailsButton": "View Details",
|
||||
"consumer": {
|
||||
"title": "Consumer lag view",
|
||||
"description": "Connect and Monitor Your Data Streams"
|
||||
},
|
||||
"producer": {
|
||||
"title": "Producer latency view",
|
||||
"description": "Connect and Monitor Your Data Streams"
|
||||
},
|
||||
"partition": {
|
||||
"title": "Partition Latency view",
|
||||
"description": "Connect and Monitor Your Data Streams"
|
||||
},
|
||||
"dropRate": {
|
||||
"title": "Drop Rate view",
|
||||
"description": "Connect and Monitor Your Data Streams"
|
||||
}
|
||||
},
|
||||
"confirmModal": {
|
||||
"content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.",
|
||||
"okText": "Proceed"
|
||||
},
|
||||
"overviewSummarySection": {
|
||||
"title": "Monitor Your Data Streams",
|
||||
"subtitle": "Monitor key Kafka metrics like consumer lag and latency to ensure efficient data flow and troubleshoot in real time."
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,12 +29,24 @@
|
||||
"text_condition1": "Send a notification when",
|
||||
"text_condition2": "the threshold",
|
||||
"text_condition3": "during the last",
|
||||
"option_1min": "1 min",
|
||||
"option_5min": "5 mins",
|
||||
"option_10min": "10 mins",
|
||||
"option_15min": "15 mins",
|
||||
"option_30min": "30 mins",
|
||||
"option_60min": "60 mins",
|
||||
"option_2hours": "2 hours",
|
||||
"option_3hours": "3 hours",
|
||||
"option_4hours": "4 hours",
|
||||
"option_5hours": "5 hours",
|
||||
"option_6hours": "6 hours",
|
||||
"option_8hours": "8 hours",
|
||||
"option_10hours": "10 hours",
|
||||
"option_12hours": "12 hours",
|
||||
"option_24hours": "24 hours",
|
||||
"option_48hours": "48 hours",
|
||||
"option_72hours": "72 hours",
|
||||
"option_1week": "1 week",
|
||||
"field_threshold": "Alert Threshold",
|
||||
"option_allthetimes": "all the times",
|
||||
"option_atleastonce": "at least once",
|
||||
|
||||
@@ -28,6 +28,7 @@ import { Suspense, useEffect, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { Route, Router, Switch } from 'react-router-dom';
|
||||
import { CompatRouter } from 'react-router-dom-v5-compat';
|
||||
import { Dispatch } from 'redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
@@ -292,36 +293,38 @@ function App(): JSX.Element {
|
||||
return (
|
||||
<ConfigProvider theme={themeConfig}>
|
||||
<Router history={history}>
|
||||
<NotificationProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
<CompatRouter>
|
||||
<NotificationProvider>
|
||||
<PrivateRoute>
|
||||
<ResourceProvider>
|
||||
<QueryBuilderProvider>
|
||||
<DashboardProvider>
|
||||
<KeyboardHotkeysProvider>
|
||||
<AlertRuleProvider>
|
||||
<AppLayout>
|
||||
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
|
||||
<Switch>
|
||||
{routes.map(({ path, component, exact }) => (
|
||||
<Route
|
||||
key={`${path}`}
|
||||
exact={exact}
|
||||
path={path}
|
||||
component={component}
|
||||
/>
|
||||
))}
|
||||
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</NotificationProvider>
|
||||
<Route path="*" component={NotFound} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</AppLayout>
|
||||
</AlertRuleProvider>
|
||||
</KeyboardHotkeysProvider>
|
||||
</DashboardProvider>
|
||||
</QueryBuilderProvider>
|
||||
</ResourceProvider>
|
||||
</PrivateRoute>
|
||||
</NotificationProvider>
|
||||
</CompatRouter>
|
||||
</Router>
|
||||
</ConfigProvider>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,39 @@
import { ApiBaseInstance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { ErrorResponse, SuccessResponse } from 'types/api';

export interface OnboardingStatusResponse {
  status: string;
  data: {
    attribute?: string;
    error_message?: string;
    status?: string;
  }[];
}

const getOnboardingStatus = async (props: {
  start: number;
  end: number;
  endpointService?: string;
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
  const { endpointService, ...rest } = props;
  try {
    const response = await ApiBaseInstance.post(
      `/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
      rest,
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data,
    };
  } catch (error) {
    return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG);
  }
};

export default getOnboardingStatus;
@@ -17,6 +17,7 @@ describe('getLogIndicatorType', () => {
|
||||
body: 'Sample log Message',
|
||||
resources_string: {},
|
||||
attributesString: {},
|
||||
scope_string: {},
|
||||
attributes_string: {},
|
||||
attributesInt: {},
|
||||
attributesFloat: {},
|
||||
@@ -40,6 +41,7 @@ describe('getLogIndicatorType', () => {
|
||||
body: 'Sample log Message',
|
||||
resources_string: {},
|
||||
attributesString: {},
|
||||
scope_string: {},
|
||||
attributes_string: {},
|
||||
attributesInt: {},
|
||||
attributesFloat: {},
|
||||
@@ -62,6 +64,7 @@ describe('getLogIndicatorType', () => {
|
||||
body: 'Sample log Message',
|
||||
resources_string: {},
|
||||
attributesString: {},
|
||||
scope_string: {},
|
||||
attributes_string: {},
|
||||
attributesInt: {},
|
||||
attributesFloat: {},
|
||||
@@ -83,6 +86,7 @@ describe('getLogIndicatorType', () => {
|
||||
body: 'Sample log',
|
||||
resources_string: {},
|
||||
attributesString: {},
|
||||
scope_string: {},
|
||||
attributes_string: {
|
||||
log_level: 'INFO' as never,
|
||||
},
|
||||
@@ -112,6 +116,7 @@ describe('getLogIndicatorTypeForTable', () => {
|
||||
attributesString: {},
|
||||
attributes_string: {},
|
||||
attributesInt: {},
|
||||
scope_string: {},
|
||||
attributesFloat: {},
|
||||
severity_text: 'WARN',
|
||||
};
|
||||
@@ -130,6 +135,7 @@ describe('getLogIndicatorTypeForTable', () => {
|
||||
severity_number: 0,
|
||||
body: 'Sample log message',
|
||||
resources_string: {},
|
||||
scope_string: {},
|
||||
attributesString: {},
|
||||
attributes_string: {},
|
||||
attributesInt: {},
|
||||
@@ -166,6 +172,7 @@ describe('logIndicatorBySeverityNumber', () => {
|
||||
body: 'Sample log Message',
|
||||
resources_string: {},
|
||||
attributesString: {},
|
||||
scope_string: {},
|
||||
attributes_string: {},
|
||||
attributesInt: {},
|
||||
attributesFloat: {},
|
||||
|
||||
@@ -37,4 +37,8 @@ export enum QueryParams {
  partition = 'partition',
  selectedTimelineQuery = 'selectedTimelineQuery',
  ruleType = 'ruleType',
  configDetail = 'configDetail',
  getStartedSource = 'getStartedSource',
  getStartedSourceService = 'getStartedSourceService',
  mqServiceView = 'mqServiceView',
}
@@ -94,6 +94,7 @@ export const anamolyAlertDefaults: AlertDef = {
|
||||
matchType: defaultMatchType,
|
||||
algorithm: defaultAlgorithm,
|
||||
seasonality: defaultSeasonality,
|
||||
target: 3,
|
||||
},
|
||||
labels: {
|
||||
severity: 'warning',
|
||||
|
||||
@@ -181,8 +181,18 @@ function RuleOptions({
|
||||
<Select.Option value="10m0s">{t('option_10min')}</Select.Option>
|
||||
<Select.Option value="15m0s">{t('option_15min')}</Select.Option>
|
||||
<Select.Option value="1h0m0s">{t('option_60min')}</Select.Option>
|
||||
<Select.Option value="2h0m0s">{t('option_2hours')}</Select.Option>
|
||||
<Select.Option value="3h0m0s">{t('option_3hours')}</Select.Option>
|
||||
<Select.Option value="4h0m0s">{t('option_4hours')}</Select.Option>
|
||||
<Select.Option value="5h0m0s">{t('option_5hours')}</Select.Option>
|
||||
<Select.Option value="6h0m0s">{t('option_6hours')}</Select.Option>
|
||||
<Select.Option value="8h0m0s">{t('option_8hours')}</Select.Option>
|
||||
<Select.Option value="10h0m0s">{t('option_10hours')}</Select.Option>
|
||||
<Select.Option value="12h0m0s">{t('option_12hours')}</Select.Option>
|
||||
<Select.Option value="24h0m0s">{t('option_24hours')}</Select.Option>
|
||||
<Select.Option value="48h0m0s">{t('option_48hours')}</Select.Option>
|
||||
<Select.Option value="72h0m0s">{t('option_72hours')}</Select.Option>
|
||||
<Select.Option value="168h0m0s">{t('option_1week')}</Select.Option>
|
||||
</InlineSelect>
|
||||
);
|
||||
|
||||
|
||||
@@ -138,6 +138,9 @@ function LabelSelect({
|
||||
if (e.key === 'Enter' || e.code === 'Enter' || e.key === ':') {
|
||||
send('NEXT');
|
||||
}
|
||||
if (state.value === 'Idle') {
|
||||
send('NEXT');
|
||||
}
|
||||
}}
|
||||
bordered={false}
|
||||
value={currentVal as never}
|
||||
|
||||
@@ -157,6 +157,11 @@ export const getFieldAttributes = (field: string): IFieldAttributes => {
|
||||
const stringWithoutPrefix = field.slice('resources_'.length);
|
||||
const parts = splitOnce(stringWithoutPrefix, '.');
|
||||
[dataType, newField] = parts;
|
||||
} else if (field.startsWith('scope_string')) {
|
||||
logType = MetricsType.Scope;
|
||||
const stringWithoutPrefix = field.slice('scope_'.length);
|
||||
const parts = splitOnce(stringWithoutPrefix, '.');
|
||||
[dataType, newField] = parts;
|
||||
}
|
||||
|
||||
return { dataType, newField, logType };
|
||||
@@ -187,6 +192,7 @@ export const aggregateAttributesResourcesToString = (logData: ILog): string => {
|
||||
traceId: logData.traceId,
|
||||
attributes: {},
|
||||
resources: {},
|
||||
scope: {},
|
||||
severity_text: logData.severity_text,
|
||||
severity_number: logData.severity_number,
|
||||
};
|
||||
@@ -198,6 +204,9 @@ export const aggregateAttributesResourcesToString = (logData: ILog): string => {
|
||||
} else if (key.startsWith('resources_')) {
|
||||
outputJson.resources = outputJson.resources || {};
|
||||
Object.assign(outputJson.resources, logData[key as keyof ILog]);
|
||||
} else if (key.startsWith('scope_string')) {
|
||||
outputJson.scope = outputJson.scope || {};
|
||||
Object.assign(outputJson.scope, logData[key as keyof ILog]);
|
||||
} else {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
|
||||
@@ -53,6 +53,7 @@ export enum KeyOperationTableHeader {

export enum MetricsType {
  Tag = 'tag',
  Resource = 'resource',
  Scope = 'scope',
}

export enum WidgetKeys {
@@ -0,0 +1,32 @@

Once you are done instrumenting your Java application, you can run it using the commands below.

**Note:**
- Ensure you have Java and Maven installed, and that your consumer applications are compiled and ready to run.

**Run Consumer App with Java Agent:**

```bash
java -javaagent:/path/to/opentelemetry-javaagent.jar \
    -Dotel.service.name=consumer-svc \
    -Dotel.traces.exporter=otlp \
    -Dotel.metrics.exporter=otlp \
    -Dotel.logs.exporter=otlp \
    -Dotel.instrumentation.kafka.producer-propagation.enabled=true \
    -Dotel.instrumentation.kafka.experimental-span-attributes=true \
    -Dotel.instrumentation.kafka.metric-reporter.enabled=true \
    -jar /path/to/your/consumer.jar
```

- `<path>`: update it to the path where you downloaded the Java agent JAR in the previous step.
- `<my-app>`: the JAR file of your application.

**Note:**
- If you are dockerising your application, make sure to dockerise it along with the OpenTelemetry instrumentation added in the previous step (a minimal containerisation sketch follows this section).

If you encounter any difficulties, please consult the [troubleshooting section](https://signoz.io/docs/instrumentation/springboot/#troubleshooting-your-installation) for assistance.
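The dockerising note above does not show a concrete command. Below is a minimal sketch of one way to attach the same agent and flags to a containerised consumer, using the standard `JAVA_TOOL_OPTIONS` and `OTEL_*` environment variables; the image name, mounted agent path, and collector endpoint are illustrative assumptions, not values from the original instructions.

```bash
# Illustrative image name, mount path, and endpoint; adjust to your build.
docker run \
  -v /path/to/opentelemetry-javaagent.jar:/otel/opentelemetry-javaagent.jar \
  -e JAVA_TOOL_OPTIONS="-javaagent:/otel/opentelemetry-javaagent.jar -Dotel.instrumentation.kafka.producer-propagation.enabled=true -Dotel.instrumentation.kafka.experimental-span-attributes=true -Dotel.instrumentation.kafka.metric-reporter.enabled=true" \
  -e OTEL_SERVICE_NAME=consumer-svc \
  -e OTEL_TRACES_EXPORTER=otlp \
  -e OTEL_METRICS_EXPORTER=otlp \
  -e OTEL_LOGS_EXPORTER=otlp \
  -e OTEL_EXPORTER_OTLP_ENDPOINT="http://<collector-host>:4317" \
  my-consumer-image:latest
```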
@@ -0,0 +1,29 @@

Once you are done instrumenting your Java application, you can run it using the commands below.

**Note:**
- Ensure you have Java and Maven installed, and that your producer applications are compiled and ready to run.

**Run Producer App with Java Agent:**

```bash
java -javaagent:/path/to/opentelemetry-javaagent.jar \
    -Dotel.service.name=producer-svc \
    -Dotel.traces.exporter=otlp \
    -Dotel.metrics.exporter=otlp \
    -Dotel.logs.exporter=otlp \
    -jar /path/to/your/producer.jar
```

- `<path>`: update it to the path where you downloaded the Java agent JAR in the previous step.
- `<my-app>`: the JAR file of your application.

**Note:**
- If you are dockerising your application, make sure to dockerise it along with the OpenTelemetry instrumentation added in the previous step.

If you encounter any difficulties, please consult the [troubleshooting section](https://signoz.io/docs/instrumentation/springboot/#troubleshooting-your-installation) for assistance.
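As an aside, if you run the producer straight from Maven during development instead of from a packaged JAR, the same agent and flags can be forwarded through the Spring Boot Maven plugin. This is a hedged sketch assuming the standard `spring-boot-maven-plugin`; it is not part of the original instructions.

```bash
# Development-time alternative to the packaged-JAR command above.
# spring-boot.run.jvmArguments forwards JVM flags to the forked application process.
mvn spring-boot:run \
  -Dspring-boot.run.jvmArguments="-javaagent:/path/to/opentelemetry-javaagent.jar -Dotel.service.name=producer-svc -Dotel.traces.exporter=otlp -Dotel.metrics.exporter=otlp -Dotel.logs.exporter=otlp"
```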
@@ -6,11 +6,16 @@ import {
|
||||
LoadingOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import Header from 'container/OnboardingContainer/common/Header/Header';
|
||||
import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext';
|
||||
import { useOnboardingStatus } from 'hooks/messagingQueue / onboarding/useOnboardingStatus';
|
||||
import { useQueryService } from 'hooks/useQueryService';
|
||||
import useResourceAttribute from 'hooks/useResourceAttribute';
|
||||
import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import MessagingQueueHealthCheck from 'pages/MessagingQueues/MessagingQueueHealthCheck/MessagingQueueHealthCheck';
|
||||
import { getAttributeDataFromOnboardingStatus } from 'pages/MessagingQueues/MessagingQueuesUtils';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
@@ -27,6 +32,12 @@ export default function ConnectionStatus(): JSX.Element {
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const urlQuery = useUrlQuery();
|
||||
const getStartedSource = urlQuery.get(QueryParams.getStartedSource);
|
||||
const getStartedSourceService = urlQuery.get(
|
||||
QueryParams.getStartedSourceService,
|
||||
);
|
||||
|
||||
const {
|
||||
serviceName,
|
||||
selectedDataSource,
|
||||
@@ -57,8 +68,69 @@ export default function ConnectionStatus(): JSX.Element {
|
||||
maxTime,
|
||||
selectedTime,
|
||||
selectedTags,
|
||||
options: {
|
||||
enabled: getStartedSource !== 'kafka',
|
||||
},
|
||||
});
|
||||
|
||||
const [pollInterval, setPollInterval] = useState<number | false>(10000);
|
||||
const {
|
||||
data: onbData,
|
||||
error: onbErr,
|
||||
isFetching: onbFetching,
|
||||
} = useOnboardingStatus(
|
||||
{
|
||||
enabled: getStartedSource === 'kafka',
|
||||
refetchInterval: pollInterval,
|
||||
},
|
||||
getStartedSourceService || '',
|
||||
'query-key-onboarding-status',
|
||||
);
|
||||
|
||||
const [
|
||||
shouldRetryOnboardingCall,
|
||||
setShouldRetryOnboardingCall,
|
||||
] = useState<boolean>(false);
|
||||
|
||||
useEffect(() => {
|
||||
// runs only when the caller is coming from 'kafka' i.e. coming from Messaging Queues - setup helper
|
||||
if (getStartedSource === 'kafka') {
|
||||
if (onbData?.statusCode !== 200) {
|
||||
setShouldRetryOnboardingCall(true);
|
||||
} else if (onbData?.payload?.status === 'success') {
|
||||
const attributeData = getAttributeDataFromOnboardingStatus(
|
||||
onbData?.payload,
|
||||
);
|
||||
if (attributeData.overallStatus === 'success') {
|
||||
setLoading(false);
|
||||
setIsReceivingData(true);
|
||||
setPollInterval(false);
|
||||
} else {
|
||||
setShouldRetryOnboardingCall(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}, [
|
||||
shouldRetryOnboardingCall,
|
||||
onbData,
|
||||
onbErr,
|
||||
onbFetching,
|
||||
getStartedSource,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (retryCount < 0 && getStartedSource === 'kafka') {
|
||||
setPollInterval(false);
|
||||
setLoading(false);
|
||||
}
|
||||
}, [retryCount, getStartedSource]);
|
||||
|
||||
useEffect(() => {
|
||||
if (getStartedSource === 'kafka' && !onbFetching) {
|
||||
setRetryCount((prevCount) => prevCount - 1);
|
||||
}
|
||||
}, [getStartedSource, onbData, onbFetching]);
|
||||
|
||||
const renderDocsReference = (): JSX.Element => {
|
||||
switch (selectedDataSource?.name) {
|
||||
case 'java':
|
||||
@@ -192,25 +264,27 @@ export default function ConnectionStatus(): JSX.Element {
|
||||
useEffect(() => {
|
||||
let pollingTimer: string | number | NodeJS.Timer | undefined;
|
||||
|
||||
if (loading) {
|
||||
pollingTimer = setInterval(() => {
|
||||
// Trigger a refetch with the updated parameters
|
||||
const updatedMinTime = (Date.now() - 15 * 60 * 1000) * 1000000;
|
||||
const updatedMaxTime = Date.now() * 1000000;
|
||||
if (getStartedSource !== 'kafka') {
|
||||
if (loading) {
|
||||
pollingTimer = setInterval(() => {
|
||||
// Trigger a refetch with the updated parameters
|
||||
const updatedMinTime = (Date.now() - 15 * 60 * 1000) * 1000000;
|
||||
const updatedMaxTime = Date.now() * 1000000;
|
||||
|
||||
const payload = {
|
||||
maxTime: updatedMaxTime,
|
||||
minTime: updatedMinTime,
|
||||
selectedTime,
|
||||
};
|
||||
const payload = {
|
||||
maxTime: updatedMaxTime,
|
||||
minTime: updatedMinTime,
|
||||
selectedTime,
|
||||
};
|
||||
|
||||
dispatch({
|
||||
type: UPDATE_TIME_INTERVAL,
|
||||
payload,
|
||||
});
|
||||
}, pollingInterval); // Same interval as pollingInterval
|
||||
} else if (!loading && pollingTimer) {
|
||||
clearInterval(pollingTimer);
|
||||
dispatch({
|
||||
type: UPDATE_TIME_INTERVAL,
|
||||
payload,
|
||||
});
|
||||
}, pollingInterval); // Same interval as pollingInterval
|
||||
} else if (!loading && pollingTimer) {
|
||||
clearInterval(pollingTimer);
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up the interval when the component unmounts
|
||||
@@ -221,15 +295,24 @@ export default function ConnectionStatus(): JSX.Element {
|
||||
}, [refetch, selectedTags, selectedTime, loading]);
|
||||
|
||||
useEffect(() => {
|
||||
verifyApplicationData(data);
|
||||
if (getStartedSource !== 'kafka') {
|
||||
verifyApplicationData(data);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [isServiceLoading, data, error, isError]);
|
||||
|
||||
useEffect(() => {
|
||||
refetch();
|
||||
if (getStartedSource !== 'kafka') {
|
||||
refetch();
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const isQueryServiceLoading = useMemo(
|
||||
() => isServiceLoading || loading || onbFetching,
|
||||
[isServiceLoading, loading, onbFetching],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="connection-status-container">
|
||||
<div className="full-docs-link">{renderDocsReference()}</div>
|
||||
@@ -250,30 +333,42 @@ export default function ConnectionStatus(): JSX.Element {
|
||||
<div className="label"> Status </div>
|
||||
|
||||
<div className="status">
|
||||
{(loading || isServiceLoading) && <LoadingOutlined />}
|
||||
{!(loading || isServiceLoading) && isReceivingData && (
|
||||
<>
|
||||
<CheckCircleTwoTone twoToneColor="#52c41a" />
|
||||
<span> Success </span>
|
||||
</>
|
||||
)}
|
||||
{!(loading || isServiceLoading) && !isReceivingData && (
|
||||
<>
|
||||
<CloseCircleTwoTone twoToneColor="#e84749" />
|
||||
<span> Failed </span>
|
||||
</>
|
||||
)}
|
||||
{isQueryServiceLoading && <LoadingOutlined />}
|
||||
{!isQueryServiceLoading &&
|
||||
isReceivingData &&
|
||||
(getStartedSource !== 'kafka' ? (
|
||||
<>
|
||||
<CheckCircleTwoTone twoToneColor="#52c41a" />
|
||||
<span> Success </span>
|
||||
</>
|
||||
) : (
|
||||
<MessagingQueueHealthCheck
|
||||
serviceToInclude={[getStartedSourceService || '']}
|
||||
/>
|
||||
))}
|
||||
{!isQueryServiceLoading &&
|
||||
!isReceivingData &&
|
||||
(getStartedSource !== 'kafka' ? (
|
||||
<>
|
||||
<CloseCircleTwoTone twoToneColor="#e84749" />
|
||||
<span> Failed </span>
|
||||
</>
|
||||
) : (
|
||||
<MessagingQueueHealthCheck
|
||||
serviceToInclude={[getStartedSourceService || '']}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div className="details-info">
|
||||
<div className="label"> Details </div>
|
||||
|
||||
<div className="details">
|
||||
{(loading || isServiceLoading) && <div> Waiting for Update </div>}
|
||||
{!(loading || isServiceLoading) && isReceivingData && (
|
||||
{isQueryServiceLoading && <div> Waiting for Update </div>}
|
||||
{!isQueryServiceLoading && isReceivingData && (
|
||||
<div> Received data from the application successfully. </div>
|
||||
)}
|
||||
{!(loading || isServiceLoading) && !isReceivingData && (
|
||||
{!isQueryServiceLoading && !isReceivingData && (
|
||||
<div> Could not detect the install </div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -74,4 +74,11 @@ div[class*='-setup-instructions-container'] {
|
||||
.dataSourceName {
|
||||
color: var(--bg-slate-500);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.supported-languages-container {
|
||||
.disabled {
|
||||
cursor: not-allowed;
|
||||
opacity: 0.5;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,15 +6,21 @@ import { LoadingOutlined } from '@ant-design/icons';
|
||||
import { Button, Card, Form, Input, Select, Space, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext';
|
||||
import { useCases } from 'container/OnboardingContainer/OnboardingContainer';
|
||||
import {
|
||||
ModulesMap,
|
||||
useCases,
|
||||
} from 'container/OnboardingContainer/OnboardingContainer';
|
||||
import {
|
||||
getDataSources,
|
||||
getSupportedFrameworks,
|
||||
hasFrameworks,
|
||||
messagingQueueKakfaSupportedDataSources,
|
||||
} from 'container/OnboardingContainer/utils/dataSourceUtils';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { Blocks, Check } from 'lucide-react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
@@ -33,6 +39,8 @@ export default function DataSource(): JSX.Element {
|
||||
const { t } = useTranslation(['common']);
|
||||
const history = useHistory();
|
||||
|
||||
const getStartedSource = useUrlQuery().get(QueryParams.getStartedSource);
|
||||
|
||||
const {
|
||||
serviceName,
|
||||
selectedModule,
|
||||
@@ -44,6 +52,9 @@ export default function DataSource(): JSX.Element {
|
||||
updateSelectedFramework,
|
||||
} = useOnboardingContext();
|
||||
|
||||
const isKafkaAPM =
|
||||
getStartedSource === 'kafka' && selectedModule?.id === ModulesMap.APM;
|
||||
|
||||
const [supportedDataSources, setSupportedDataSources] = useState<
|
||||
DataSourceType[]
|
||||
>([]);
|
||||
@@ -150,13 +161,19 @@ export default function DataSource(): JSX.Element {
|
||||
className={cx(
|
||||
'supported-language',
|
||||
selectedDataSource?.name === dataSource.name ? 'selected' : '',
|
||||
isKafkaAPM &&
|
||||
!messagingQueueKakfaSupportedDataSources.includes(dataSource?.id || '')
|
||||
? 'disabled'
|
||||
: '',
|
||||
)}
|
||||
key={dataSource.name}
|
||||
onClick={(): void => {
|
||||
updateSelectedFramework(null);
|
||||
updateSelectedEnvironment(null);
|
||||
updateSelectedDataSource(dataSource);
|
||||
form.setFieldsValue({ selectFramework: null });
|
||||
if (!isKafkaAPM) {
|
||||
updateSelectedFramework(null);
|
||||
updateSelectedEnvironment(null);
|
||||
updateSelectedDataSource(dataSource);
|
||||
form.setFieldsValue({ selectFramework: null });
|
||||
}
|
||||
}}
|
||||
>
|
||||
<div>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
/* eslint-disable @typescript-eslint/ban-ts-comment */
|
||||
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { ApmDocFilePaths } from 'container/OnboardingContainer/constants/apmDocFilePaths';
|
||||
import { AwsMonitoringDocFilePaths } from 'container/OnboardingContainer/constants/awsMonitoringDocFilePaths';
|
||||
import { AzureMonitoringDocFilePaths } from 'container/OnboardingContainer/constants/azureMonitoringDocFilePaths';
|
||||
@@ -10,6 +11,7 @@ import {
|
||||
useOnboardingContext,
|
||||
} from 'container/OnboardingContainer/context/OnboardingContext';
|
||||
import { ModulesMap } from 'container/OnboardingContainer/OnboardingContainer';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { useEffect, useState } from 'react';
|
||||
|
||||
export interface IngestionInfoProps {
|
||||
@@ -31,6 +33,12 @@ export default function MarkdownStep(): JSX.Element {
|
||||
|
||||
const [markdownContent, setMarkdownContent] = useState('');
|
||||
|
||||
const urlQuery = useUrlQuery();
|
||||
const getStartedSource = urlQuery.get(QueryParams.getStartedSource);
|
||||
const getStartedSourceService = urlQuery.get(
|
||||
QueryParams.getStartedSourceService,
|
||||
);
|
||||
|
||||
const { step } = activeStep;
|
||||
|
||||
const getFilePath = (): any => {
|
||||
@@ -54,6 +62,12 @@ export default function MarkdownStep(): JSX.Element {
|
||||
|
||||
path += `_${step?.id}`;
|
||||
|
||||
if (
|
||||
getStartedSource === 'kafka' &&
|
||||
path === 'APM_java_springBoot_kubernetes_recommendedSteps_runApplication' // todo: Sagar - Make this generic logic in followup PRs
|
||||
) {
|
||||
path += `_${getStartedSourceService}`;
|
||||
}
|
||||
return path;
|
||||
};
|
||||
|
||||
|
||||
@@ -252,6 +252,8 @@ import APM_java_springBoot_docker_recommendedSteps_runApplication from '../Modul
|
||||
import APM_java_springBoot_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-installOtelCollector.md';
|
||||
import APM_java_springBoot_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-instrumentApplication.md';
|
||||
import APM_java_springBoot_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication.md';
|
||||
import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_consumers from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-consumers.md';
|
||||
import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producers from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producers.md';
|
||||
// SpringBoot-LinuxAMD64-quickstart
|
||||
import APM_java_springBoot_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Java/md-docs/SpringBoot/LinuxAMD64/QuickStart/springBoot-linuxamd64-quickStart-instrumentApplication.md';
|
||||
import APM_java_springBoot_linuxAMD64_quickStart_runApplication from '../Modules/APM/Java/md-docs/SpringBoot/LinuxAMD64/QuickStart/springBoot-linuxamd64-quickStart-runApplication.md';
|
||||
@@ -1053,6 +1055,8 @@ export const ApmDocFilePaths = {
|
||||
APM_java_springBoot_kubernetes_recommendedSteps_setupOtelCollector,
|
||||
APM_java_springBoot_kubernetes_recommendedSteps_instrumentApplication,
|
||||
APM_java_springBoot_kubernetes_recommendedSteps_runApplication,
|
||||
APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producers,
|
||||
APM_java_springBoot_kubernetes_recommendedSteps_runApplication_consumers,
|
||||
|
||||
// SpringBoot-LinuxAMD64-recommended
|
||||
APM_java_springBoot_linuxAMD64_recommendedSteps_setupOtelCollector,
|
||||
|
||||
@@ -399,3 +399,5 @@ export const moduleRouteMap = {
|
||||
[ModulesMap.AwsMonitoring]: ROUTES.GET_STARTED_AWS_MONITORING,
|
||||
[ModulesMap.AzureMonitoring]: ROUTES.GET_STARTED_AZURE_MONITORING,
|
||||
};
|
||||
|
||||
export const messagingQueueKakfaSupportedDataSources = ['java'];
|
||||
|
||||
@@ -82,7 +82,7 @@ export function AboutSigNozQuestions({
|
||||
otherInterestInSignoz,
|
||||
});
|
||||
|
||||
logEvent('User Onboarding: About SigNoz Questions Answered', {
|
||||
logEvent('Org Onboarding: Answered', {
|
||||
hearAboutSignoz,
|
||||
otherAboutSignoz,
|
||||
interestInSignoz,
|
||||
|
||||
@@ -161,6 +161,13 @@ function InviteTeamMembers({
|
||||
|
||||
setInviteUsersSuccessResponse(successfulInvites);
|
||||
|
||||
logEvent('Org Onboarding: Invite Team Members Success', {
|
||||
teamMembers: teamMembersToInvite,
|
||||
totalInvites: inviteUsersResponse.summary.total_invites,
|
||||
successfulInvites: inviteUsersResponse.summary.successful_invites,
|
||||
failedInvites: inviteUsersResponse.summary.failed_invites,
|
||||
});
|
||||
|
||||
setTimeout(() => {
|
||||
setDisableNextButton(false);
|
||||
onNext();
|
||||
@@ -172,6 +179,13 @@ function InviteTeamMembers({
|
||||
|
||||
setInviteUsersSuccessResponse(successfulInvites);
|
||||
|
||||
logEvent('Org Onboarding: Invite Team Members Partial Success', {
|
||||
teamMembers: teamMembersToInvite,
|
||||
totalInvites: inviteUsersResponse.summary.total_invites,
|
||||
successfulInvites: inviteUsersResponse.summary.successful_invites,
|
||||
failedInvites: inviteUsersResponse.summary.failed_invites,
|
||||
});
|
||||
|
||||
if (inviteUsersResponse.failed_invites.length > 0) {
|
||||
setHasErrors(true);
|
||||
|
||||
@@ -182,27 +196,21 @@ function InviteTeamMembers({
|
||||
}
|
||||
};
|
||||
|
||||
const {
|
||||
mutate: sendInvites,
|
||||
isLoading: isSendingInvites,
|
||||
data: inviteUsersApiResponseData,
|
||||
} = useMutation(inviteUsers, {
|
||||
onSuccess: (response: SuccessResponse<InviteUsersResponse>): void => {
|
||||
logEvent('User Onboarding: Invite Team Members Sent', {
|
||||
teamMembers: teamMembersToInvite,
|
||||
});
|
||||
const { mutate: sendInvites, isLoading: isSendingInvites } = useMutation(
|
||||
inviteUsers,
|
||||
{
|
||||
onSuccess: (response: SuccessResponse<InviteUsersResponse>): void => {
|
||||
handleInviteUsersSuccess(response);
|
||||
},
|
||||
onError: (error: AxiosError): void => {
|
||||
logEvent('Org Onboarding: Invite Team Members Failed', {
|
||||
teamMembers: teamMembersToInvite,
|
||||
});
|
||||
|
||||
handleInviteUsersSuccess(response);
|
||||
handleError(error);
|
||||
},
|
||||
},
|
||||
onError: (error: AxiosError): void => {
|
||||
logEvent('User Onboarding: Invite Team Members Failed', {
|
||||
teamMembers: teamMembersToInvite,
|
||||
error,
|
||||
});
|
||||
|
||||
handleError(error);
|
||||
},
|
||||
});
|
||||
);
|
||||
|
||||
const handleNext = (): void => {
|
||||
if (validateAllUsers()) {
|
||||
@@ -254,9 +262,8 @@ function InviteTeamMembers({
|
||||
};
|
||||
|
||||
const handleDoLater = (): void => {
|
||||
logEvent('User Onboarding: Invite Team Members Skipped', {
|
||||
teamMembers: teamMembersToInvite,
|
||||
apiResponse: inviteUsersApiResponseData,
|
||||
logEvent('Org Onboarding: Clicked Do Later', {
|
||||
currentPageID: 4,
|
||||
});
|
||||
|
||||
onNext();
|
||||
|
||||
@@ -122,7 +122,7 @@ function OptimiseSignozNeeds({
|
||||
}, [services, hostsPerDay, logsPerDay]);
|
||||
|
||||
const handleOnNext = (): void => {
|
||||
logEvent('User Onboarding: Optimise SigNoz Needs Answered', {
|
||||
logEvent('Org Onboarding: Answered', {
|
||||
logsPerDay,
|
||||
hostsPerDay,
|
||||
services,
|
||||
@@ -144,10 +144,8 @@ function OptimiseSignozNeeds({
|
||||
|
||||
onWillDoLater();
|
||||
|
||||
logEvent('User Onboarding: Optimise SigNoz Needs Skipped', {
|
||||
logsPerDay: 0,
|
||||
hostsPerDay: 0,
|
||||
services: 0,
|
||||
logEvent('Org Onboarding: Clicked Do Later', {
|
||||
currentPageID: 3,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
@@ -94,6 +94,13 @@ function OrgQuestions({
|
||||
organisationName === '' ||
|
||||
orgDetails.organisationName === organisationName
|
||||
) {
|
||||
logEvent('Org Onboarding: Answered', {
|
||||
usesObservability,
|
||||
observabilityTool,
|
||||
otherTool,
|
||||
familiarity,
|
||||
});
|
||||
|
||||
onNext({
|
||||
organisationName,
|
||||
usesObservability,
|
||||
@@ -121,10 +128,17 @@ function OrgQuestions({
|
||||
},
|
||||
});
|
||||
|
||||
logEvent('User Onboarding: Org Name Updated', {
|
||||
logEvent('Org Onboarding: Org Name Updated', {
|
||||
organisationName: orgDetails.organisationName,
|
||||
});
|
||||
|
||||
logEvent('Org Onboarding: Answered', {
|
||||
usesObservability,
|
||||
observabilityTool,
|
||||
otherTool,
|
||||
familiarity,
|
||||
});
|
||||
|
||||
onNext({
|
||||
organisationName,
|
||||
usesObservability,
|
||||
@@ -133,7 +147,7 @@ function OrgQuestions({
|
||||
familiarity,
|
||||
});
|
||||
} else {
|
||||
logEvent('User Onboarding: Org Name Update Failed', {
|
||||
logEvent('Org Onboarding: Org Name Update Failed', {
|
||||
organisationName: orgDetails.organisationName,
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import './OnboardingQuestionaire.styles.scss';
|
||||
|
||||
import { NotificationInstance } from 'antd/es/notification/interface';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import updateProfileAPI from 'api/onboarding/updateProfile';
|
||||
import getAllOrgPreferences from 'api/preferences/getAllOrgPreferences';
|
||||
import updateOrgPreferenceAPI from 'api/preferences/updateOrgPreference';
|
||||
@@ -61,6 +62,10 @@ const INITIAL_OPTIMISE_SIGNOZ_DETAILS: OptimiseSignozDetails = {
|
||||
services: 0,
|
||||
};
|
||||
|
||||
const BACK_BUTTON_EVENT_NAME = 'Org Onboarding: Back Button Clicked';
|
||||
const NEXT_BUTTON_EVENT_NAME = 'Org Onboarding: Next Button Clicked';
|
||||
const ONBOARDING_COMPLETE_EVENT_NAME = 'Org Onboarding: Complete';
|
||||
|
||||
function OnboardingQuestionaire(): JSX.Element {
|
||||
const { notifications } = useNotifications();
|
||||
const { org } = useSelector<AppState, AppReducer>((state) => state.app);
|
||||
@@ -98,6 +103,13 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [org]);
|
||||
|
||||
useEffect(() => {
|
||||
logEvent('Org Onboarding: Started', {
|
||||
org_id: org?.[0]?.id,
|
||||
});
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const { refetch: refetchOrgPreferences } = useQuery({
|
||||
queryFn: () => getAllOrgPreferences(),
|
||||
queryKey: ['getOrgPreferences'],
|
||||
@@ -120,6 +132,8 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
|
||||
setUpdatingOrgOnboardingStatus(false);
|
||||
|
||||
logEvent('Org Onboarding: Redirecting to Get Started', {});
|
||||
|
||||
history.push(ROUTES.GET_STARTED);
|
||||
},
|
||||
onError: () => {
|
||||
@@ -156,6 +170,11 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
});
|
||||
|
||||
const handleUpdateProfile = (): void => {
|
||||
logEvent(NEXT_BUTTON_EVENT_NAME, {
|
||||
currentPageID: 3,
|
||||
nextPageID: 4,
|
||||
});
|
||||
|
||||
updateProfile({
|
||||
familiarity_with_observability: orgDetails?.familiarity as string,
|
||||
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
|
||||
@@ -180,6 +199,10 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
};
|
||||
|
||||
const handleOnboardingComplete = (): void => {
|
||||
logEvent(ONBOARDING_COMPLETE_EVENT_NAME, {
|
||||
currentPageID: 4,
|
||||
});
|
||||
|
||||
setUpdatingOrgOnboardingStatus(true);
|
||||
updateOrgPreference({
|
||||
preferenceID: 'ORG_ONBOARDING',
|
||||
@@ -199,6 +222,11 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
currentOrgData={currentOrgData}
|
||||
orgDetails={orgDetails}
|
||||
onNext={(orgDetails: OrgDetails): void => {
|
||||
logEvent(NEXT_BUTTON_EVENT_NAME, {
|
||||
currentPageID: 1,
|
||||
nextPageID: 2,
|
||||
});
|
||||
|
||||
setOrgDetails(orgDetails);
|
||||
setCurrentStep(2);
|
||||
}}
|
||||
@@ -209,8 +237,20 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
<AboutSigNozQuestions
|
||||
signozDetails={signozDetails}
|
||||
setSignozDetails={setSignozDetails}
|
||||
onBack={(): void => setCurrentStep(1)}
|
||||
onNext={(): void => setCurrentStep(3)}
|
||||
onBack={(): void => {
|
||||
logEvent(BACK_BUTTON_EVENT_NAME, {
|
||||
currentPageID: 2,
|
||||
prevPageID: 1,
|
||||
});
|
||||
setCurrentStep(1);
|
||||
}}
|
||||
onNext={(): void => {
|
||||
logEvent(NEXT_BUTTON_EVENT_NAME, {
|
||||
currentPageID: 2,
|
||||
nextPageID: 3,
|
||||
});
|
||||
setCurrentStep(3);
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -220,9 +260,15 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
isUpdatingProfile={isUpdatingProfile}
|
||||
optimiseSignozDetails={optimiseSignozDetails}
|
||||
setOptimiseSignozDetails={setOptimiseSignozDetails}
|
||||
onBack={(): void => setCurrentStep(2)}
|
||||
onBack={(): void => {
|
||||
logEvent(BACK_BUTTON_EVENT_NAME, {
|
||||
currentPageID: 3,
|
||||
prevPageID: 2,
|
||||
});
|
||||
setCurrentStep(2);
|
||||
}}
|
||||
onNext={handleUpdateProfile}
|
||||
onWillDoLater={(): void => setCurrentStep(4)} // This is temporary, only to skip gateway api call as it's not setup on staging yet
|
||||
onWillDoLater={(): void => setCurrentStep(4)}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -231,7 +277,13 @@ function OnboardingQuestionaire(): JSX.Element {
|
||||
isLoading={updatingOrgOnboardingStatus}
|
||||
teamMembers={teamMembers}
|
||||
setTeamMembers={setTeamMembers}
|
||||
onBack={(): void => setCurrentStep(3)}
|
||||
onBack={(): void => {
|
||||
logEvent(BACK_BUTTON_EVENT_NAME, {
|
||||
currentPageID: 4,
|
||||
prevPageID: 3,
|
||||
});
|
||||
setCurrentStep(3);
|
||||
}}
|
||||
onNext={handleOnboardingComplete}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -81,8 +81,10 @@ export const AggregatorFilter = memo(function AggregatorFilter({
|
||||
prefix: item.type || '',
|
||||
condition: !item.isColumn,
|
||||
}),
|
||||
!item.isColumn && item.type ? item.type : '',
|
||||
)}
|
||||
dataType={item.dataType}
|
||||
type={item.type || ''}
|
||||
/>
|
||||
),
|
||||
value: `${item.key}${selectValueDivider}${createIdFromObjectFields(
|
||||
@@ -187,6 +189,9 @@ export const AggregatorFilter = memo(function AggregatorFilter({
|
||||
prefix: query.aggregateAttribute.type || '',
|
||||
condition: !query.aggregateAttribute.isColumn,
|
||||
}),
|
||||
!query.aggregateAttribute.isColumn && query.aggregateAttribute.type
|
||||
? query.aggregateAttribute.type
|
||||
: '',
|
||||
);
|
||||
|
||||
return (
|
||||
|
||||
@@ -75,8 +75,10 @@ export const GroupByFilter = memo(function GroupByFilter({
|
||||
prefix: item.type || '',
|
||||
condition: !item.isColumn,
|
||||
}),
|
||||
!item.isColumn && item.type ? item.type : '',
|
||||
)}
|
||||
dataType={item.dataType || ''}
|
||||
type={item.type || ''}
|
||||
/>
|
||||
),
|
||||
value: `${item.id}`,
|
||||
@@ -166,6 +168,7 @@ export const GroupByFilter = memo(function GroupByFilter({
|
||||
prefix: item.type || '',
|
||||
condition: !item.isColumn,
|
||||
}),
|
||||
!item.isColumn && item.type ? item.type : '',
|
||||
)}`,
|
||||
value: `${item.id}`,
|
||||
}),
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { MetricsType } from 'container/MetricsApplication/constant';
|
||||
|
||||
export function removePrefix(str: string): string {
|
||||
export function removePrefix(str: string, type: string): string {
|
||||
const tagPrefix = `${MetricsType.Tag}_`;
|
||||
const resourcePrefix = `${MetricsType.Resource}_`;
|
||||
const scopePrefix = `${MetricsType.Scope}_`;
|
||||
|
||||
if (str.startsWith(tagPrefix)) {
|
||||
return str.slice(tagPrefix.length);
|
||||
@@ -10,5 +11,9 @@ export function removePrefix(str: string): string {
|
||||
if (str.startsWith(resourcePrefix)) {
|
||||
return str.slice(resourcePrefix.length);
|
||||
}
|
||||
if (str.startsWith(scopePrefix) && type === MetricsType.Scope) {
|
||||
return str.slice(scopePrefix.length);
|
||||
}
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
@@ -3,25 +3,23 @@ import './QueryBuilderSearch.styles.scss';
|
||||
import { Tooltip } from 'antd';
|
||||
|
||||
import { TagContainer, TagLabel, TagValue } from './style';
|
||||
import { getOptionType } from './utils';
|
||||
|
||||
function OptionRenderer({
|
||||
label,
|
||||
value,
|
||||
dataType,
|
||||
type,
|
||||
}: OptionRendererProps): JSX.Element {
|
||||
const optionType = getOptionType(label);
|
||||
|
||||
return (
|
||||
<span className="option">
|
||||
{optionType ? (
|
||||
{type ? (
|
||||
<Tooltip title={`${value}`} placement="topLeft">
|
||||
<div className="selectOptionContainer">
|
||||
<div className="option-value">{value}</div>
|
||||
<div className="option-meta-data-container">
|
||||
<TagContainer>
|
||||
<TagLabel>Type: </TagLabel>
|
||||
<TagValue>{optionType}</TagValue>
|
||||
<TagValue>{type}</TagValue>
|
||||
</TagContainer>
|
||||
<TagContainer>
|
||||
<TagLabel>Data type: </TagLabel>
|
||||
@@ -43,6 +41,7 @@ interface OptionRendererProps {
|
||||
label: string;
|
||||
value: string;
|
||||
dataType: string;
|
||||
type: string;
|
||||
}
|
||||
|
||||
export default OptionRenderer;
|
||||
|
||||
@@ -410,6 +410,7 @@ function QueryBuilderSearch({
|
||||
label={option.label}
|
||||
value={option.value}
|
||||
dataType={option.dataType || ''}
|
||||
type={option.type || ''}
|
||||
/>
|
||||
{option.selected && <StyledCheckOutlined />}
|
||||
</Select.Option>
|
||||
|
||||
@@ -260,6 +260,20 @@
|
||||
background: rgba(189, 153, 121, 0.1);
|
||||
}
|
||||
}
|
||||
|
||||
&.scope {
|
||||
border: 1px solid rgba(113, 144, 249, 0.2);
|
||||
|
||||
.ant-typography {
|
||||
color: var(--bg-robin-400);
|
||||
background: rgba(113, 144, 249, 0.1);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.ant-tag-close-icon {
|
||||
background: rgba(113, 144, 249, 0.1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,6 +94,25 @@
|
||||
letter-spacing: -0.06px;
|
||||
}
|
||||
}
|
||||
|
||||
&.scope {
|
||||
border-radius: 50px;
|
||||
background: rgba(113, 144, 249, 0.1) !important;
|
||||
color: var(--bg-robin-400) !important;
|
||||
|
||||
.dot {
|
||||
background-color: var(--bg-robin-400);
|
||||
}
|
||||
.text {
|
||||
color: var(--bg-robin-400);
|
||||
font-family: Inter;
|
||||
font-size: 12px;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 18px; /* 150% */
|
||||
letter-spacing: -0.06px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.option-meta-data-container {
|
||||
|
||||
@@ -16,4 +16,5 @@ export type Option = {
|
||||
selected?: boolean;
|
||||
dataType?: string;
|
||||
isIndexed?: boolean;
|
||||
type?: string;
|
||||
};
|
||||
|
||||
@@ -0,0 +1,29 @@
import getOnboardingStatus, {
	OnboardingStatusResponse,
} from 'api/messagingQueues/onboarding/getOnboardingStatus';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';

type UseOnboardingStatus = (
	options?: UseQueryOptions<
		SuccessResponse<OnboardingStatusResponse> | ErrorResponse
	>,
	endpointService?: string,
	queryKey?: string,
) => UseQueryResult<SuccessResponse<OnboardingStatusResponse> | ErrorResponse>;

export const useOnboardingStatus: UseOnboardingStatus = (
	options,
	endpointService,
	queryKey,
) =>
	useQuery<SuccessResponse<OnboardingStatusResponse> | ErrorResponse>({
		queryKey: [queryKey || `onboardingStatus-${endpointService}`],
		queryFn: () =>
			getOnboardingStatus({
				start: (Date.now() - 15 * 60 * 1000) * 1_000_000,
				end: Date.now() * 1_000_000,
				endpointService,
			}),
		...options,
	});
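A minimal sketch of how this hook could be consumed; the component name, import path, and 'consumers' service value are assumptions for illustration and are not part of this changeset. The 15-minute window is baked into the hook itself.

import { useOnboardingStatus } from 'hooks/messagingQueues/useOnboardingStatus'; // assumed path

function ConsumersOnboardingStatus(): JSX.Element {
	// Re-checks the onboarding status for a hypothetical 'consumers' service every 30s.
	const { isLoading, isError } = useOnboardingStatus(
		{ refetchInterval: 30_000 }, // react-query options
		'consumers', // endpointService
		'onboardingStatus-consumers', // queryKey
	);

	if (isLoading) return <span>Checking onboarding status…</span>;
	return <span>{isError ? 'Onboarding check failed' : 'Onboarding data received'}</span>;
}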
@@ -46,6 +46,7 @@ export const useOptions = (
|
||||
value: item.key,
|
||||
dataType: item.dataType,
|
||||
isIndexed: item?.isIndexed,
|
||||
type: item?.type || '',
|
||||
})),
|
||||
[getLabel],
|
||||
);
|
||||
|
||||
@@ -57,8 +57,11 @@ export const useAlertHistoryQueryParams = (): {
|
||||
|
||||
const startTime = params.get(QueryParams.startTime);
|
||||
const endTime = params.get(QueryParams.endTime);
|
||||
const relativeTimeParam = params.get(QueryParams.relativeTime);
|
||||
|
||||
const relativeTime =
|
||||
params.get(QueryParams.relativeTime) ?? RelativeTimeMap['6hr'];
|
||||
(relativeTimeParam === 'null' ? null : relativeTimeParam) ??
|
||||
RelativeTimeMap['6hr'];
|
||||
|
||||
const intStartTime = parseInt(startTime || '0', 10);
|
||||
const intEndTime = parseInt(endTime || '0', 10);
|
||||
|
||||
@@ -1,26 +1,65 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import '../MessagingQueues.styles.scss';
|
||||
|
||||
import { Select, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { ListMinus } from 'lucide-react';
|
||||
import { useEffect } from 'react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
|
||||
import { MessagingQueuesViewType } from '../MessagingQueuesUtils';
|
||||
import { SelectLabelWithComingSoon } from '../MQCommon/MQCommon';
|
||||
import {
|
||||
MessagingQueuesViewType,
|
||||
MessagingQueuesViewTypeOptions,
|
||||
ProducerLatencyOptions,
|
||||
} from '../MessagingQueuesUtils';
|
||||
import DropRateView from '../MQDetails/DropRateView/DropRateView';
|
||||
import MessagingQueueOverview from '../MQDetails/MessagingQueueOverview';
|
||||
import MessagingQueuesDetails from '../MQDetails/MQDetails';
|
||||
import MessagingQueuesConfigOptions from '../MQGraph/MQConfigOptions';
|
||||
import MessagingQueuesGraph from '../MQGraph/MQGraph';
|
||||
|
||||
function MQDetailPage(): JSX.Element {
|
||||
const history = useHistory();
|
||||
const [
|
||||
selectedView,
|
||||
setSelectedView,
|
||||
] = useState<MessagingQueuesViewTypeOptions>(
|
||||
MessagingQueuesViewType.consumerLag.value,
|
||||
);
|
||||
|
||||
const [
|
||||
producerLatencyOption,
|
||||
setproducerLatencyOption,
|
||||
] = useState<ProducerLatencyOptions>(ProducerLatencyOptions.Producers);
|
||||
|
||||
const mqServiceView = useUrlQuery().get(
|
||||
QueryParams.mqServiceView,
|
||||
) as MessagingQueuesViewTypeOptions;
|
||||
|
||||
useEffect(() => {
|
||||
logEvent('Messaging Queues: Detail page visited', {});
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (mqServiceView) {
|
||||
setSelectedView(mqServiceView);
|
||||
}
|
||||
}, [mqServiceView]);
|
||||
|
||||
const updateUrlQuery = (query: Record<string, string | number>): void => {
|
||||
const searchParams = new URLSearchParams(history.location.search);
|
||||
Object.keys(query).forEach((key) => {
|
||||
searchParams.set(key, query[key].toString());
|
||||
});
|
||||
history.push({
|
||||
search: searchParams.toString(),
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="messaging-queue-container">
|
||||
<div className="messaging-breadcrumb">
|
||||
@@ -39,50 +78,55 @@ function MQDetailPage(): JSX.Element {
|
||||
className="messaging-queue-options"
|
||||
defaultValue={MessagingQueuesViewType.consumerLag.value}
|
||||
popupClassName="messaging-queue-options-popup"
|
||||
onChange={(value): void => {
|
||||
setSelectedView(value);
|
||||
updateUrlQuery({ [QueryParams.mqServiceView]: value });
|
||||
}}
|
||||
value={mqServiceView}
|
||||
options={[
|
||||
{
|
||||
label: MessagingQueuesViewType.consumerLag.label,
|
||||
value: MessagingQueuesViewType.consumerLag.value,
|
||||
},
|
||||
{
|
||||
label: (
|
||||
<SelectLabelWithComingSoon
|
||||
label={MessagingQueuesViewType.partitionLatency.label}
|
||||
/>
|
||||
),
|
||||
label: MessagingQueuesViewType.partitionLatency.label,
|
||||
value: MessagingQueuesViewType.partitionLatency.value,
|
||||
disabled: true,
|
||||
},
|
||||
{
|
||||
label: (
|
||||
<SelectLabelWithComingSoon
|
||||
label={MessagingQueuesViewType.producerLatency.label}
|
||||
/>
|
||||
),
|
||||
label: MessagingQueuesViewType.producerLatency.label,
|
||||
value: MessagingQueuesViewType.producerLatency.value,
|
||||
disabled: true,
|
||||
},
|
||||
{
|
||||
label: (
|
||||
<SelectLabelWithComingSoon
|
||||
label={MessagingQueuesViewType.consumerLatency.label}
|
||||
/>
|
||||
),
|
||||
value: MessagingQueuesViewType.consumerLatency.value,
|
||||
disabled: true,
|
||||
label: MessagingQueuesViewType.dropRate.label,
|
||||
value: MessagingQueuesViewType.dropRate.value,
|
||||
},
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
<DateTimeSelectionV2 showAutoRefresh={false} hideShareModal />
|
||||
</div>
|
||||
<div className="messaging-queue-main-graph">
|
||||
<MessagingQueuesConfigOptions />
|
||||
<MessagingQueuesGraph />
|
||||
</div>
|
||||
<div className="messaging-queue-details">
|
||||
<MessagingQueuesDetails />
|
||||
</div>
|
||||
{selectedView === MessagingQueuesViewType.consumerLag.value ? (
|
||||
<div className="messaging-queue-main-graph">
|
||||
<MessagingQueuesConfigOptions />
|
||||
<MessagingQueuesGraph />
|
||||
</div>
|
||||
) : selectedView === MessagingQueuesViewType.dropRate.value ? (
|
||||
<DropRateView />
|
||||
) : (
|
||||
<MessagingQueueOverview
|
||||
selectedView={selectedView}
|
||||
option={producerLatencyOption}
|
||||
setOption={setproducerLatencyOption}
|
||||
/>
|
||||
)}
|
||||
{selectedView !== MessagingQueuesViewType.dropRate.value && (
|
||||
<div className="messaging-queue-details">
|
||||
<MessagingQueuesDetails
|
||||
selectedView={selectedView}
|
||||
producerLatencyOption={producerLatencyOption}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
.evaluation-time-selector {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
|
||||
.eval-title {
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 28px;
|
||||
color: var(--bg-vanilla-200);
|
||||
}
|
||||
|
||||
.ant-selector {
|
||||
background-color: var(--bg-ink-400);
|
||||
border-radius: 4px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
|
||||
.select-dropdown-render {
|
||||
padding: 8px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
width: 200px;
|
||||
margin: 6px;
|
||||
}
|
||||
@@ -0,0 +1,251 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import '../MQDetails.style.scss';
|
||||
|
||||
import { Table, Typography } from 'antd';
|
||||
import axios from 'axios';
|
||||
import cx from 'classnames';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { isNumber } from 'lodash-es';
|
||||
import {
|
||||
convertToTitleCase,
|
||||
MessagingQueuesViewType,
|
||||
RowData,
|
||||
} from 'pages/MessagingQueues/MessagingQueuesUtils';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useMutation } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { MessagingQueueServicePayload } from '../MQTables/getConsumerLagDetails';
|
||||
import { getKafkaSpanEval } from '../MQTables/getKafkaSpanEval';
|
||||
import {
|
||||
convertToMilliseconds,
|
||||
DropRateAPIResponse,
|
||||
DropRateResponse,
|
||||
} from './dropRateViewUtils';
|
||||
import EvaluationTimeSelector from './EvaluationTimeSelector';
|
||||
|
||||
export function getTableData(data: DropRateResponse[]): RowData[] {
|
||||
if (data?.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const tableData: RowData[] =
|
||||
data?.map(
|
||||
(row: DropRateResponse, index: number): RowData => ({
|
||||
...(row.data as any), // todo-sagar
|
||||
key: index,
|
||||
}),
|
||||
) || [];
|
||||
|
||||
return tableData;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export function getColumns(
|
||||
data: DropRateResponse[],
|
||||
visibleCounts: Record<number, number>,
|
||||
handleShowMore: (index: number) => void,
|
||||
): any[] {
|
||||
if (data?.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const columnsOrder = [
|
||||
'producer_service',
|
||||
'consumer_service',
|
||||
'breach_percentage',
|
||||
'top_traceIDs',
|
||||
'breached_spans',
|
||||
'total_spans',
|
||||
];
|
||||
|
||||
const columns: {
|
||||
title: string;
|
||||
dataIndex: string;
|
||||
key: string;
|
||||
}[] = columnsOrder.map((column) => ({
|
||||
title: convertToTitleCase(column),
|
||||
dataIndex: column,
|
||||
key: column,
|
||||
render: (
|
||||
text: string | string[],
|
||||
_record: any,
|
||||
index: number,
|
||||
): JSX.Element => {
|
||||
if (Array.isArray(text)) {
|
||||
const visibleCount = visibleCounts[index] || 4;
|
||||
const visibleItems = text.slice(0, visibleCount);
|
||||
const remainingCount = (text || []).length - visibleCount;
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className="trace-id-list">
|
||||
{visibleItems.map((item, idx) => {
|
||||
const shouldShowMore = remainingCount > 0 && idx === visibleCount - 1;
|
||||
return (
|
||||
<div key={item} className="traceid-style">
|
||||
<Typography.Text
|
||||
key={item}
|
||||
className="traceid-text"
|
||||
onClick={(): void => {
|
||||
window.open(`${ROUTES.TRACE}/${item}`, '_blank');
|
||||
}}
|
||||
>
|
||||
{item}
|
||||
</Typography.Text>
|
||||
{shouldShowMore && (
|
||||
<Typography
|
||||
onClick={(): void => handleShowMore(index)}
|
||||
className="remaing-count"
|
||||
>
|
||||
+ {remainingCount} more
|
||||
</Typography>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (column === 'consumer_service' || column === 'producer_service') {
|
||||
return (
|
||||
<Typography.Link
|
||||
onClick={(e): void => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
window.open(`/services/${encodeURIComponent(text)}`, '_blank');
|
||||
}}
|
||||
>
|
||||
{text}
|
||||
</Typography.Link>
|
||||
);
|
||||
}
|
||||
|
||||
if (column === 'breach_percentage' && text) {
|
||||
if (!isNumber(text))
|
||||
return <Typography.Text>{text.toString()}</Typography.Text>;
|
||||
return (
|
||||
<Typography.Text>
|
||||
{(typeof text === 'string' ? parseFloat(text) : text).toFixed(2)} %
|
||||
</Typography.Text>
|
||||
);
|
||||
}
|
||||
|
||||
return <Typography.Text>{text}</Typography.Text>;
|
||||
},
|
||||
}));
|
||||
|
||||
return columns;
|
||||
}
|
||||
|
||||
const showPaginationItem = (total: number, range: number[]): JSX.Element => (
|
||||
<>
|
||||
<Typography.Text className="numbers">
|
||||
{range[0]} — {range[1]}
|
||||
</Typography.Text>
|
||||
<Typography.Text className="total"> of {total}</Typography.Text>
|
||||
</>
|
||||
);
|
||||
|
||||
function DropRateView(): JSX.Element {
|
||||
const [columns, setColumns] = useState<any[]>([]);
|
||||
const [tableData, setTableData] = useState<any[]>([]);
|
||||
const { notifications } = useNotifications();
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
const [data, setData] = useState<
|
||||
DropRateAPIResponse['data']['result'][0]['list']
|
||||
>([]);
|
||||
const [interval, setInterval] = useState<string>('');
|
||||
|
||||
const [visibleCounts, setVisibleCounts] = useState<Record<number, number>>({});
|
||||
|
||||
const paginationConfig = useMemo(
|
||||
() =>
|
||||
tableData?.length > 10 && {
|
||||
pageSize: 10,
|
||||
showTotal: showPaginationItem,
|
||||
showSizeChanger: false,
|
||||
hideOnSinglePage: true,
|
||||
},
|
||||
[tableData],
|
||||
);
|
||||
|
||||
const evaluationTime = useMemo(() => convertToMilliseconds(interval), [
|
||||
interval,
|
||||
]);
|
||||
const tableApiPayload: MessagingQueueServicePayload = useMemo(
|
||||
() => ({
|
||||
start: minTime,
|
||||
end: maxTime,
|
||||
evalTime: evaluationTime * 1e6,
|
||||
}),
|
||||
[evaluationTime, maxTime, minTime],
|
||||
);
|
||||
|
||||
const handleOnError = (error: Error): void => {
|
||||
notifications.error({
|
||||
message: axios.isAxiosError(error) ? error?.message : SOMETHING_WENT_WRONG,
|
||||
});
|
||||
};
|
||||
|
||||
const handleShowMore = (index: number): void => {
|
||||
setVisibleCounts((prevCounts) => ({
|
||||
...prevCounts,
|
||||
[index]: (prevCounts[index] || 4) + 4,
|
||||
}));
|
||||
};
|
||||
|
||||
const { mutate: getViewDetails, isLoading } = useMutation(getKafkaSpanEval, {
|
||||
onSuccess: (data) => {
|
||||
if (data.payload) {
|
||||
setData(data.payload.result[0].list);
|
||||
}
|
||||
},
|
||||
onError: handleOnError,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (data?.length > 0) {
|
||||
setColumns(getColumns(data, visibleCounts, handleShowMore));
|
||||
setTableData(getTableData(data));
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [data, visibleCounts]);
|
||||
|
||||
useEffect(() => {
|
||||
if (evaluationTime) {
|
||||
getViewDetails(tableApiPayload);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [minTime, maxTime, evaluationTime]);
|
||||
|
||||
return (
|
||||
<div className={cx('mq-overview-container', 'droprate-view')}>
|
||||
<div className="mq-overview-title">
|
||||
<div className="drop-rat-title">
|
||||
{MessagingQueuesViewType.dropRate.label}
|
||||
</div>
|
||||
<EvaluationTimeSelector setInterval={setInterval} />
|
||||
</div>
|
||||
<Table
|
||||
className={cx('mq-table', 'pagination-left')}
|
||||
pagination={paginationConfig}
|
||||
size="middle"
|
||||
columns={columns}
|
||||
dataSource={tableData}
|
||||
bordered={false}
|
||||
loading={isLoading}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default DropRateView;
|
||||
@@ -0,0 +1,111 @@
|
||||
import './DropRateView.styles.scss';
|
||||
|
||||
import { Input, Select, Typography } from 'antd';
|
||||
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
|
||||
|
||||
const { Option } = Select;
|
||||
|
||||
interface SelectDropdownRenderProps {
|
||||
menu: React.ReactNode;
|
||||
inputValue: string;
|
||||
handleInputChange: (e: React.ChangeEvent<HTMLInputElement>) => void;
|
||||
handleKeyDown: (e: React.KeyboardEvent<HTMLInputElement>) => void;
|
||||
handleAddCustomValue: () => void;
|
||||
}
|
||||
|
||||
function SelectDropdownRender({
|
||||
menu,
|
||||
inputValue,
|
||||
handleInputChange,
|
||||
handleAddCustomValue,
|
||||
handleKeyDown,
|
||||
}: SelectDropdownRenderProps): JSX.Element {
|
||||
return (
|
||||
<>
|
||||
{menu}
|
||||
<Input
|
||||
placeholder="Enter custom time (ms)"
|
||||
value={inputValue}
|
||||
onChange={handleInputChange}
|
||||
onKeyDown={handleKeyDown}
|
||||
onBlur={handleAddCustomValue}
|
||||
className="select-dropdown-render"
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
function EvaluationTimeSelector({
|
||||
setInterval,
|
||||
}: {
|
||||
setInterval: Dispatch<SetStateAction<string>>;
|
||||
}): JSX.Element {
|
||||
const [inputValue, setInputValue] = useState<string>('');
|
||||
const [selectedInterval, setSelectedInterval] = useState<string | null>('5ms');
|
||||
const [dropdownOpen, setDropdownOpen] = useState<boolean>(false);
|
||||
|
||||
const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>): void => {
|
||||
setInputValue(e.target.value);
|
||||
};
|
||||
|
||||
const handleSelectChange = (value: string): void => {
|
||||
setSelectedInterval(value);
|
||||
setInputValue('');
|
||||
setDropdownOpen(false);
|
||||
};
|
||||
|
||||
const handleAddCustomValue = (): void => {
|
||||
setSelectedInterval(inputValue);
|
||||
setInputValue(inputValue);
|
||||
setDropdownOpen(false);
|
||||
};
|
||||
|
||||
const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>): void => {
|
||||
if (e.key === 'Enter') {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
handleAddCustomValue();
|
||||
}
|
||||
};
|
||||
|
||||
const renderDropdown = (menu: React.ReactNode): JSX.Element => (
|
||||
<SelectDropdownRender
|
||||
menu={menu}
|
||||
inputValue={inputValue}
|
||||
handleInputChange={handleInputChange}
|
||||
handleAddCustomValue={handleAddCustomValue}
|
||||
handleKeyDown={handleKeyDown}
|
||||
/>
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (selectedInterval) {
|
||||
setInterval(() => selectedInterval);
|
||||
}
|
||||
}, [selectedInterval, setInterval]);
|
||||
|
||||
return (
|
||||
<div className="evaluation-time-selector">
|
||||
<Typography.Text className="eval-title">
|
||||
Evaluation Interval:
|
||||
</Typography.Text>
|
||||
<Select
|
||||
style={{ width: 220 }}
|
||||
placeholder="Select time interval (ms)"
|
||||
value={selectedInterval}
|
||||
onChange={handleSelectChange}
|
||||
open={dropdownOpen}
|
||||
onDropdownVisibleChange={setDropdownOpen}
|
||||
dropdownRender={renderDropdown}
|
||||
>
|
||||
<Option value="1ms">1ms</Option>
|
||||
<Option value="2ms">2ms</Option>
|
||||
<Option value="5ms">5ms</Option>
|
||||
<Option value="10ms">10ms</Option>
|
||||
<Option value="15ms">15ms</Option>
|
||||
</Select>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default EvaluationTimeSelector;
|
||||
@@ -0,0 +1,46 @@
export function convertToMilliseconds(timeInput: string): number {
	if (!timeInput.trim()) {
		return 0;
	}

	const match = timeInput.match(/^(\d+)(ms|s|ns)?$/); // Match number and optional unit
	if (!match) {
		throw new Error(`Invalid time format: ${timeInput}`);
	}

	const value = parseInt(match[1], 10);
	const unit = match[2] || 'ms'; // Default to 'ms' if no unit is provided

	switch (unit) {
		case 's':
			return value * 1e3;
		case 'ms':
			return value;
		case 'ns':
			return value / 1e6;
		default:
			throw new Error('Invalid time format');
	}
}

export interface DropRateResponse {
	timestamp: string;
	data: {
		breach_percentage: number;
		breached_spans: number;
		consumer_service: string;
		producer_service: string;
		top_traceIDs: string[];
		total_spans: number;
	};
}
export interface DropRateAPIResponse {
	status: string;
	data: {
		resultType: string;
		result: {
			queryName: string;
			list: DropRateResponse[];
		}[];
	};
}
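A few worked inputs for the helper above; these follow directly from the switch cases and are not test cases shipped with the change:

convertToMilliseconds('5ms'); // -> 5
convertToMilliseconds('5'); // -> 5, unit defaults to 'ms'
convertToMilliseconds('2s'); // -> 2000
convertToMilliseconds('500ns'); // -> 0.0005
convertToMilliseconds(''); // -> 0
convertToMilliseconds('abc'); // -> throws Error: Invalid time format: abc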
@@ -4,3 +4,115 @@
|
||||
flex-direction: column;
|
||||
gap: 24px;
|
||||
}
|
||||
|
||||
.mq-overview-container {
|
||||
display: flex;
|
||||
padding: 24px;
|
||||
flex-direction: column;
|
||||
align-items: start;
|
||||
gap: 16px;
|
||||
|
||||
border-radius: 6px;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
background: var(--bg-ink-500);
|
||||
|
||||
.mq-overview-title {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
|
||||
.drop-rat-title {
|
||||
color: var(--bg-vanilla-200);
|
||||
|
||||
font-family: Inter;
|
||||
font-size: 18px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 28px;
|
||||
}
|
||||
}
|
||||
|
||||
.mq-details-options {
|
||||
letter-spacing: -0.06px;
|
||||
cursor: pointer;
|
||||
|
||||
.ant-radio-button-wrapper {
|
||||
border-color: var(--bg-slate-400);
|
||||
color: var(--bg-vanilla-400);
|
||||
}
|
||||
.ant-radio-button-wrapper-checked {
|
||||
background: var(--bg-slate-400);
|
||||
color: var(--bg-vanilla-100);
|
||||
}
|
||||
.ant-radio-button-wrapper::before {
|
||||
width: 0px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.droprate-view {
|
||||
.mq-table {
|
||||
width: 100%;
|
||||
|
||||
.ant-table-content {
|
||||
border-radius: 6px;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
box-shadow: 0px 4px 12px 0px rgba(0, 0, 0, 0.1);
|
||||
}
|
||||
|
||||
.ant-table-tbody {
|
||||
.ant-table-cell {
|
||||
max-width: 250px;
|
||||
border-bottom: none;
|
||||
}
|
||||
}
|
||||
|
||||
.ant-table-thead {
|
||||
.ant-table-cell {
|
||||
background-color: var(--bg-ink-500);
|
||||
border-bottom: 1px solid var(--bg-slate-500);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.trace-id-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
width: max-content;
|
||||
|
||||
.traceid-style {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
|
||||
.traceid-text {
|
||||
border-radius: 2px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-slate-400);
|
||||
padding: 2px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.remaing-count {
|
||||
cursor: pointer;
|
||||
color: var(--bg-vanilla-100);
|
||||
font-family: Inter;
|
||||
font-size: 12px;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: normal;
|
||||
letter-spacing: -0.06px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.pagination-left {
|
||||
&.mq-table {
|
||||
.ant-pagination {
|
||||
justify-content: flex-start;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,65 +1,222 @@
|
||||
import './MQDetails.style.scss';
|
||||
|
||||
import { Radio } from 'antd';
|
||||
import { Dispatch, SetStateAction, useState } from 'react';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { Dispatch, SetStateAction, useEffect, useMemo, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import {
|
||||
ConsumerLagDetailTitle,
|
||||
ConsumerLagDetailType,
|
||||
getMetaDataAndAPIPerView,
|
||||
MessagingQueueServiceDetailType,
|
||||
MessagingQueuesViewType,
|
||||
MessagingQueuesViewTypeOptions,
|
||||
ProducerLatencyOptions,
|
||||
SelectedTimelineQuery,
|
||||
} from '../MessagingQueuesUtils';
|
||||
import { ComingSoon } from '../MQCommon/MQCommon';
|
||||
import MessagingQueuesTable from './MQTables/MQTables';
|
||||
|
||||
const MQServiceDetailTypePerView = (
|
||||
producerLatencyOption: ProducerLatencyOptions,
|
||||
): Record<string, MessagingQueueServiceDetailType[]> => ({
|
||||
[MessagingQueuesViewType.consumerLag.value]: [
|
||||
MessagingQueueServiceDetailType.ConsumerDetails,
|
||||
MessagingQueueServiceDetailType.ProducerDetails,
|
||||
MessagingQueueServiceDetailType.NetworkLatency,
|
||||
MessagingQueueServiceDetailType.PartitionHostMetrics,
|
||||
],
|
||||
[MessagingQueuesViewType.partitionLatency.value]: [
|
||||
MessagingQueueServiceDetailType.ConsumerDetails,
|
||||
MessagingQueueServiceDetailType.ProducerDetails,
|
||||
],
|
||||
[MessagingQueuesViewType.producerLatency.value]: [
|
||||
producerLatencyOption === ProducerLatencyOptions.Consumers
|
||||
? MessagingQueueServiceDetailType.ConsumerDetails
|
||||
: MessagingQueueServiceDetailType.ProducerDetails,
|
||||
],
|
||||
});
|
||||
|
||||
interface MessagingQueuesOptionsProps {
|
||||
currentTab: MessagingQueueServiceDetailType;
|
||||
setCurrentTab: Dispatch<SetStateAction<MessagingQueueServiceDetailType>>;
|
||||
selectedView: MessagingQueuesViewTypeOptions;
|
||||
producerLatencyOption: ProducerLatencyOptions;
|
||||
}
|
||||
|
||||
function MessagingQueuesOptions({
|
||||
currentTab,
|
||||
setCurrentTab,
|
||||
}: {
|
||||
currentTab: ConsumerLagDetailType;
|
||||
setCurrentTab: Dispatch<SetStateAction<ConsumerLagDetailType>>;
|
||||
}): JSX.Element {
|
||||
const [option, setOption] = useState<ConsumerLagDetailType>(currentTab);
|
||||
selectedView,
|
||||
producerLatencyOption,
|
||||
}: MessagingQueuesOptionsProps): JSX.Element {
|
||||
const handleChange = (value: MessagingQueueServiceDetailType): void => {
|
||||
setCurrentTab(value);
|
||||
};
|
||||
|
||||
const renderRadioButtons = (): JSX.Element[] => {
|
||||
const detailTypes =
|
||||
MQServiceDetailTypePerView(producerLatencyOption)[selectedView] || [];
|
||||
return detailTypes.map((detailType) => (
|
||||
<Radio.Button
|
||||
key={detailType}
|
||||
value={detailType}
|
||||
disabled={
|
||||
detailType === MessagingQueueServiceDetailType.PartitionHostMetrics
|
||||
}
|
||||
className={
|
||||
detailType === MessagingQueueServiceDetailType.PartitionHostMetrics
|
||||
? 'disabled-option'
|
||||
: ''
|
||||
}
|
||||
>
|
||||
{ConsumerLagDetailTitle[detailType]}
|
||||
{detailType === MessagingQueueServiceDetailType.PartitionHostMetrics && (
|
||||
<ComingSoon />
|
||||
)}
|
||||
</Radio.Button>
|
||||
));
|
||||
};
|
||||
|
||||
return (
|
||||
<Radio.Group
|
||||
onChange={(value): void => {
|
||||
setOption(value.target.value);
|
||||
setCurrentTab(value.target.value);
|
||||
}}
|
||||
value={option}
|
||||
onChange={(e): void => handleChange(e.target.value)}
|
||||
value={currentTab}
|
||||
className="mq-details-options"
|
||||
>
|
||||
<Radio.Button value={ConsumerLagDetailType.ConsumerDetails} checked>
|
||||
{ConsumerLagDetailTitle[ConsumerLagDetailType.ConsumerDetails]}
|
||||
</Radio.Button>
|
||||
<Radio.Button value={ConsumerLagDetailType.ProducerDetails}>
|
||||
{ConsumerLagDetailTitle[ConsumerLagDetailType.ProducerDetails]}
|
||||
</Radio.Button>
|
||||
<Radio.Button value={ConsumerLagDetailType.NetworkLatency}>
|
||||
{ConsumerLagDetailTitle[ConsumerLagDetailType.NetworkLatency]}
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
value={ConsumerLagDetailType.PartitionHostMetrics}
|
||||
disabled
|
||||
className="disabled-option"
|
||||
>
|
||||
{ConsumerLagDetailTitle[ConsumerLagDetailType.PartitionHostMetrics]}
|
||||
<ComingSoon />
|
||||
</Radio.Button>
|
||||
{renderRadioButtons()}
|
||||
</Radio.Group>
|
||||
);
|
||||
}
|
||||
|
||||
function MessagingQueuesDetails(): JSX.Element {
|
||||
const [currentTab, setCurrentTab] = useState<ConsumerLagDetailType>(
|
||||
ConsumerLagDetailType.ConsumerDetails,
|
||||
const checkValidityOfDetailConfigs = (
|
||||
selectedTimelineQuery: SelectedTimelineQuery,
|
||||
selectedView: MessagingQueuesViewTypeOptions,
|
||||
currentTab: MessagingQueueServiceDetailType,
|
||||
configDetails?: {
|
||||
[key: string]: string;
|
||||
},
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
): boolean => {
|
||||
if (selectedView === MessagingQueuesViewType.consumerLag.value) {
|
||||
return !(
|
||||
isEmpty(selectedTimelineQuery) ||
|
||||
(!selectedTimelineQuery?.group &&
|
||||
!selectedTimelineQuery?.topic &&
|
||||
!selectedTimelineQuery?.partition)
|
||||
);
|
||||
}
|
||||
|
||||
if (selectedView === MessagingQueuesViewType.partitionLatency.value) {
|
||||
if (isEmpty(configDetails)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return Boolean(configDetails?.topic && configDetails?.partition);
|
||||
}
|
||||
|
||||
if (selectedView === MessagingQueuesViewType.producerLatency.value) {
|
||||
if (isEmpty(configDetails)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (currentTab === MessagingQueueServiceDetailType.ProducerDetails) {
|
||||
return Boolean(
|
||||
configDetails?.topic &&
|
||||
configDetails?.partition &&
|
||||
configDetails?.service_name,
|
||||
);
|
||||
}
|
||||
return Boolean(configDetails?.topic && configDetails?.service_name);
|
||||
}
|
||||
|
||||
return selectedView === MessagingQueuesViewType.dropRate.value;
|
||||
};
|
||||
|
||||
function MessagingQueuesDetails({
|
||||
selectedView,
|
||||
producerLatencyOption,
|
||||
}: {
|
||||
selectedView: MessagingQueuesViewTypeOptions;
|
||||
producerLatencyOption: ProducerLatencyOptions;
|
||||
}): JSX.Element {
|
||||
const [currentTab, setCurrentTab] = useState<MessagingQueueServiceDetailType>(
|
||||
MessagingQueueServiceDetailType.ConsumerDetails,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
producerLatencyOption &&
|
||||
selectedView === MessagingQueuesViewType.producerLatency.value
|
||||
) {
|
||||
setCurrentTab(
|
||||
producerLatencyOption === ProducerLatencyOptions.Consumers
|
||||
? MessagingQueueServiceDetailType.ConsumerDetails
|
||||
: MessagingQueueServiceDetailType.ProducerDetails,
|
||||
);
|
||||
}
|
||||
}, [selectedView, producerLatencyOption]);
|
||||
|
||||
const urlQuery = useUrlQuery();
|
||||
const timelineQuery = decodeURIComponent(
|
||||
urlQuery.get(QueryParams.selectedTimelineQuery) || '',
|
||||
);
|
||||
|
||||
const timelineQueryData: SelectedTimelineQuery = useMemo(
|
||||
() => (timelineQuery ? JSON.parse(timelineQuery) : {}),
|
||||
[timelineQuery],
|
||||
);
|
||||
|
||||
const configDetails = decodeURIComponent(
|
||||
urlQuery.get(QueryParams.configDetail) || '',
|
||||
);
|
||||
|
||||
const configDetailQueryData: {
|
||||
[key: string]: string;
|
||||
} = useMemo(() => (configDetails ? JSON.parse(configDetails) : {}), [
|
||||
configDetails,
|
||||
]);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const serviceConfigDetails = useMemo(
|
||||
() =>
|
||||
getMetaDataAndAPIPerView({
|
||||
detailType: currentTab,
|
||||
minTime,
|
||||
maxTime,
|
||||
selectedTimelineQuery: timelineQueryData,
|
||||
configDetails: configDetailQueryData,
|
||||
}),
|
||||
[configDetailQueryData, currentTab, maxTime, minTime, timelineQueryData],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="mq-details">
|
||||
<MessagingQueuesOptions
|
||||
currentTab={currentTab}
|
||||
setCurrentTab={setCurrentTab}
|
||||
selectedView={selectedView}
|
||||
producerLatencyOption={producerLatencyOption}
|
||||
/>
|
||||
<MessagingQueuesTable
|
||||
currentTab={currentTab}
|
||||
selectedView={selectedView}
|
||||
tableApi={serviceConfigDetails[selectedView]?.tableApi}
|
||||
validConfigPresent={checkValidityOfDetailConfigs(
|
||||
timelineQueryData,
|
||||
selectedView,
|
||||
currentTab,
|
||||
configDetailQueryData,
|
||||
)}
|
||||
tableApiPayload={serviceConfigDetails[selectedView]?.tableApiPayload}
|
||||
/>
|
||||
<MessagingQueuesTable currentTab={currentTab} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
.mq-tables-container {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
|
||||
.mq-table-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@@ -31,9 +34,6 @@
|
||||
.ant-table-tbody {
|
||||
.ant-table-cell {
|
||||
max-width: 250px;
|
||||
|
||||
background-color: var(--bg-ink-400);
|
||||
|
||||
border-bottom: none;
|
||||
}
|
||||
}
|
||||
@@ -63,6 +63,21 @@
|
||||
}
|
||||
}
|
||||
|
||||
.mq-table {
|
||||
&.mq-overview-row-clickable {
|
||||
.ant-table-row {
|
||||
background-color: var(--bg-ink-400);
|
||||
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
background-color: var(--bg-slate-400) !important;
|
||||
color: var(--bg-vanilla-400);
|
||||
transition: background-color 0.3s ease, color 0.3s ease;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.mq-tables-container {
|
||||
.mq-table-title {
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
/* eslint-disable react/require-default-props */
|
||||
import './MQTables.styles.scss';
|
||||
|
||||
import { Skeleton, Table, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import axios from 'axios';
|
||||
import { isNumber } from 'chart.js/helpers';
|
||||
import cx from 'classnames';
|
||||
import { ColumnTypeRender } from 'components/Logs/TableView/types';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import { QueryParams } from 'constants/query';
|
||||
@@ -13,27 +15,31 @@ import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import {
|
||||
ConsumerLagDetailTitle,
|
||||
ConsumerLagDetailType,
|
||||
convertToTitleCase,
|
||||
MessagingQueueServiceDetailType,
|
||||
MessagingQueuesViewType,
|
||||
MessagingQueuesViewTypeOptions,
|
||||
RowData,
|
||||
SelectedTimelineQuery,
|
||||
setConfigDetail,
|
||||
} from 'pages/MessagingQueues/MessagingQueuesUtils';
|
||||
import { useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useMutation } from 'react-query';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { useHistory, useLocation } from 'react-router-dom';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
|
||||
import {
|
||||
ConsumerLagPayload,
|
||||
getConsumerLagDetails,
|
||||
MessagingQueueServicePayload,
|
||||
MessagingQueuesPayloadProps,
|
||||
} from './getConsumerLagDetails';
|
||||
|
||||
const INITIAL_PAGE_SIZE = 10;
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
export function getColumns(
|
||||
data: MessagingQueuesPayloadProps['payload'],
|
||||
history: History<unknown>,
|
||||
): RowData[] {
|
||||
|
||||
if (data?.result?.length === 0) {
|
||||
return [];
|
||||
}
|
||||
@@ -105,10 +111,25 @@ const showPaginationItem = (total: number, range: number[]): JSX.Element => (
|
||||
</>
|
||||
);
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function MessagingQueuesTable({
|
||||
currentTab,
|
||||
selectedView,
|
||||
tableApiPayload,
|
||||
tableApi,
|
||||
validConfigPresent = false,
|
||||
type = 'Detail',
|
||||
}: {
|
||||
currentTab: ConsumerLagDetailType;
|
||||
currentTab?: MessagingQueueServiceDetailType;
|
||||
selectedView: MessagingQueuesViewTypeOptions;
|
||||
tableApiPayload?: MessagingQueueServicePayload;
|
||||
tableApi: (
|
||||
props: MessagingQueueServicePayload,
|
||||
) => Promise<
|
||||
SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
|
||||
>;
|
||||
validConfigPresent?: boolean;
|
||||
type?: 'Detail' | 'Overview';
|
||||
}): JSX.Element {
|
||||
const [columns, setColumns] = useState<any[]>([]);
|
||||
const [tableData, setTableData] = useState<any[]>([]);
|
||||
@@ -118,15 +139,26 @@ function MessagingQueuesTable({
|
||||
const timelineQuery = decodeURIComponent(
|
||||
urlQuery.get(QueryParams.selectedTimelineQuery) || '',
|
||||
);
|
||||
|
||||
const timelineQueryData: SelectedTimelineQuery = useMemo(
|
||||
() => (timelineQuery ? JSON.parse(timelineQuery) : {}),
|
||||
[timelineQuery],
|
||||
);
|
||||
|
||||
const configDetails = decodeURIComponent(
|
||||
urlQuery.get(QueryParams.configDetail) || '',
|
||||
);
|
||||
|
||||
const configDetailQueryData: {
|
||||
[key: string]: string;
|
||||
} = useMemo(() => (configDetails ? JSON.parse(configDetails) : {}), [
|
||||
configDetails,
|
||||
]);
|
||||
|
||||
const paginationConfig = useMemo(
|
||||
() =>
|
||||
tableData?.length > 20 && {
|
||||
pageSize: 20,
|
||||
tableData?.length > INITIAL_PAGE_SIZE && {
|
||||
pageSize: INITIAL_PAGE_SIZE,
|
||||
showTotal: showPaginationItem,
|
||||
showSizeChanger: false,
|
||||
hideOnSinglePage: true,
|
||||
@@ -134,28 +166,14 @@ function MessagingQueuesTable({
|
||||
[tableData],
|
||||
);
|
||||
|
||||
const props: ConsumerLagPayload = useMemo(
|
||||
() => ({
|
||||
start: (timelineQueryData?.start || 0) * 1e9,
|
||||
end: (timelineQueryData?.end || 0) * 1e9,
|
||||
variables: {
|
||||
partition: timelineQueryData?.partition,
|
||||
topic: timelineQueryData?.topic,
|
||||
consumer_group: timelineQueryData?.group,
|
||||
},
|
||||
detailType: currentTab,
|
||||
}),
|
||||
[currentTab, timelineQueryData],
|
||||
);
|
||||
|
||||
const handleConsumerDetailsOnError = (error: Error): void => {
|
||||
notifications.error({
|
||||
message: axios.isAxiosError(error) ? error?.message : SOMETHING_WENT_WRONG,
|
||||
});
|
||||
};
|
||||
|
||||
const { mutate: getConsumerDetails, isLoading } = useMutation(
|
||||
getConsumerLagDetails,
|
||||
const { mutate: getViewDetails, isLoading, error, isError } = useMutation(
|
||||
tableApi,
|
||||
{
|
||||
onSuccess: (data) => {
|
||||
if (data.payload) {
|
||||
@@ -167,57 +185,92 @@ function MessagingQueuesTable({
|
||||
},
|
||||
);
|
||||
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
useEffect(() => getConsumerDetails(props), [currentTab, props]);
|
||||
useEffect(
|
||||
() => {
|
||||
if (validConfigPresent && tableApiPayload) {
|
||||
getViewDetails(tableApiPayload);
|
||||
}
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[currentTab, selectedView, tableApiPayload],
|
||||
);
|
||||
|
||||
const isLogEventCalled = useRef<boolean>(false);
|
||||
const [selectedRowKey, setSelectedRowKey] = useState<React.Key>();
|
||||
const [, setSelectedRows] = useState<any>();
|
||||
const location = useLocation();
|
||||
|
||||
const isEmptyDetails = (timelineQueryData: SelectedTimelineQuery): boolean => {
|
||||
const isEmptyDetail =
|
||||
isEmpty(timelineQueryData) ||
|
||||
(!timelineQueryData?.group &&
|
||||
!timelineQueryData?.topic &&
|
||||
!timelineQueryData?.partition);
|
||||
const onRowClick = (record: { [key: string]: string }): void => {
|
||||
const selectedKey = record.key;
|
||||
|
||||
if (!isEmptyDetail && !isLogEventCalled.current) {
|
||||
logEvent('Messaging Queues: More details viewed', {
|
||||
'tab-option': ConsumerLagDetailTitle[currentTab],
|
||||
variables: {
|
||||
group: timelineQueryData?.group,
|
||||
topic: timelineQueryData?.topic,
|
||||
partition: timelineQueryData?.partition,
|
||||
},
|
||||
});
|
||||
isLogEventCalled.current = true;
|
||||
if (`${selectedKey}_${selectedView}` === selectedRowKey) {
|
||||
setSelectedRowKey(undefined);
|
||||
setSelectedRows({});
|
||||
setConfigDetail(urlQuery, location, history, {});
|
||||
} else {
|
||||
setSelectedRowKey(`${selectedKey}_${selectedView}`);
|
||||
setSelectedRows(record);
|
||||
|
||||
if (!isEmpty(record)) {
|
||||
setConfigDetail(urlQuery, location, history, record);
|
||||
}
|
||||
}
|
||||
return isEmptyDetail;
|
||||
};
|
||||
|
||||
const subtitle =
|
||||
selectedView === MessagingQueuesViewType.consumerLag.value
|
||||
? `${timelineQueryData?.group || ''} ${timelineQueryData?.topic || ''} ${
|
||||
timelineQueryData?.partition || ''
|
||||
}`
|
||||
: `${configDetailQueryData?.service_name || ''} ${
|
||||
configDetailQueryData?.topic || ''
|
||||
} ${configDetailQueryData?.partition || ''}`;
|
||||
|
||||
return (
|
||||
<div className="mq-tables-container">
|
||||
{isEmptyDetails(timelineQueryData) ? (
|
||||
{!validConfigPresent ? (
|
||||
<div className="no-data-style">
|
||||
<Typography.Text>
|
||||
Click on a co-ordinate above to see the details
|
||||
{selectedView === MessagingQueuesViewType.consumerLag.value
|
||||
? 'Click on a co-ordinate above to see the details'
|
||||
: 'Click on a row above to see the details'}
|
||||
</Typography.Text>
|
||||
<Skeleton />
|
||||
</div>
|
||||
) : isError ? (
|
||||
<div className="no-data-style">
|
||||
<Typography.Text>{error?.message || SOMETHING_WENT_WRONG}</Typography.Text>
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<div className="mq-table-title">
|
||||
{ConsumerLagDetailTitle[currentTab]}
|
||||
<div className="mq-table-subtitle">{`${timelineQueryData?.group || ''} ${
|
||||
timelineQueryData?.topic || ''
|
||||
} ${timelineQueryData?.partition || ''}`}</div>
|
||||
</div>
|
||||
{currentTab && (
|
||||
<div className="mq-table-title">
|
||||
{ConsumerLagDetailTitle[currentTab]}
|
||||
<div className="mq-table-subtitle">{subtitle}</div>
|
||||
</div>
|
||||
)}
|
||||
<Table
|
||||
className="mq-table"
|
||||
className={cx(
|
||||
'mq-table',
|
||||
type !== 'Detail' ? 'mq-overview-row-clickable' : 'pagination-left',
|
||||
)}
|
||||
pagination={paginationConfig}
|
||||
size="middle"
|
||||
columns={columns}
|
||||
dataSource={tableData}
|
||||
bordered={false}
|
||||
loading={isLoading}
|
||||
onRow={(record): any =>
|
||||
type !== 'Detail'
|
||||
? {
|
||||
onClick: (): void => onRowClick(record),
|
||||
}
|
||||
: {}
|
||||
}
|
||||
rowClassName={(record): any =>
|
||||
`${record.key}_${selectedView}` === selectedRowKey
|
||||
? 'ant-table-row-selected'
|
||||
: ''
|
||||
}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import { ConsumerLagDetailType } from 'pages/MessagingQueues/MessagingQueuesUtils';
|
||||
import { MessagingQueueServiceDetailType } from 'pages/MessagingQueues/MessagingQueuesUtils';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
|
||||
export interface ConsumerLagPayload {
|
||||
export interface MessagingQueueServicePayload {
|
||||
start?: number | string;
|
||||
end?: number | string;
|
||||
variables: {
|
||||
variables?: {
|
||||
partition?: string;
|
||||
topic?: string;
|
||||
consumer_group?: string;
|
||||
service_name?: string;
|
||||
};
|
||||
detailType: ConsumerLagDetailType;
|
||||
detailType?: MessagingQueueServiceDetailType | 'producer' | 'consumer';
|
||||
evalTime?: number;
|
||||
}
|
||||
|
||||
export interface MessagingQueuesPayloadProps {
|
||||
@@ -36,26 +35,22 @@ export interface MessagingQueuesPayloadProps {
|
||||
}
|
||||
|
||||
export const getConsumerLagDetails = async (
|
||||
props: ConsumerLagPayload,
|
||||
props: MessagingQueueServicePayload,
|
||||
): Promise<
|
||||
SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
|
||||
> => {
|
||||
const { detailType, ...restProps } = props;
|
||||
try {
|
||||
const response = await axios.post(
|
||||
`/messaging-queues/kafka/consumer-lag/${props.detailType}`,
|
||||
{
|
||||
...restProps,
|
||||
},
|
||||
);
|
||||
const response = await axios.post(
|
||||
`/messaging-queues/kafka/consumer-lag/${props.detailType}`,
|
||||
{
|
||||
...restProps,
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG);
|
||||
}
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -0,0 +1,23 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import { DropRateAPIResponse } from '../DropRateView/dropRateViewUtils';
import { MessagingQueueServicePayload } from './getConsumerLagDetails';

export const getKafkaSpanEval = async (
	props: Omit<MessagingQueueServicePayload, 'detailType' | 'variables'>,
): Promise<SuccessResponse<DropRateAPIResponse['data']> | ErrorResponse> => {
	const { start, end, evalTime } = props;
	const response = await axios.post(`messaging-queues/kafka/span/evaluation`, {
		start,
		end,
		eval_time: evalTime,
	});

	return {
		statusCode: 200,
		error: null,
		message: response.data.status,
		payload: response.data.data,
	};
};
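A hypothetical call site for the helper above; the timestamps are assumptions for illustration. Elsewhere in this changeset DropRateView passes minTime/maxTime from the global time range and evaluationTime * 1e6, i.e. nanosecond values.

const now = Date.now();
getKafkaSpanEval({
	start: (now - 15 * 60 * 1000) * 1e6, // last 15 minutes, as nanosecond timestamps
	end: now * 1e6,
	evalTime: 5 * 1e6, // a 5 ms evaluation interval expressed in nanoseconds
}).then((res) => {
	if (res.payload) {
		// DropRateView reads the first query result's list in the same way.
		const rows = res.payload.result[0]?.list ?? [];
		console.log(rows.length, 'producer/consumer pairs evaluated');
	}
});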
@@ -0,0 +1,33 @@
import axios from 'api';
import { MessagingQueueServiceDetailType } from 'pages/MessagingQueues/MessagingQueuesUtils';
import { ErrorResponse, SuccessResponse } from 'types/api';

import {
	MessagingQueueServicePayload,
	MessagingQueuesPayloadProps,
} from './getConsumerLagDetails';

export const getPartitionLatencyDetails = async (
	props: MessagingQueueServicePayload,
): Promise<
	SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
> => {
	const { detailType, ...rest } = props;
	let endpoint = '';
	if (detailType === MessagingQueueServiceDetailType.ConsumerDetails) {
		endpoint = `/messaging-queues/kafka/partition-latency/consumer`;
	} else {
		endpoint = `/messaging-queues/kafka/consumer-lag/producer-details`;
	}

	const response = await axios.post(endpoint, {
		...rest,
	});

	return {
		statusCode: 200,
		error: null,
		message: response.data.status,
		payload: response.data.data,
	};
};
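To make the branching above concrete, two hypothetical calls; the topic/partition values and the minTime/maxTime nanosecond timestamps are invented for illustration, while the endpoints are the ones hard-coded in the function:

// Consumer details go to the partition-latency consumer endpoint.
getPartitionLatencyDetails({
	start: minTime,
	end: maxTime,
	variables: { topic: 'orders', partition: '0' },
	detailType: MessagingQueueServiceDetailType.ConsumerDetails,
}); // POSTs to /messaging-queues/kafka/partition-latency/consumer

// Any other detail type falls through to the consumer-lag producer-details endpoint.
getPartitionLatencyDetails({
	start: minTime,
	end: maxTime,
	variables: { topic: 'orders', partition: '0' },
	detailType: MessagingQueueServiceDetailType.ProducerDetails,
}); // POSTs to /messaging-queues/kafka/consumer-lag/producer-details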
@@ -0,0 +1,27 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import {
	MessagingQueueServicePayload,
	MessagingQueuesPayloadProps,
} from './getConsumerLagDetails';

export const getPartitionLatencyOverview = async (
	props: Omit<MessagingQueueServicePayload, 'detailType' | 'variables'>,
): Promise<
	SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
> => {
	const response = await axios.post(
		`/messaging-queues/kafka/partition-latency/overview`,
		{
			...props,
		},
	);

	return {
		statusCode: 200,
		error: null,
		message: response.data.status,
		payload: response.data.data,
	};
};
||||
@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import {
	MessagingQueueServicePayload,
	MessagingQueuesPayloadProps,
} from './getConsumerLagDetails';

export const getTopicThroughputDetails = async (
	props: MessagingQueueServicePayload,
): Promise<
	SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
> => {
	const { detailType, ...rest } = props;
	const endpoint = `/messaging-queues/kafka/topic-throughput/${detailType}`;
	const response = await axios.post(endpoint, {
		...rest,
	});

	return {
		statusCode: 200,
		error: null,
		message: response.data.status,
		payload: response.data.data,
	};
};
||||
@@ -0,0 +1,29 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import {
	MessagingQueueServicePayload,
	MessagingQueuesPayloadProps,
} from './getConsumerLagDetails';

export const getTopicThroughputOverview = async (
	props: Omit<MessagingQueueServicePayload, 'variables'>,
): Promise<
	SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
> => {
	const { detailType, start, end } = props;
	const response = await axios.post(
		`messaging-queues/kafka/topic-throughput/${detailType}`,
		{
			start,
			end,
		},
	);

	return {
		statusCode: 200,
		error: null,
		message: response.data.status,
		payload: response.data.data,
	};
};
||||
@@ -0,0 +1,110 @@
|
||||
import './MQDetails.style.scss';
|
||||
|
||||
import { Radio } from 'antd';
|
||||
import { Dispatch, SetStateAction } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import {
|
||||
MessagingQueuesViewType,
|
||||
MessagingQueuesViewTypeOptions,
|
||||
ProducerLatencyOptions,
|
||||
} from '../MessagingQueuesUtils';
|
||||
import { MessagingQueueServicePayload } from './MQTables/getConsumerLagDetails';
|
||||
import { getKafkaSpanEval } from './MQTables/getKafkaSpanEval';
|
||||
import { getPartitionLatencyOverview } from './MQTables/getPartitionLatencyOverview';
|
||||
import { getTopicThroughputOverview } from './MQTables/getTopicThroughputOverview';
|
||||
import MessagingQueuesTable from './MQTables/MQTables';
|
||||
|
||||
type SelectedViewType = keyof typeof MessagingQueuesViewType;
|
||||
|
||||
function PartitionLatencyTabs({
|
||||
option,
|
||||
setOption,
|
||||
}: {
|
||||
option: ProducerLatencyOptions;
|
||||
setOption: Dispatch<SetStateAction<ProducerLatencyOptions>>;
|
||||
}): JSX.Element {
|
||||
return (
|
||||
<Radio.Group
|
||||
onChange={(e): void => setOption(e.target.value)}
|
||||
value={option}
|
||||
className="mq-details-options"
|
||||
>
|
||||
<Radio.Button
|
||||
value={ProducerLatencyOptions.Producers}
|
||||
key={ProducerLatencyOptions.Producers}
|
||||
>
|
||||
{ProducerLatencyOptions.Producers}
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
value={ProducerLatencyOptions.Consumers}
|
||||
key={ProducerLatencyOptions.Consumers}
|
||||
>
|
||||
{ProducerLatencyOptions.Consumers}
|
||||
</Radio.Button>
|
||||
</Radio.Group>
|
||||
);
|
||||
}
|
||||
|
||||
const getTableApi = (selectedView: MessagingQueuesViewTypeOptions): any => {
|
||||
if (selectedView === MessagingQueuesViewType.producerLatency.value) {
|
||||
return getTopicThroughputOverview;
|
||||
}
|
||||
if (selectedView === MessagingQueuesViewType.dropRate.value) {
|
||||
return getKafkaSpanEval;
|
||||
}
|
||||
return getPartitionLatencyOverview;
|
||||
};
|
||||
|
||||
function MessagingQueueOverview({
|
||||
selectedView,
|
||||
option,
|
||||
setOption,
|
||||
}: {
|
||||
selectedView: MessagingQueuesViewTypeOptions;
|
||||
option: ProducerLatencyOptions;
|
||||
setOption: Dispatch<SetStateAction<ProducerLatencyOptions>>;
|
||||
}): JSX.Element {
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const tableApiPayload: MessagingQueueServicePayload = {
|
||||
variables: {},
|
||||
start: minTime,
|
||||
end: maxTime,
|
||||
detailType:
|
||||
// eslint-disable-next-line no-nested-ternary
|
||||
selectedView === MessagingQueuesViewType.producerLatency.value
|
||||
? option === ProducerLatencyOptions.Producers
|
||||
? 'producer'
|
||||
: 'consumer'
|
||||
: undefined,
|
||||
evalTime:
|
||||
selectedView === MessagingQueuesViewType.dropRate.value
|
||||
? 2363404
|
||||
: undefined,
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="mq-overview-container">
|
||||
{selectedView === MessagingQueuesViewType.producerLatency.value ? (
|
||||
<PartitionLatencyTabs option={option} setOption={setOption} />
|
||||
) : (
|
||||
<div className="mq-overview-title">
|
||||
{MessagingQueuesViewType[selectedView as SelectedViewType].label}
|
||||
</div>
|
||||
)}
|
||||
<MessagingQueuesTable
|
||||
selectedView={selectedView}
|
||||
tableApiPayload={tableApiPayload}
|
||||
tableApi={getTableApi(selectedView)}
|
||||
validConfigPresent
|
||||
type="Overview"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
export default MessagingQueueOverview;
|
||||
@@ -0,0 +1,270 @@
|
||||
/* eslint-disable jsx-a11y/no-static-element-interactions */
|
||||
/* eslint-disable jsx-a11y/click-events-have-key-events */
|
||||
import './MessagingQueueHealthCheck.styles.scss';
|
import { CaretDownOutlined, LoadingOutlined } from '@ant-design/icons';
import {
	Modal,
	Select,
	Spin,
	Tooltip,
	Tree,
	TreeDataNode,
	Typography,
} from 'antd';
import { OnboardingStatusResponse } from 'api/messagingQueues/onboarding/getOnboardingStatus';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { History } from 'history';
import { Bolt, Check, OctagonAlert, X } from 'lucide-react';
import { ReactNode, useEffect, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { isCloudUser } from 'utils/app';
import { v4 as uuid } from 'uuid';

import {
	KAFKA_SETUP_DOC_LINK,
	MessagingQueueHealthCheckService,
} from '../MessagingQueuesUtils';

interface AttributeCheckListProps {
	visible: boolean;
	onClose: () => void;
	onboardingStatusResponses: {
		title: string;
		data: OnboardingStatusResponse['data'];
		errorMsg?: string;
	}[];
	loading: boolean;
}

export enum AttributesFilters {
	ALL = 'all',
	SUCCESS = 'success',
	ERROR = 'error',
}

function ErrorTitleAndKey({
	title,
	parentTitle,
	history,
	isCloudUserVal,
	errorMsg,
	isLeaf,
}: {
	title: string;
	parentTitle: string;
	isCloudUserVal: boolean;
	history: History<unknown>;
	errorMsg?: string;
	isLeaf?: boolean;
}): TreeDataNode {
	const handleRedirection = (): void => {
		let link = '';

		switch (parentTitle) {
			case 'Consumers':
				link = `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Consumers}`;
				break;
			case 'Producers':
				link = `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Producers}`;
				break;
			case 'Kafka':
				link = `${ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Kafka}`;
				break;
			default:
				link = '';
		}

		if (isCloudUserVal && !!link) {
			history.push(link);
		} else {
			window.open(KAFKA_SETUP_DOC_LINK, '_blank');
		}
	};
	return {
		key: `${title}-key-${uuid()}`,
		title: (
			<div className="attribute-error-title">
				<Typography.Text className="tree-text" ellipsis={{ tooltip: title }}>
					{title}
				</Typography.Text>
				<Tooltip title={errorMsg}>
					<div
						className="attribute-error-warning"
						onClick={(e): void => {
							e.preventDefault();
							handleRedirection();
						}}
					>
						<OctagonAlert size={14} />
						Fix
					</div>
				</Tooltip>
			</div>
		),
		isLeaf,
	};
}

function AttributeLabels({ title }: { title: ReactNode }): JSX.Element {
	return (
		<div className="attribute-label">
			<Bolt size={14} />
			{title}
		</div>
	);
}

function treeTitleAndKey({
	title,
	isLeaf,
}: {
	title: string;
	isLeaf?: boolean;
}): TreeDataNode {
	return {
		key: `${title}-key-${uuid()}`,
		title: (
			<div className="attribute-success-title">
				<Typography.Text className="tree-text" ellipsis={{ tooltip: title }}>
					{title}
				</Typography.Text>
				{isLeaf && (
					<div className="success-attribute-icon">
						<Tooltip title="Success">
							<Check size={14} />
						</Tooltip>
					</div>
				)}
			</div>
		),
		isLeaf,
	};
}

function generateTreeDataNodes(
	response: OnboardingStatusResponse['data'],
	parentTitle: string,
	isCloudUserVal: boolean,
	history: History<unknown>,
): TreeDataNode[] {
	return response
		.map((item) => {
			if (item.attribute) {
				if (item.status === '1') {
					return treeTitleAndKey({ title: item.attribute, isLeaf: true });
				}
				if (item.status === '0') {
					return ErrorTitleAndKey({
						title: item.attribute,
						errorMsg: item.error_message || '',
						parentTitle,
						history,
						isCloudUserVal,
					});
				}
			}
			return null;
		})
		.filter(Boolean) as TreeDataNode[];
}

function AttributeCheckList({
	visible,
	onClose,
	onboardingStatusResponses,
	loading,
}: AttributeCheckListProps): JSX.Element {
	const [filter, setFilter] = useState<AttributesFilters>(AttributesFilters.ALL);
	const [treeData, setTreeData] = useState<TreeDataNode[]>([]);

	const handleFilterChange = (value: AttributesFilters): void => {
		setFilter(value);
	};
	const isCloudUserVal = isCloudUser();
	const history = useHistory();

	useEffect(() => {
		const filteredData = onboardingStatusResponses.map((response) => {
			if (response.errorMsg) {
				return ErrorTitleAndKey({
					title: response.title,
					errorMsg: response.errorMsg,
					isLeaf: true,
					parentTitle: response.title,
					history,
					isCloudUserVal,
				});
			}
			let filteredData = response.data;

			if (filter === AttributesFilters.SUCCESS) {
				filteredData = response.data.filter((item) => item.status === '1');
			} else if (filter === AttributesFilters.ERROR) {
				filteredData = response.data.filter((item) => item.status === '0');
			}

			return {
				...treeTitleAndKey({ title: response.title }),
				children: generateTreeDataNodes(
					filteredData,
					response.title,
					isCloudUserVal,
					history,
				),
			};
		});

		setTreeData(filteredData);
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [filter, onboardingStatusResponses]);

	return (
		<Modal
			title="Kafka Service Attributes"
			open={visible}
			onCancel={onClose}
			footer={false}
			className="mq-health-check-modal"
			closeIcon={<X size={14} />}
		>
			{loading ? (
				<div className="loader-container">
					<Spin indicator={<LoadingOutlined spin />} size="large" />
				</div>
			) : (
				<div className="modal-content">
					<Select
						defaultValue={AttributesFilters.ALL}
						className="attribute-select"
						onChange={handleFilterChange}
						options={[
							{
								value: AttributesFilters.ALL,
								label: AttributeLabels({ title: 'Attributes: All' }),
							},
							{
								value: AttributesFilters.SUCCESS,
								label: AttributeLabels({ title: 'Attributes: Success' }),
							},
							{
								value: AttributesFilters.ERROR,
								label: AttributeLabels({ title: 'Attributes: Error' }),
							},
						]}
					/>
					<Tree
						showLine
						switcherIcon={<CaretDownOutlined />}
						treeData={treeData}
						height={450}
						className="attribute-tree"
					/>
				</div>
			)}
		</Modal>
	);
}

export default AttributeCheckList;
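
For readers scanning the diff, here is a minimal, hypothetical sketch of how the modal above could be driven by a parent component. The parent name and the sample entry are illustrative assumptions and are not part of this change set.

import { useState } from 'react';

import AttributeCheckList from './AttributeCheckList';

// Hypothetical parent: owns the open/close state and passes one entry per polled service.
function HealthCheckModalExample(): JSX.Element {
	const [open, setOpen] = useState(false);
	return (
		<AttributeCheckList
			visible={open}
			onClose={(): void => setOpen(false)}
			loading={false}
			onboardingStatusResponses={[
				// Each entry mirrors OnboardingStatusResponse['data']; per the checks above,
				// status '1' marks an attribute as present and '0' as missing.
				{ title: 'Consumers', data: [] },
			]}
		/>
	);
}
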
@@ -0,0 +1,168 @@
.mq-health-check-modal {
	.ant-modal-content {
		border-radius: 4px;
		border: 1px solid var(--bg-slate-500);
		background: var(--bg-ink-400);
		box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);

		.ant-modal-close {
			margin-top: 4px;
		}

		.ant-modal-header {
			border-bottom: 1px solid var(--bg-slate-500);
			background: var(--bg-ink-400);
			margin-bottom: 16px;
			padding-bottom: 4px;

			.ant-modal-title {
				color: var(--bg-vanilla-100);
				font-family: Inter;
				font-size: 13px;
				font-style: normal;
				font-weight: 600;
				line-height: 22px;
				letter-spacing: 0.52px;
			}
		}

		.modal-content {
			display: flex;
			flex-direction: column;
			gap: 8px;
			padding: 8px;
			background: var(--bg-ink-300);

			.attribute-select {
				align-items: center;
				display: flex;
				gap: 8px;
				width: 170px;

				.ant-select-selector {
					display: flex;
					height: 28px !important;
					padding: 8px;
					align-items: center;
					gap: 4px;

					border-radius: 2px;
					border: 1px solid var(--bg-slate-400);
					background: var(--bg-ink-300);
				}
			}

			.attribute-tree {
				padding: 8px;
			}

			.tree-text {
				color: var(--bg-vanilla-400);
				font-family: 'Geist Mono';
				font-size: 12px;
				font-style: normal;
				font-weight: 400;
				line-height: 16px;
				width: 328px;
			}

			.ant-tree {
				.ant-tree-title {
					cursor: default;

					.attribute-error-title {
						display: flex;
						align-items: center;
						color: var(--bg-amber-400);
						height: 24px;

						.tree-text {
							color: var(--bg-amber-400);
						}

						.attribute-error-warning {
							display: flex;
							align-items: center;
							gap: 6px;

							font-family: 'Geist Mono';
							font-size: 12px;
							font-style: normal;
							font-weight: 400;
							line-height: 16px;
							cursor: pointer;
						}
					}

					.attribute-success-title {
						display: flex;
						align-items: center;
						height: 24px;

						.success-attribute-icon {
							width: 44px;
							color: var(--bg-vanilla-400);
							display: flex;

							> svg {
								margin-left: auto;
							}
						}
					}
				}

				.ant-tree-treenode {
					width: 100%;

					.ant-tree-node-content-wrapper {
						width: 100%;
						max-width: 380px;
					}
				}
			}
		}

		.loader-container {
			display: flex;
			justify-content: center;
			align-items: center;
			height: 100%;
			padding: 8px;
			background: var(--bg-ink-300);
			height: 156px;
		}
	}
}

.attribute-label {
	display: flex;
	align-items: center;
	gap: 8px;
}

.config-btn {
	display: flex;
	align-items: center;
	height: 28px;
	border-radius: 2px;
	border: none;
	box-shadow: none;
	background: var(--bg-slate-500);

	&.missing-config-btn {
		background: rgba(255, 205, 86, 0.1);
		color: var(--bg-amber-400);

		&:hover {
			color: var(--bg-amber-300) !important;
		}
	}

	.config-btn-content {
		display: flex;
		align-items: center;
		margin-right: 8px;
		border-right: 1px solid rgba(255, 215, 120, 0.1);
		padding-right: 8px;
	}
}
@@ -0,0 +1,133 @@
/* eslint-disable sonarjs/no-duplicate-string */
import './MessagingQueueHealthCheck.styles.scss';

import { Button } from 'antd';
import cx from 'classnames';
import { useOnboardingStatus } from 'hooks/messagingQueue/onboarding/useOnboardingStatus';
import { Bolt, FolderTree } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';

import { MessagingQueueHealthCheckService } from '../MessagingQueuesUtils';
import AttributeCheckList from './AttributeCheckList';

interface MessagingQueueHealthCheckProps {
	serviceToInclude: string[];
}

function MessagingQueueHealthCheck({
	serviceToInclude,
}: MessagingQueueHealthCheckProps): JSX.Element {
	const [loading, setLoading] = useState(false);
	const [checkListOpen, setCheckListOpen] = useState(false);

	// Consumer Data
	const {
		data: consumerData,
		error: consumerError,
		isFetching: consumerLoading,
	} = useOnboardingStatus(
		{
			enabled: !!serviceToInclude.filter(
				(service) => service === MessagingQueueHealthCheckService.Consumers,
			).length,
		},
		MessagingQueueHealthCheckService.Consumers,
	);

	// Producer Data
	const {
		data: producerData,
		error: producerError,
		isFetching: producerLoading,
	} = useOnboardingStatus(
		{
			enabled: !!serviceToInclude.filter(
				(service) => service === MessagingQueueHealthCheckService.Producers,
			).length,
		},
		MessagingQueueHealthCheckService.Producers,
	);

	// Kafka Data
	const {
		data: kafkaData,
		error: kafkaError,
		isFetching: kafkaLoading,
	} = useOnboardingStatus(
		{
			enabled: !!serviceToInclude.filter(
				(service) => service === MessagingQueueHealthCheckService.Kafka,
			).length,
		},
		MessagingQueueHealthCheckService.Kafka,
	);

	// combined loading and update state
	useEffect(() => {
		setLoading(consumerLoading || producerLoading || kafkaLoading);
	}, [consumerLoading, producerLoading, kafkaLoading]);

	const missingConfiguration = useMemo(() => {
		const consumerMissing =
			(serviceToInclude.includes(MessagingQueueHealthCheckService.Consumers) &&
				consumerData?.payload?.data?.filter((item) => item.status === '0')
					.length) ||
			0;
		const producerMissing =
			(serviceToInclude.includes(MessagingQueueHealthCheckService.Producers) &&
				producerData?.payload?.data?.filter((item) => item.status === '0')
					.length) ||
			0;
		const kafkaMissing =
			(serviceToInclude.includes(MessagingQueueHealthCheckService.Kafka) &&
				kafkaData?.payload?.data?.filter((item) => item.status === '0').length) ||
			0;

		return consumerMissing + producerMissing + kafkaMissing;
	}, [consumerData, producerData, kafkaData, serviceToInclude]);

	return (
		<div>
			<Button
				onClick={(): void => setCheckListOpen(true)}
				loading={loading}
				className={cx(
					'config-btn',
					missingConfiguration ? 'missing-config-btn' : '',
				)}
				icon={<Bolt size={12} />}
			>
				<div className="config-btn-content">
					{missingConfiguration
						? `Missing Configuration (${missingConfiguration})`
						: 'Configuration'}
				</div>
				<FolderTree size={14} />
			</Button>
			<AttributeCheckList
				visible={checkListOpen}
				onClose={(): void => setCheckListOpen(false)}
				onboardingStatusResponses={[
					{
						title: 'Consumers',
						data: consumerData?.payload?.data || [],
						errorMsg: (consumerError || consumerData?.error) as string,
					},
					{
						title: 'Producers',
						data: producerData?.payload?.data || [],
						errorMsg: (producerError || producerData?.error) as string,
					},
					{
						title: 'Kafka',
						data: kafkaData?.payload?.data || [],
						errorMsg: (kafkaError || kafkaData?.error) as string,
					},
				].filter((item) => serviceToInclude.includes(item.title.toLowerCase()))}
				loading={loading}
			/>
		</div>
	);
}

export default MessagingQueueHealthCheck;
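
As a usage note, the `enabled` flag on each `useOnboardingStatus` call above is derived from `serviceToInclude`, so a caller can scope the health check to a single service. A hypothetical sketch, not part of this diff:

// Only the Kafka onboarding-status request fires; consumer and producer queries stay disabled.
<MessagingQueueHealthCheck
	serviceToInclude={[MessagingQueueHealthCheckService.Kafka]}
/>
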
@@ -47,7 +47,7 @@

	.header-config {
		display: flex;
		gap: 10px;
		gap: 12px;
		align-items: center;

		.messaging-queue-options {
@@ -106,6 +106,8 @@

	.mq-details-options {
		letter-spacing: -0.06px;
		cursor: pointer;

		.ant-radio-button-wrapper {
			border-color: var(--bg-slate-400);
			color: var(--bg-vanilla-400);
@@ -219,6 +221,23 @@
			}
		}
	}

	&.summary-section {
		.overview-info-card {
			min-height: 144px;

			.card-title {
				color: var(--bg-vanilla-400);
				font-family: Inter;
				font-size: 13px;
				font-style: normal;
				font-weight: 500;
				line-height: 22px;
				letter-spacing: 0.52px;
				text-transform: uppercase;
			}
		}
	}
}

.summary-section {
@@ -1,47 +1,38 @@
/* eslint-disable sonarjs/no-duplicate-string */
import './MessagingQueues.styles.scss';

import { ExclamationCircleFilled } from '@ant-design/icons';
import { Color } from '@signozhq/design-tokens';
import { Button, Modal } from 'antd';
import { Button } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { Calendar, ListMinus } from 'lucide-react';
import { ListMinus } from 'lucide-react';
import { useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { useHistory } from 'react-router-dom';
import { isCloudUser } from 'utils/app';

import MessagingQueueHealthCheck from './MessagingQueueHealthCheck/MessagingQueueHealthCheck';
import {
	KAFKA_SETUP_DOC_LINK,
	MessagingQueueHealthCheckService,
	MessagingQueuesViewType,
} from './MessagingQueuesUtils';
import { ComingSoon } from './MQCommon/MQCommon';

function MessagingQueues(): JSX.Element {
	const history = useHistory();
	const { t } = useTranslation('messagingQueuesKafkaOverview');

	const { confirm } = Modal;

	const showConfirm = (): void => {
	const redirectToDetailsPage = (callerView?: string): void => {
		logEvent('Messaging Queues: View details clicked', {
			page: 'Messaging Queues Overview',
			source: 'Consumer Latency view',
			source: callerView,
		});

		confirm({
			icon: <ExclamationCircleFilled />,
			content: t('confirmModal.content'),
			className: 'overview-confirm-modal',
			onOk() {
				logEvent('Messaging Queues: Proceed button clicked', {
					page: 'Messaging Queues Overview',
				});
				history.push(ROUTES.MESSAGING_QUEUES_DETAIL);
			},
			okText: t('confirmModal.okText'),
		});
		history.push(
			`${ROUTES.MESSAGING_QUEUES_DETAIL}?${QueryParams.mqServiceView}=${callerView}`,
		);
	};

	const isCloudUserVal = isCloudUser();
@@ -69,7 +60,16 @@ function MessagingQueues(): JSX.Element {
				{t('breadcrumb')}
			</div>
			<div className="messaging-header">
				<div className="header-config">{t('header')}</div>
				<div className="header-config">
					{t('header')} /
					<MessagingQueueHealthCheck
						serviceToInclude={[
							MessagingQueueHealthCheckService.Consumers,
							MessagingQueueHealthCheckService.Producers,
							MessagingQueueHealthCheckService.Kafka,
						]}
					/>
				</div>
				<DateTimeSelectionV2 showAutoRefresh={false} hideShareModal />
			</div>
			<div className="messaging-overview">
@@ -86,7 +86,7 @@ function MessagingQueues(): JSX.Element {
						type="default"
						onClick={(): void =>
							getStartedRedirect(
								ROUTES.GET_STARTED_APPLICATION_MONITORING,
								`${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Consumers}`,
								'Configure Consumer',
							)
						}
@@ -105,7 +105,7 @@ function MessagingQueues(): JSX.Element {
						type="default"
						onClick={(): void =>
							getStartedRedirect(
								ROUTES.GET_STARTED_APPLICATION_MONITORING,
								`${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Producers}`,
								'Configure Producer',
							)
						}
@@ -124,7 +124,7 @@ function MessagingQueues(): JSX.Element {
						type="default"
						onClick={(): void =>
							getStartedRedirect(
								ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING,
								`${ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Kafka}`,
								'Monitor kafka',
							)
						}
@@ -134,55 +134,80 @@ function MessagingQueues(): JSX.Element {
					</div>
				</div>
			</div>
			<div className="summary-section">
				<div className="summary-card">
					<div className="summary-title">
						<p>{MessagingQueuesViewType.consumerLag.label}</p>
						<div className="time-value">
							<Calendar size={14} color={Color.BG_SLATE_200} />
							<p className="time-value">1D</p>
						</div>

			<p className="overview-text">{t('overviewSummarySection.title')}</p>
			<p className="overview-subtext">{t('overviewSummarySection.subtitle')}</p>
			<div className={cx('overview-doc-area', 'summary-section')}>
				<div className="overview-info-card">
					<div>
						<p className="card-title">{t('summarySection.consumer.title')}</p>
						<p className="card-info-text">
							{t('summarySection.consumer.description')}
						</p>
					</div>
					<div className="view-detail-btn">
						<Button type="primary" onClick={showConfirm}>
					<div className="button-grp">
						<Button
							type="default"
							onClick={(): void =>
								redirectToDetailsPage(MessagingQueuesViewType.consumerLag.value)
							}
						>
							{t('summarySection.viewDetailsButton')}
						</Button>
					</div>
				</div>
				<div className="summary-card coming-soon-card">
					<div className="summary-title">
						<p>{MessagingQueuesViewType.partitionLatency.label}</p>
						<div className="time-value">
							<Calendar size={14} color={Color.BG_SLATE_200} />
							<p className="time-value">1D</p>
						</div>
				<div className="overview-info-card middle-card">
					<div>
						<p className="card-title">{t('summarySection.producer.title')}</p>
						<p className="card-info-text">
							{t('summarySection.producer.description')}
						</p>
					</div>
					<div className="view-detail-btn">
						<ComingSoon />
					<div className="button-grp">
						<Button
							type="default"
							onClick={(): void =>
								redirectToDetailsPage(MessagingQueuesViewType.producerLatency.value)
							}
						>
							{t('summarySection.viewDetailsButton')}
						</Button>
					</div>
				</div>
				<div className="summary-card coming-soon-card">
					<div className="summary-title">
						<p>{MessagingQueuesViewType.producerLatency.label}</p>
						<div className="time-value">
							<Calendar size={14} color={Color.BG_SLATE_200} />
							<p className="time-value">1D</p>
						</div>
				<div className="overview-info-card middle-card">
					<div>
						<p className="card-title">{t('summarySection.partition.title')}</p>
						<p className="card-info-text">
							{t('summarySection.partition.description')}
						</p>
					</div>
					<div className="view-detail-btn">
						<ComingSoon />
					<div className="button-grp">
						<Button
							type="default"
							onClick={(): void =>
								redirectToDetailsPage(MessagingQueuesViewType.partitionLatency.value)
							}
						>
							{t('summarySection.viewDetailsButton')}
						</Button>
					</div>
				</div>
				<div className="summary-card coming-soon-card">
					<div className="summary-title">
						<p>{MessagingQueuesViewType.consumerLatency.label}</p>
						<div className="time-value">
							<Calendar size={14} color={Color.BG_SLATE_200} />
							<p className="time-value">1D</p>
						</div>
				<div className="overview-info-card">
					<div>
						<p className="card-title">{t('summarySection.dropRate.title')}</p>
						<p className="card-info-text">
							{t('summarySection.dropRate.description')}
						</p>
					</div>
					<div className="view-detail-btn">
						<ComingSoon />
					<div className="button-grp">
						<Button
							type="default"
							onClick={(): void =>
								redirectToDetailsPage(MessagingQueuesViewType.dropRate.value)
							}
						>
							{t('summarySection.viewDetailsButton')}
						</Button>
					</div>
				</div>
			</div>
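
The summary cards above now deep-link straight to the details page instead of opening a confirmation modal. A hedged sketch of how the receiving page might read the selected view back out of the URL; the variable names here are illustrative and not part of this diff:

// Illustrative only: redirectToDetailsPage() pushes a URL of the form
// `${ROUTES.MESSAGING_QUEUES_DETAIL}?${QueryParams.mqServiceView}=consumerLag`.
const urlQuery = new URLSearchParams(window.location.search);
const selectedView =
	urlQuery.get(QueryParams.mqServiceView) || MessagingQueuesViewType.consumerLag.value;
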
@@ -1,14 +1,24 @@
import { OnboardingStatusResponse } from 'api/messagingQueues/onboarding/getOnboardingStatus';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { GetWidgetQueryBuilderProps } from 'container/MetricsApplication/types';
import { History, Location } from 'history';
import { isEmpty } from 'lodash-es';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { v4 as uuid } from 'uuid';

import {
	getConsumerLagDetails,
	MessagingQueueServicePayload,
	MessagingQueuesPayloadProps,
} from './MQDetails/MQTables/getConsumerLagDetails';
import { getPartitionLatencyDetails } from './MQDetails/MQTables/getPartitionLatencyDetails';
import { getTopicThroughputDetails } from './MQDetails/MQTables/getTopicThroughputDetails';

export const KAFKA_SETUP_DOC_LINK =
	'https://signoz.io/docs/messaging-queues/kafka?utm_source=product&utm_medium=kafka-get-started';

@@ -24,14 +34,17 @@ export type RowData = {
	[key: string]: string | number;
};

export enum ConsumerLagDetailType {
export enum MessagingQueueServiceDetailType {
	ConsumerDetails = 'consumer-details',
	ProducerDetails = 'producer-details',
	NetworkLatency = 'network-latency',
	PartitionHostMetrics = 'partition-host-metric',
}

export const ConsumerLagDetailTitle: Record<ConsumerLagDetailType, string> = {
export const ConsumerLagDetailTitle: Record<
	MessagingQueueServiceDetailType,
	string
> = {
	'consumer-details': 'Consumer Groups Details',
	'producer-details': 'Producer Details',
	'network-latency': 'Network Latency',
@@ -205,21 +218,172 @@ export function setSelectedTimelineQuery(
	history.replace(generatedUrl);
}

export enum MessagingQueuesViewTypeOptions {
	ConsumerLag = 'consumerLag',
	PartitionLatency = 'partitionLatency',
	ProducerLatency = 'producerLatency',
	ConsumerLatency = 'consumerLatency',
}

export const MessagingQueuesViewType = {
	consumerLag: {
		label: 'Consumer Lag view',
		value: 'consumerLag',
		value: MessagingQueuesViewTypeOptions.ConsumerLag,
	},
	partitionLatency: {
		label: 'Partition Latency view',
		value: 'partitionLatency',
		value: MessagingQueuesViewTypeOptions.PartitionLatency,
	},
	producerLatency: {
		label: 'Producer Latency view',
		value: 'producerLatency',
		value: MessagingQueuesViewTypeOptions.ProducerLatency,
	},
	consumerLatency: {
		label: 'Consumer latency view',
		value: 'consumerLatency',
	dropRate: {
		label: 'Drop Rate view',
		value: 'dropRate',
	},
};

export function setConfigDetail(
	urlQuery: URLSearchParams,
	location: Location<unknown>,
	history: History<unknown>,
	paramsToSet?: {
		[key: string]: string;
	},
): void {
	// remove "key" and its value from the paramsToSet object
	const { key, ...restParamsToSet } = paramsToSet || {};

	if (!isEmpty(restParamsToSet)) {
		const configDetail = {
			...restParamsToSet,
		};
		urlQuery.set(
			QueryParams.configDetail,
			encodeURIComponent(JSON.stringify(configDetail)),
		);
	} else {
		urlQuery.delete(QueryParams.configDetail);
	}
	const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
	history.replace(generatedUrl);
}

export enum ProducerLatencyOptions {
	Producers = 'Producers',
	Consumers = 'Consumers',
}

interface MetaDataAndAPI {
	tableApiPayload: MessagingQueueServicePayload;
	tableApi: (
		props: MessagingQueueServicePayload,
	) => Promise<
		SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
	>;
}
interface MetaDataAndAPIPerView {
	detailType: MessagingQueueServiceDetailType;
	selectedTimelineQuery: SelectedTimelineQuery;
	configDetails?: {
		[key: string]: string;
	};
	minTime: number;
	maxTime: number;
}

export const getMetaDataAndAPIPerView = (
	metaDataProps: MetaDataAndAPIPerView,
): Record<string, MetaDataAndAPI> => {
	const {
		detailType,
		minTime,
		maxTime,
		selectedTimelineQuery,
		configDetails,
	} = metaDataProps;
	return {
		[MessagingQueuesViewType.consumerLag.value]: {
			tableApiPayload: {
				start: (selectedTimelineQuery?.start || 0) * 1e9,
				end: (selectedTimelineQuery?.end || 0) * 1e9,
				variables: {
					partition: selectedTimelineQuery?.partition,
					topic: selectedTimelineQuery?.topic,
					consumer_group: selectedTimelineQuery?.group,
				},
				detailType,
			},
			tableApi: getConsumerLagDetails,
		},
		[MessagingQueuesViewType.partitionLatency.value]: {
			tableApiPayload: {
				start: minTime,
				end: maxTime,
				variables: {
					partition: configDetails?.partition,
					topic: configDetails?.topic,
					consumer_group: configDetails?.group,
				},
				detailType,
			},
			tableApi: getPartitionLatencyDetails,
		},
		[MessagingQueuesViewType.producerLatency.value]: {
			tableApiPayload: {
				start: minTime,
				end: maxTime,
				variables: {
					partition: configDetails?.partition,
					topic: configDetails?.topic,
					service_name: configDetails?.service_name,
				},
				detailType,
			},
			tableApi: getTopicThroughputDetails,
		},
	};
};

interface OnboardingStatusAttributeData {
	overallStatus: string;
	allAvailableAttributes: string[];
	attributeDataWithError: { attributeName: string; errorMsg: string }[];
}

export const getAttributeDataFromOnboardingStatus = (
	onboardingStatus?: OnboardingStatusResponse | null,
): OnboardingStatusAttributeData => {
	const allAvailableAttributes: string[] = [];
	const attributeDataWithError: {
		attributeName: string;
		errorMsg: string;
	}[] = [];

	if (onboardingStatus?.data && !isEmpty(onboardingStatus?.data)) {
		onboardingStatus.data.forEach((status) => {
			if (status.attribute) {
				allAvailableAttributes.push(status.attribute);
				if (status.status === '0') {
					attributeDataWithError.push({
						attributeName: status.attribute,
						errorMsg: status.error_message || '',
					});
				}
			}
		});
	}

	return {
		overallStatus: attributeDataWithError.length ? 'error' : 'success',
		allAvailableAttributes,
		attributeDataWithError,
	};
};

export enum MessagingQueueHealthCheckService {
	Consumers = 'consumers',
	Producers = 'producers',
	Kafka = 'kafka',
}
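
A short, hypothetical sketch of how the new getAttributeDataFromOnboardingStatus helper can be consumed; the response variable is a stand-in for an OnboardingStatusResponse fetched elsewhere and is not part of this diff:

// Illustrative only.
const { overallStatus, attributeDataWithError } = getAttributeDataFromOnboardingStatus(
	onboardingStatusResponse,
);
if (overallStatus === 'error') {
	// Each entry pairs a missing attribute name with the reported error message.
	console.warn(attributeDataWithError.map((attr) => attr.attributeName));
}
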
@@ -9,6 +9,7 @@ export interface ILog {
	severityNumber: number;
	body: string;
	resources_string: Record<string, never>;
	scope_string: Record<string, never>;
	attributesString: Record<string, never>;
	attributes_string: Record<string, never>;
	attributesInt: Record<string, never>;
@@ -22,6 +23,7 @@ type OmitAttributesResources = Pick<
	Exclude<
		keyof ILog,
		| 'resources_string'
		| 'scope_string'
		| 'attributesString'
		| 'attributes_string'
		| 'attributesInt'
@@ -32,4 +34,5 @@ type OmitAttributesResources = Pick<
export type ILogAggregateAttributesResources = OmitAttributesResources & {
	attributes: Record<string, never>;
	resources: Record<string, never>;
	scope: Record<string, never>;
};
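
With the new scope fields on the log types above, instrumentation-scope attributes travel alongside resource and log attributes. A hypothetical accessor, assuming only the Record shapes declared above:

// Illustrative only: scope_string carries instrumentation-scope attributes (e.g. version).
function getScopeVersion(log: ILog): string | undefined {
	return (log.scope_string as Record<string, string>).version;
}
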
@@ -2212,6 +2212,13 @@
  dependencies:
    regenerator-runtime "^0.14.0"

"@babel/runtime@^7.7.6":
  version "7.26.0"
  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.0.tgz#8600c2f595f277c60815256418b85356a65173c1"
  integrity sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==
  dependencies:
    regenerator-runtime "^0.14.0"

"@babel/template@^7.18.10", "@babel/template@^7.20.7", "@babel/template@^7.3.3":
  version "7.20.7"
  resolved "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz"
@@ -3534,6 +3541,11 @@
  resolved "https://registry.yarnpkg.com/@react-dnd/shallowequal/-/shallowequal-4.0.2.tgz#d1b4befa423f692fa4abf1c79209702e7d8ae4b4"
  integrity sha512-/RVXdLvJxLg4QKvMoM5WlwNR9ViO9z8B/qPcc+C0Sa/teJY7QG7kJ441DwzOjMYEY7GmU4dj5EcGHIkKZiQZCA==

"@remix-run/router@1.20.0":
  version "1.20.0"
  resolved "https://registry.yarnpkg.com/@remix-run/router/-/router-1.20.0.tgz#03554155b45d8b529adf635b2f6ad1165d70d8b4"
  integrity sha512-mUnk8rPJBI9loFDZ+YzPGdeniYK+FTmRD1TMCz7ev2SNIozyKKpnGgsxO34u6Z4z/t0ITuu7voi/AshfsGsgFg==

"@rollup/pluginutils@^5.1.0":
  version "5.1.0"
  resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.1.0.tgz#7e53eddc8c7f483a4ad0b94afb1f7f5fd3c771e0"
@@ -6064,7 +6076,7 @@ bl@^4.1.0:
    inherits "^2.0.4"
    readable-stream "^3.4.0"

body-parser@1.20.2, body-parser@1.20.3:
body-parser@1.20.3:
  version "1.20.3"
  resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6"
  integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==
@@ -6796,10 +6808,10 @@ cookie-signature@1.0.6:
  resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz"
  integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==

cookie@0.6.0:
  version "0.6.0"
  resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
  integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
cookie@0.7.1:
  version "0.7.1"
  resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9"
  integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==

cookie@^0.4.2:
  version "0.4.2"
@@ -7813,6 +7825,11 @@ encodeurl@~1.0.2:
  resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz"
  integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==

encodeurl@~2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58"
  integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==

enhanced-resolve@^5.17.1, enhanced-resolve@^5.7.0:
  version "5.17.1"
  resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15"
@@ -8463,36 +8480,36 @@ expect@^29.0.0:
    jest-util "^29.5.0"

express@^4.17.3:
  version "4.19.2"
  resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465"
  integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==
  version "4.21.1"
  resolved "https://registry.yarnpkg.com/express/-/express-4.21.1.tgz#9dae5dda832f16b4eec941a4e44aa89ec481b281"
  integrity sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==
  dependencies:
    accepts "~1.3.8"
    array-flatten "1.1.1"
    body-parser "1.20.2"
    body-parser "1.20.3"
    content-disposition "0.5.4"
    content-type "~1.0.4"
    cookie "0.6.0"
    cookie "0.7.1"
    cookie-signature "1.0.6"
    debug "2.6.9"
    depd "2.0.0"
    encodeurl "~1.0.2"
    encodeurl "~2.0.0"
    escape-html "~1.0.3"
    etag "~1.8.1"
    finalhandler "1.2.0"
    finalhandler "1.3.1"
    fresh "0.5.2"
    http-errors "2.0.0"
    merge-descriptors "1.0.1"
    merge-descriptors "1.0.3"
    methods "~1.1.2"
    on-finished "2.4.1"
    parseurl "~1.3.3"
    path-to-regexp "0.1.7"
    path-to-regexp "0.1.10"
    proxy-addr "~2.0.7"
    qs "6.11.0"
    qs "6.13.0"
    range-parser "~1.2.1"
    safe-buffer "5.2.1"
    send "0.18.0"
    serve-static "1.15.0"
    send "0.19.0"
    serve-static "1.16.2"
    setprototypeof "1.2.0"
    statuses "2.0.1"
    type-is "~1.6.18"
@@ -8636,13 +8653,13 @@ fill-range@^7.1.1:
  dependencies:
    to-regex-range "^5.0.1"

finalhandler@1.2.0:
  version "1.2.0"
  resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz"
  integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==
finalhandler@1.3.1:
  version "1.3.1"
  resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019"
  integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==
  dependencies:
    debug "2.6.9"
    encodeurl "~1.0.2"
    encodeurl "~2.0.0"
    escape-html "~1.0.3"
    on-finished "2.4.1"
    parseurl "~1.3.3"
@@ -9393,6 +9410,13 @@ history@4.10.1, history@^4.9.0:
    tiny-warning "^1.0.0"
    value-equal "^1.0.1"

history@^5.3.0:
  version "5.3.0"
  resolved "https://registry.yarnpkg.com/history/-/history-5.3.0.tgz#1548abaa245ba47992f063a0783db91ef201c73b"
  integrity sha512-ZqaKwjjrAYUYfLG+htGaIIZ4nioX2L70ZUMIFysS3xvBsSG4x/n1V6TXV3N8ZYNuFGlDirFg32T7B6WOUPDYcQ==
  dependencies:
    "@babel/runtime" "^7.7.6"

hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.1.0, hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.2:
  version "3.3.2"
  resolved "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz"
@@ -11636,10 +11660,10 @@ meow@^8.0.0:
    type-fest "^0.18.0"
    yargs-parser "^20.2.3"

merge-descriptors@1.0.1:
  version "1.0.1"
  resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz"
  integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==
merge-descriptors@1.0.3:
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5"
  integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==

merge-stream@^2.0.0:
  version "2.0.0"
@@ -12971,10 +12995,10 @@ path-scurry@^1.6.1:
    lru-cache "^9.1.1 || ^10.0.0"
    minipass "^5.0.0 || ^6.0.2 || ^7.0.0"

path-to-regexp@0.1.7:
  version "0.1.7"
  resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz"
  integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==
path-to-regexp@0.1.10:
  version "0.1.10"
  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b"
  integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==

path-to-regexp@^1.7.0:
  version "1.8.0"
@@ -13548,13 +13572,6 @@ qrcode.react@^3.1.0:
  resolved "https://registry.yarnpkg.com/qrcode.react/-/qrcode.react-3.1.0.tgz#5c91ddc0340f768316fbdb8fff2765134c2aecd8"
  integrity sha512-oyF+Urr3oAMUG/OiOuONL3HXM+53wvuH3mtIWQrYmsXoAq0DkvZp2RYUWFSMFtbdOpuS++9v+WAkzNVkMlNW6Q==

qs@6.11.0:
  version "6.11.0"
  resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz"
  integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
  dependencies:
    side-channel "^1.0.4"

qs@6.13.0:
  version "6.13.0"
  resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906"
@@ -14269,6 +14286,15 @@ react-resizable@^3.0.4:
    prop-types "15.x"
    react-draggable "^4.0.3"

react-router-dom-v5-compat@6.27.0:
  version "6.27.0"
  resolved "https://registry.yarnpkg.com/react-router-dom-v5-compat/-/react-router-dom-v5-compat-6.27.0.tgz#19429aefa130ee151e6f4487d073d919ec22d458"
  integrity sha512-hq5yCVoW73mljLhRjyzpYmooLNPR/xb17S3CTz9fe2/P7q821ivMEa9c8OtrAk2Bs83PcNAtst5okT/aUhJtgg==
  dependencies:
    "@remix-run/router" "1.20.0"
    history "^5.3.0"
    react-router "6.27.0"

react-router-dom@^5.2.0:
  version "5.3.4"
  resolved "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.4.tgz"
@@ -14297,6 +14323,13 @@ react-router@5.3.4:
    tiny-invariant "^1.0.2"
    tiny-warning "^1.0.0"

react-router@6.27.0:
  version "6.27.0"
  resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.27.0.tgz#db292474926c814c996c0ff3ef0162d1f9f60ed4"
  integrity sha512-YA+HGZXz4jaAkVoYBE98VQl+nVzI+cVI2Oj/06F5ZM+0u3TgedN9Y9kmMRo2mnkSK2nCpNQn0DVob4HCsY/WLw==
  dependencies:
    "@remix-run/router" "1.20.0"

react-syntax-highlighter@15.5.0:
  version "15.5.0"
  resolved "https://registry.yarnpkg.com/react-syntax-highlighter/-/react-syntax-highlighter-15.5.0.tgz#4b3eccc2325fa2ec8eff1e2d6c18fa4a9e07ab20"
@@ -15039,10 +15072,10 @@ selfsigned@^2.1.1:
  dependencies:
    lru-cache "^6.0.0"

send@0.18.0:
  version "0.18.0"
  resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz"
  integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==
send@0.19.0:
  version "0.19.0"
  resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8"
  integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==
  dependencies:
    debug "2.6.9"
    depd "2.0.0"
@@ -15094,15 +15127,15 @@ serve-index@^1.9.1:
    mime-types "~2.1.17"
    parseurl "~1.3.2"

serve-static@1.15.0:
  version "1.15.0"
  resolved "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz"
  integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==
serve-static@1.16.2:
  version "1.16.2"
  resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296"
  integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==
  dependencies:
    encodeurl "~1.0.2"
    encodeurl "~2.0.0"
    escape-html "~1.0.3"
    parseurl "~1.3.3"
    send "0.18.0"
    send "0.19.0"

set-cookie-parser@^2.4.6:
  version "2.6.0"
@@ -69,7 +69,7 @@ func EnrichmentRequired(params *v3.QueryRangeParamsV3) bool {
// but the query should work regardless and shouldn't fail
func isEnriched(field v3.AttributeKey) bool {
	// if it is timestamp/id dont check
	if field.Key == "timestamp" || field.Key == "id" || field.Key == constants.SigNozOrderByValue {
	if field.Key == "timestamp" || field.Key == "id" || field.Key == constants.SigNozOrderByValue || field.Type == v3.AttributeKeyTypeInstrumentationScope {
		return true
	}

@@ -57,6 +57,9 @@ func GetClickhouseLogsColumnType(columnType v3.AttributeKeyType) string {
	if columnType == v3.AttributeKeyTypeTag {
		return "attributes"
	}
	if columnType == v3.AttributeKeyTypeInstrumentationScope {
		return "scope"
	}
	return "resources"
}

@@ -416,9 +419,6 @@ func orderBy(panelType v3.PanelType, items []v3.OrderBy, tagLookup map[string]st
		} else if panelType == v3.PanelTypeList {
			attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn}
			name := getClickhouseColumnName(attr)
			if item.IsColumn {
				name = "`" + name + "`"
			}
			orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order))
		}
	}

@@ -53,6 +53,11 @@ var testGetClickhouseColumnNameData = []struct {
		AttributeKey:       v3.AttributeKey{Key: "test-attr", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
		ExpectedColumnName: "`attribute_string_test-attr`",
	},
	{
		Name:               "instrumentation scope attribute",
		AttributeKey:       v3.AttributeKey{Key: "version", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeInstrumentationScope, IsColumn: false},
		ExpectedColumnName: "scope_string_value[indexOf(scope_string_key, 'version')]",
	},
}

func TestGetClickhouseColumnName(t *testing.T) {
@@ -130,6 +135,13 @@ var timeSeriesFilterQueryData = []struct {
		}},
		ExpectedFilter: "attributes_string_value[indexOf(attributes_string_key, 'user_name')] = 'john' AND resources_string_value[indexOf(resources_string_key, 'k8s_namespace')] != 'my_service'",
	},
	{
		Name: "Test instrumentation scope attribute",
		FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "version", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeInstrumentationScope}, Value: "v1", Operator: "="},
		}},
		ExpectedFilter: "scope_string_value[indexOf(scope_string_key, 'version')] = 'v1'",
	},
	{
		Name: "Test attribute and resource attribute with different case",
		FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
@@ -758,10 +770,11 @@ var testBuildLogsQueryData = []struct {
			Expression:        "A",
			Filters:           &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}},
		},
		ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
		ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
			"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
			"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
			"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
			"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
			"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
			"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by timestamp DESC",
	},
	{
@@ -775,13 +788,14 @@ var testBuildLogsQueryData = []struct {
			AggregateOperator: v3.AggregateOperatorNoOp,
			Expression:        "A",
			Filters:           &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}},
			OrderBy:           []v3.OrderBy{{ColumnName: "method", DataType: v3.AttributeKeyDataTypeString, Order: "ASC", IsColumn: true}},
			OrderBy:           []v3.OrderBy{{ColumnName: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, Order: "ASC", IsColumn: true}},
		},
		ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
		ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
			"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
			"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
			"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
			"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by `method` ASC",
			"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
			"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
			"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by `attribute_string_method` ASC",
	},
	{
		Name: "Test Noop with filter",
@@ -797,10 +811,11 @@ var testBuildLogsQueryData = []struct {
				{Key: v3.AttributeKey{Key: "severity_number", DataType: v3.AttributeKeyDataTypeInt64, IsColumn: true}, Operator: "!=", Value: 0},
			}},
		},
		ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
		ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
			"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
			"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
			"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
			"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
			"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
			"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND severity_number != 0 order by timestamp DESC",
	},
	{
@@ -1333,7 +1348,7 @@ var testPrepLogsQueryData = []struct {
		},
	},
	TableName: "logs",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND ",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND ",
	Options: v3.QBOptions{IsLivetailQuery: true},
},
{
@@ -1352,7 +1367,7 @@ var testPrepLogsQueryData = []struct {
		},
	},
	TableName: "logs",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] ILIKE '%GET%' AND ",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] ILIKE '%GET%' AND ",
	Options: v3.QBOptions{IsLivetailQuery: true},
},
{
@@ -1368,7 +1383,7 @@ var testPrepLogsQueryData = []struct {
		Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}},
	},
	TableName: "logs",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where ",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where ",
	Options: v3.QBOptions{IsLivetailQuery: true},
},
{
@@ -1428,9 +1443,10 @@ var testPrepLogsQueryData = []struct {
		PageSize: 100,
	},
	TableName: "logs",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
		"attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as " +
		"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
		"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
		"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
		"from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < 'logid' order by " +
		"timestamp DESC LIMIT 100",
},
@@ -1458,9 +1474,10 @@ var testPrepLogsQueryData = []struct {
		PageSize: 100,
	},
	TableName: "logs",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
		"attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as " +
		"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
		"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
		"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
		"from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' order by " +
		"resources_string_value[indexOf(resources_string_key, 'mycolumn')] DESC LIMIT 100 OFFSET 100",
},
@@ -1507,7 +1524,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
	PageSize: 5,
	},
	TableName: "logs",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by `timestamp` desc LIMIT 1",
	ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by timestamp desc LIMIT 1",
},
{
	Name: "Test limit greater than pageSize - order by ts",
@@ -1528,7 +1545,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
	PageSize: 10,
|
||||
TableName: "logs",
|
||||
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by `timestamp` desc LIMIT 10",
|
||||
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by timestamp desc LIMIT 10",
|
||||
},
|
||||
{
|
||||
Name: "Test limit less than pageSize - order by custom",
|
||||
@@ -1547,7 +1564,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
|
||||
PageSize: 5,
|
||||
},
|
||||
TableName: "logs",
|
||||
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0",
|
||||
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0",
|
||||
},
|
||||
{
|
||||
Name: "Test limit greater than pageSize - order by custom",
|
||||
@@ -1568,7 +1585,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
|
||||
PageSize: 50,
|
||||
},
|
||||
TableName: "logs",
|
||||
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50",
|
||||
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50",
|
||||
},
|
||||
}
@@ -255,9 +255,6 @@ func orderBy(panelType v3.PanelType, items []v3.OrderBy, tagLookup map[string]st
|
||||
} else if panelType == v3.PanelTypeList {
|
||||
attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn}
|
||||
name := getClickhouseKey(attr)
|
||||
if item.IsColumn {
|
||||
name = "`" + name + "`"
|
||||
}
|
||||
orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order))
|
||||
}
|
||||
}
@@ -520,14 +520,16 @@ func Test_orderByAttributeKeyTags(t *testing.T) {
|
||||
{
|
||||
ColumnName: "bytes",
|
||||
Order: "asc",
|
||||
IsColumn: true,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
},
|
||||
},
|
||||
tags: []v3.AttributeKey{
|
||||
{Key: "name"},
|
||||
{Key: "bytes"},
|
||||
},
|
||||
},
|
||||
want: "`name` asc,value asc,`bytes` asc",
|
||||
want: "`name` asc,value asc,`attribute_string_bytes` asc",
|
||||
},
|
||||
{
|
||||
name: "test 4",
|
||||
@@ -722,7 +724,7 @@ func Test_buildLogsQuery(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string " +
|
||||
"from signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
|
||||
"AND attributes_string['service.name'] = 'test' AND mapContains(attributes_string, 'service.name') order by timestamp desc",
|
||||
},
|
||||
@@ -931,7 +933,7 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
},
|
||||
options: v3.QBOptions{IsLivetailQuery: true},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string " +
|
||||
"from signoz_logs.distributed_logs_v2 where attributes_string['method'] = 'GET' AND mapContains(attributes_string, 'method') AND ",
|
||||
},
|
||||
{
|
||||
@@ -954,7 +956,7 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
},
|
||||
options: v3.QBOptions{IsLivetailQuery: true},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
|
||||
"signoz_logs.distributed_logs_v2 where attributes_string['method'] = 'GET' AND mapContains(attributes_string, 'method') AND " +
|
||||
"(resource_fingerprint GLOBAL IN (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE simpleJSONExtractString(lower(labels), 'service.name') LIKE '%app%' AND lower(labels) like '%service.name%app%' AND ",
|
||||
},
|
||||
@@ -974,7 +976,7 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
},
|
||||
options: v3.QBOptions{IsLivetailQuery: true},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string " +
|
||||
"from signoz_logs.distributed_logs_v2 where ",
|
||||
},
|
||||
{
|
||||
@@ -1014,9 +1016,9 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
PageSize: 5,
|
||||
},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
|
||||
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
|
||||
"order by `timestamp` desc LIMIT 1",
|
||||
"order by timestamp desc LIMIT 1",
|
||||
},
|
||||
{
|
||||
name: "Test limit greater than pageSize - order by ts",
|
||||
@@ -1039,9 +1041,9 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
PageSize: 10,
|
||||
},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
|
||||
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
|
||||
"AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by `timestamp` desc LIMIT 10",
|
||||
"AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by timestamp desc LIMIT 10",
|
||||
},
|
||||
{
|
||||
name: "Test limit less than pageSize - order by custom",
|
||||
@@ -1062,7 +1064,7 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
PageSize: 5,
|
||||
},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
|
||||
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
|
||||
"order by attributes_string['method'] desc LIMIT 1 OFFSET 0",
|
||||
},
|
||||
@@ -1087,7 +1089,7 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
PageSize: 50,
|
||||
},
|
||||
},
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
|
||||
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
|
||||
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND " +
|
||||
"id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string['method'] desc LIMIT 50 OFFSET 50",
|
||||
},
@@ -38,8 +38,8 @@ import (
|
||||
//
|
||||
// The `increase` function is similar to the `rate` function, except that it does not divide by the time interval.
|
||||
const (
|
||||
rateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))`
|
||||
increaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window)))`
|
||||
rateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
|
||||
increaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window))`
|
||||
)
// prepareTimeAggregationSubQueryTimeSeries prepares the sub-query to be used for temporal aggregation
@@ -151,6 +151,7 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
|
||||
subQuery = fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
|
||||
case v3.TimeAggregationRate:
|
||||
innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
|
||||
rateWithoutNegative := fmt.Sprintf(rateWithoutNegative, start)
|
||||
rateQueryTmpl :=
|
||||
"SELECT %s ts, " + rateWithoutNegative +
|
||||
" as per_series_value FROM (%s) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)"
@@ -326,18 +326,20 @@ var StaticSelectedLogFields = []model.LogField{
const (
|
||||
LogsSQLSelect = "SELECT " +
|
||||
"timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body," +
|
||||
"timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body," +
|
||||
"CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
|
||||
"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64," +
|
||||
"CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
|
||||
"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
|
||||
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string "
|
||||
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
|
||||
"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope "
|
||||
LogsSQLSelectV2 = "SELECT " +
|
||||
"timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, " +
|
||||
"timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, " +
|
||||
"attributes_string, " +
|
||||
"attributes_number, " +
|
||||
"attributes_bool, " +
|
||||
"resources_string "
|
||||
"resources_string, " +
|
||||
"scope_string "
|
||||
TracesExplorerViewSQLSelectWithSubQuery = "(SELECT traceID, durationNano, " +
|
||||
"serviceName, name FROM %s.%s WHERE parentSpanID = '' AND %s %s ORDER BY durationNano DESC LIMIT 1 BY traceID "
|
||||
TracesExplorerViewSQLSelectBeforeSubQuery = "SELECT subQuery.serviceName, subQuery.name, count() AS " +
|
||||
@@ -414,6 +416,18 @@ var StaticFieldsLogsV3 = map[string]v3.AttributeKey{
|
||||
Type: v3.AttributeKeyTypeUnspecified,
|
||||
IsColumn: true,
|
||||
},
|
||||
"scope_name": {
|
||||
Key: "scope_name",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeUnspecified,
|
||||
IsColumn: true,
|
||||
},
|
||||
"scope_version": {
|
||||
Key: "scope_version",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeUnspecified,
|
||||
IsColumn: true,
|
||||
},
|
||||
}
const SigNozOrderByValue = "#SIGNOZ_VALUE"
@@ -617,6 +617,7 @@ type AlertsInfo struct {
|
||||
TotalAlerts int `json:"totalAlerts"`
|
||||
LogsBasedAlerts int `json:"logsBasedAlerts"`
|
||||
MetricBasedAlerts int `json:"metricBasedAlerts"`
|
||||
AnomalyBasedAlerts int `json:"anomalyBasedAlerts"`
|
||||
TracesBasedAlerts int `json:"tracesBasedAlerts"`
|
||||
TotalChannels int `json:"totalChannels"`
|
||||
SlackChannels int `json:"slackChannels"`
@@ -229,13 +229,14 @@ type AggregateAttributeRequest struct {
|
||||
type TagType string
const (
|
||||
TagTypeTag TagType = "tag"
|
||||
TagTypeResource TagType = "resource"
|
||||
TagTypeTag TagType = "tag"
|
||||
TagTypeResource TagType = "resource"
|
||||
TagTypeInstrumentationScope TagType = "scope"
|
||||
)
func (q TagType) Validate() error {
|
||||
switch q {
|
||||
case TagTypeTag, TagTypeResource:
|
||||
case TagTypeTag, TagTypeResource, TagTypeInstrumentationScope:
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("invalid tag type: %s", q)
|
||||
@@ -317,9 +318,10 @@ type FilterAttributeKeyResponse struct {
type AttributeKeyType string
const (
|
||||
AttributeKeyTypeUnspecified AttributeKeyType = ""
|
||||
AttributeKeyTypeTag AttributeKeyType = "tag"
|
||||
AttributeKeyTypeResource AttributeKeyType = "resource"
|
||||
AttributeKeyTypeUnspecified AttributeKeyType = ""
|
||||
AttributeKeyTypeTag AttributeKeyType = "tag"
|
||||
AttributeKeyTypeResource AttributeKeyType = "resource"
|
||||
AttributeKeyTypeInstrumentationScope AttributeKeyType = "scope"
|
||||
)
func (t AttributeKeyType) String() string {
|
||||
@@ -348,7 +350,7 @@ func (a AttributeKey) Validate() error {
if a.IsColumn {
|
||||
switch a.Type {
|
||||
case AttributeKeyTypeResource, AttributeKeyTypeTag, AttributeKeyTypeUnspecified:
|
||||
case AttributeKeyTypeResource, AttributeKeyTypeTag, AttributeKeyTypeUnspecified, AttributeKeyTypeInstrumentationScope:
|
||||
break
|
||||
default:
|
||||
return fmt.Errorf("invalid attribute type: %s", a.Type)
@@ -42,16 +42,6 @@ var (
|
||||
// this file contains api request and responses to be
|
||||
// served over http
// newApiErrorInternal returns a new api error object of type internal
|
||||
func newApiErrorInternal(err error) *model.ApiError {
|
||||
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
// newApiErrorBadData returns a new api error object of bad request type
|
||||
func newApiErrorBadData(err error) *model.ApiError {
|
||||
return &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
}
// PostableRule is used to create alerting rule from HTTP api
|
||||
type PostableRule struct {
|
||||
AlertName string `yaml:"alert,omitempty" json:"alert,omitempty"`
@@ -8,7 +8,7 @@ import (
func TestIsAllQueriesDisabled(t *testing.T) {
|
||||
testCases := []*v3.CompositeQuery{
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
"query1": {
|
||||
Disabled: true,
|
||||
@@ -20,10 +20,10 @@ func TestIsAllQueriesDisabled(t *testing.T) {
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
},
|
||||
nil,
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
},
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
"query1": {
|
||||
@@ -34,10 +34,10 @@ func TestIsAllQueriesDisabled(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
},
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
"query3": {
|
||||
@@ -45,7 +45,7 @@ func TestIsAllQueriesDisabled(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
"query3": {
|
||||
@@ -53,10 +53,10 @@ func TestIsAllQueriesDisabled(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypeClickHouseSQL,
|
||||
},
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypeClickHouseSQL,
|
||||
ClickHouseQueries: map[string]*v3.ClickHouseQuery{
|
||||
"query4": {
|
||||
@@ -64,7 +64,7 @@ func TestIsAllQueriesDisabled(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
&v3.CompositeQuery{
|
||||
{
|
||||
QueryType: v3.QueryTypeClickHouseSQL,
|
||||
ClickHouseQueries: map[string]*v3.ClickHouseQuery{
|
||||
"query4": {
@@ -599,6 +599,9 @@ func (r *ruleDB) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
|
||||
}
|
||||
}
|
||||
}
|
||||
if rule.RuleType == RuleTypeAnomaly {
|
||||
alertsInfo.AnomalyBasedAlerts = alertsInfo.AnomalyBasedAlerts + 1
|
||||
}
|
||||
} else if rule.AlertType == AlertTypeTraces {
|
||||
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
|
||||
}
@@ -10,8 +10,6 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"errors"
|
||||
@@ -24,7 +22,6 @@ import (
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine"
|
||||
"go.signoz.io/signoz/pkg/query-service/telemetry"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils/labels"
|
||||
)
|
|
||||
@@ -41,6 +38,19 @@ type PrepareTaskOptions struct {
|
||||
UseLogsNewSchema bool
|
||||
}
type PrepareTestRuleOptions struct {
|
||||
Rule *PostableRule
|
||||
RuleDB RuleDB
|
||||
Logger *zap.Logger
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
FF interfaces.FeatureLookup
|
||||
ManagerOpts *ManagerOptions
|
||||
NotifyFunc NotifyFunc
|
||||
|
||||
UseLogsNewSchema bool
|
||||
}
const taskNamesuffix = "webAppEditor"
func RuleIdFromTaskName(n string) string {
|
||||
@@ -81,6 +91,8 @@ type ManagerOptions struct {
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)

PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
UseLogsNewSchema bool
|
||||
}
@@ -99,10 +111,11 @@ type Manager struct {
logger *zap.Logger
featureFlags interfaces.FeatureLookup
|
||||
reader interfaces.Reader
|
||||
cache cache.Cache
|
||||
prepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
|
||||
featureFlags interfaces.FeatureLookup
|
||||
reader interfaces.Reader
|
||||
cache cache.Cache
|
||||
prepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
|
||||
prepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
UseLogsNewSchema bool
|
||||
}
|
||||
@@ -123,6 +136,9 @@ func defaultOptions(o *ManagerOptions) *ManagerOptions {
|
||||
if o.PrepareTaskFunc == nil {
|
||||
o.PrepareTaskFunc = defaultPrepareTaskFunc
|
||||
}
|
||||
if o.PrepareTestRuleFunc == nil {
|
||||
o.PrepareTestRuleFunc = defaultTestNotification
|
||||
}
|
||||
return o
|
||||
}
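
A hedged sketch (not part of this diff, written as if inside the rules package) of how the new hook can be supplied when constructing the manager; only the fields shown in the hunks above are used, and any other required ManagerOptions fields are omitted as hypothetical.

// Hypothetical caller: supplies a custom PrepareTestRuleFunc.
// When the field is left nil, defaultOptions wires in defaultTestNotification,
// as shown above.
opts := &ManagerOptions{
	PrepareTestRuleFunc: func(o PrepareTestRuleOptions) (int, *model.ApiError) {
		// custom test-notification logic would go here
		return 0, nil
	},
	// ...other required options (db, notifier config, etc.) omitted...
}
mgr, err := NewManager(opts)
_, _ = mgr, err
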
@@ -203,17 +219,18 @@ func NewManager(o *ManagerOptions) (*Manager, error) {
|
||||
telemetry.GetInstance().SetAlertsInfoCallback(db.GetAlertsInfo)
m := &Manager{
|
||||
tasks: map[string]Task{},
|
||||
rules: map[string]Rule{},
|
||||
notifier: notifier,
|
||||
ruleDB: db,
|
||||
opts: o,
|
||||
block: make(chan struct{}),
|
||||
logger: o.Logger,
|
||||
featureFlags: o.FeatureFlags,
|
||||
reader: o.Reader,
|
||||
cache: o.Cache,
|
||||
prepareTaskFunc: o.PrepareTaskFunc,
|
||||
tasks: map[string]Task{},
|
||||
rules: map[string]Rule{},
|
||||
notifier: notifier,
|
||||
ruleDB: db,
|
||||
opts: o,
|
||||
block: make(chan struct{}),
|
||||
logger: o.Logger,
|
||||
featureFlags: o.FeatureFlags,
|
||||
reader: o.Reader,
|
||||
cache: o.Cache,
|
||||
prepareTaskFunc: o.PrepareTaskFunc,
|
||||
prepareTestRuleFunc: o.PrepareTestRuleFunc,
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
@@ -788,78 +805,20 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m
|
||||
parsedRule, err := ParsePostableRule([]byte(ruleStr))
|
||||
|
||||
if err != nil {
|
||||
return 0, newApiErrorBadData(err)
|
||||
return 0, model.BadRequest(err)
|
||||
}
|
||||
|
||||
var alertname = parsedRule.AlertName
|
||||
if alertname == "" {
|
||||
// alertname is not mandatory for testing, so picking
|
||||
// a random string here
|
||||
alertname = uuid.New().String()
|
||||
}
|
||||
alertCount, apiErr := m.prepareTestRuleFunc(PrepareTestRuleOptions{
|
||||
Rule: parsedRule,
|
||||
RuleDB: m.ruleDB,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
FF: m.featureFlags,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
UseLogsNewSchema: m.opts.UseLogsNewSchema,
|
||||
})
|
||||
|
||||
// append name to indicate this is test alert
|
||||
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, TestAlertPostFix)
|
||||
|
||||
var rule Rule
|
||||
|
||||
if parsedRule.RuleType == RuleTypeThreshold {
|
||||
|
||||
// add special labels for test alerts
|
||||
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
|
||||
parsedRule.Labels[labels.RuleSourceLabel] = ""
|
||||
parsedRule.Labels[labels.AlertRuleIdLabel] = ""
|
||||
|
||||
// create a threshold rule
|
||||
rule, err = NewThresholdRule(
|
||||
alertname,
|
||||
parsedRule,
|
||||
m.featureFlags,
|
||||
m.reader,
|
||||
m.opts.UseLogsNewSchema,
|
||||
WithSendAlways(),
|
||||
WithSendUnmatched(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
|
||||
return 0, newApiErrorBadData(err)
|
||||
}
|
||||
|
||||
} else if parsedRule.RuleType == RuleTypeProm {
|
||||
|
||||
// create promql rule
|
||||
rule, err = NewPromRule(
|
||||
alertname,
|
||||
parsedRule,
|
||||
m.logger,
|
||||
m.reader,
|
||||
m.opts.PqlEngine,
|
||||
WithSendAlways(),
|
||||
WithSendUnmatched(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
|
||||
return 0, newApiErrorBadData(err)
|
||||
}
|
||||
} else {
|
||||
return 0, newApiErrorBadData(fmt.Errorf("failed to derive ruletype with given information"))
|
||||
}
|
||||
|
||||
// set timestamp to current utc time
|
||||
ts := time.Now().UTC()
|
||||
|
||||
count, err := rule.Eval(ctx, ts)
|
||||
if err != nil {
|
||||
zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
|
||||
return 0, newApiErrorInternal(fmt.Errorf("rule evaluation failed"))
|
||||
}
|
||||
alertsFound, ok := count.(int)
|
||||
if !ok {
|
||||
return 0, newApiErrorInternal(fmt.Errorf("something went wrong"))
|
||||
}
|
||||
rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), m.prepareNotifyFunc())
|
||||
|
||||
return alertsFound, nil
|
||||
return alertCount, apiErr
|
||||
}
|
||||
|
||||
@@ -233,6 +233,7 @@ func AlertTemplateData(labels map[string]string, value string, threshold string)
|
||||
// consistent across the platform.
|
||||
// If there is a go template block, it won't be replaced.
|
||||
// The example for existing go template block is: {{$threshold}} or {{$value}} or any other valid go template syntax.
|
||||
// See templates_test.go for examples.
|
||||
func (te *TemplateExpander) preprocessTemplate() {
|
||||
// Handle the $variable syntax
|
||||
reDollar := regexp.MustCompile(`({{.*?}})|(\$(\w+(?:\.\w+)*))`)
|
||||
@@ -256,6 +257,19 @@ func (te *TemplateExpander) preprocessTemplate() {
|
||||
rest := submatches[2]
|
||||
return fmt.Sprintf(`{{index .Labels "%s"%s}}`, path, rest)
|
||||
})
|
||||
|
||||
// Handle the {{$variable}} syntax
|
||||
// skip the special case for {{$threshold}} and {{$value}}
|
||||
reVariable := regexp.MustCompile(`{{\s*\$\s*([a-zA-Z0-9_.]+)\s*}}`)
|
||||
te.text = reVariable.ReplaceAllStringFunc(te.text, func(match string) string {
|
||||
if strings.HasPrefix(match, "{{$threshold}}") || strings.HasPrefix(match, "{{$value}}") {
|
||||
return match
|
||||
}
|
||||
// get the variable name from {{$variable}} syntax
|
||||
variable := strings.TrimPrefix(match, "{{$")
|
||||
variable = strings.TrimSuffix(variable, "}}")
|
||||
return fmt.Sprintf(`{{index .Labels "%s"}}`, variable)
|
||||
})
|
||||
}
|
||||
|
||||
// Funcs adds the functions in fm to the Expander's function map.
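
As a standalone illustration (not part of this diff) of the {{$variable}} rewrite added above: the same regex and trim logic turn the user-facing syntax into Go template index lookups on .Labels, while {{$threshold}} and {{$value}} are left untouched.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	re := regexp.MustCompile(`{{\s*\$\s*([a-zA-Z0-9_.]+)\s*}}`)
	in := "test {{$service.name}} exceeds {{$threshold}} and observed at {{$value}}"
	out := re.ReplaceAllStringFunc(in, func(match string) string {
		if strings.HasPrefix(match, "{{$threshold}}") || strings.HasPrefix(match, "{{$value}}") {
			return match
		}
		v := strings.TrimSuffix(strings.TrimPrefix(match, "{{$"), "}}")
		return fmt.Sprintf(`{{index .Labels "%s"}}`, v)
	})
	fmt.Println(out)
	// test {{index .Labels "service.name"}} exceeds {{$threshold}} and observed at {{$value}}
}
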
@@ -335,6 +349,7 @@ func (te TemplateExpander) ExpandHTML(templateFiles []string) (result string, re
|
||||
|
||||
// ParseTest parses the templates and returns the error if any.
|
||||
func (te TemplateExpander) ParseTest() error {
|
||||
te.preprocessTemplate()
|
||||
_, err := text_template.New(te.name).Funcs(te.funcMap).Option("missingkey=zero").Parse(te.text)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
@@ -74,3 +74,14 @@ func TestTemplateExpander_WithLablesDotSyntax(t *testing.T) {
|
||||
}
|
||||
require.Equal(t, "test my-service exceeds 100 and observed at 200", result)
|
||||
}
|
||||
|
||||
func TestTemplateExpander_WithVariableSyntax(t *testing.T) {
|
||||
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
||||
data := AlertTemplateData(map[string]string{"service.name": "my-service"}, "200", "100")
|
||||
expander := NewTemplateExpander(context.Background(), defs+"test {{$service.name}} exceeds {{$threshold}} and observed at {{$value}}", "test", data, times.Time(time.Now().Unix()), nil)
|
||||
result, err := expander.Expand()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
require.Equal(t, "test my-service exceeds 100 and observed at 200", result)
|
||||
}
|
||||
|
||||
97
pkg/query-service/rules/test_notification.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"go.signoz.io/signoz/pkg/query-service/model"
|
||||
"go.signoz.io/signoz/pkg/query-service/utils/labels"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// TestNotification prepares a dummy rule for given rule parameters and
|
||||
// sends a test notification. returns alert count and error (if any)
|
||||
func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError) {
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
if opts.Rule == nil {
|
||||
return 0, model.BadRequest(fmt.Errorf("rule is required"))
|
||||
}
|
||||
|
||||
parsedRule := opts.Rule
|
||||
var alertname = parsedRule.AlertName
|
||||
if alertname == "" {
|
||||
// alertname is not mandatory for testing, so picking
|
||||
// a random string here
|
||||
alertname = uuid.New().String()
|
||||
}
|
||||
|
||||
// append name to indicate this is test alert
|
||||
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, TestAlertPostFix)
|
||||
|
||||
var rule Rule
|
||||
var err error
|
||||
|
||||
if parsedRule.RuleType == RuleTypeThreshold {
|
||||
|
||||
// add special labels for test alerts
|
||||
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
|
||||
parsedRule.Labels[labels.RuleSourceLabel] = ""
|
||||
parsedRule.Labels[labels.AlertRuleIdLabel] = ""
|
||||
|
||||
// create a threshold rule
|
||||
rule, err = NewThresholdRule(
|
||||
alertname,
|
||||
parsedRule,
|
||||
opts.FF,
|
||||
opts.Reader,
|
||||
opts.UseLogsNewSchema,
|
||||
WithSendAlways(),
|
||||
WithSendUnmatched(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
|
||||
return 0, model.BadRequest(err)
|
||||
}
|
||||
|
||||
} else if parsedRule.RuleType == RuleTypeProm {
|
||||
|
||||
// create promql rule
|
||||
rule, err = NewPromRule(
|
||||
alertname,
|
||||
parsedRule,
|
||||
opts.Logger,
|
||||
opts.Reader,
|
||||
opts.ManagerOpts.PqlEngine,
|
||||
WithSendAlways(),
|
||||
WithSendUnmatched(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
|
||||
return 0, model.BadRequest(err)
|
||||
}
|
||||
} else {
|
||||
return 0, model.BadRequest(fmt.Errorf("failed to derive ruletype with given information"))
|
||||
}
|
||||
|
||||
// set timestamp to current utc time
|
||||
ts := time.Now().UTC()
|
||||
|
||||
count, err := rule.Eval(ctx, ts)
|
||||
if err != nil {
|
||||
zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
|
||||
return 0, model.InternalError(fmt.Errorf("rule evaluation failed"))
|
||||
}
|
||||
alertsFound, ok := count.(int)
|
||||
if !ok {
|
||||
return 0, model.InternalError(fmt.Errorf("something went wrong"))
|
||||
}
|
||||
rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), opts.NotifyFunc)
|
||||
|
||||
return alertsFound, nil
|
||||
}