Compare commits: v0.56.0-tr...fixProduce
23 commits
| Author | SHA1 | Date |
|---|---|---|
|  | bf74ac7b5e |  |
|  | 55a4056aa5 |  |
|  | e974e9d47f |  |
|  | 577a169508 |  |
|  | 939e2a3570 |  |
|  | b64326070c |  |
|  | 63872983c6 |  |
|  | 831540eaf0 |  |
|  | 22c10f9479 |  |
|  | e748fb0655 |  |
|  | fdc54a62a9 |  |
|  | abe0ab69b0 |  |
|  | e623c92615 |  |
|  | dc5917db01 |  |
|  | d6a7f0b6f4 |  |
|  | 471803115e |  |
|  | 8403a3362d |  |
|  | 64d46bc855 |  |
|  | c9fee27604 |  |
|  | f1b6b2d3d8 |  |
|  | 468f056530 |  |
|  | 7086470ce2 |  |
|  | 352296c6cd |  |
```diff
@@ -38,9 +38,9 @@ type APIHandlerOptions struct {
 	Cache   cache.Cache
 	Gateway *httputil.ReverseProxy
 	// Querier Influx Interval
-	FluxInterval      time.Duration
-	UseLogsNewSchema  bool
-	UseTraceNewSchema bool
+	FluxInterval     time.Duration
+	UseLogsNewSchema bool
+	UseLicensesV3    bool
 }

 type APIHandler struct {
@@ -66,7 +66,7 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
 		Cache:            opts.Cache,
 		FluxInterval:     opts.FluxInterval,
 		UseLogsNewSchema: opts.UseLogsNewSchema,
-		UseTraceNewSchema: opts.UseTraceNewSchema,
+		UseLicensesV3:    opts.UseLicensesV3,
 	})

 	if err != nil {
```
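The new field rides along the existing options pattern: a CLI flag lands in an options struct, and the constructor copies it onto the handler. A stripped-down, standalone sketch of that flow; the types here are simplified stand-ins, not the real SigNoz structs:

```go
package main

import "fmt"

// Simplified stand-ins for APIHandlerOptions / APIHandler.
type options struct {
	UseLogsNewSchema bool
	UseLicensesV3    bool // new flag introduced in this diff
}

type handler struct {
	useLicensesV3 bool
}

func newHandler(o options) *handler {
	return &handler{useLicensesV3: o.UseLicensesV3}
}

func main() {
	h := newHandler(options{UseLicensesV3: true})
	fmt.Println("licenses v3:", h.useLicensesV3) // gates the v2-vs-v3 license code paths
}
```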
```diff
@@ -175,10 +175,25 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
 	router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut)
 	router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut)

 	// v2
 	router.HandleFunc("/api/v2/licenses",
 		am.ViewAccess(ah.listLicensesV2)).
 		Methods(http.MethodGet)

+	// v3
+	router.HandleFunc("/api/v3/licenses",
+		am.ViewAccess(ah.listLicensesV3)).
+		Methods(http.MethodGet)
+
+	router.HandleFunc("/api/v3/licenses",
+		am.AdminAccess(ah.applyLicenseV3)).
+		Methods(http.MethodPost)
+
+	router.HandleFunc("/api/v3/licenses",
+		am.AdminAccess(ah.refreshLicensesV3)).
+		Methods(http.MethodPut)
+
 	// v4
 	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

 	// Gateway
```
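Taken together, the new v3 routes give license management a small REST surface: list (viewer access), apply (admin), refresh (admin). A hedged client sketch for these endpoints; the base URL and bearer-token auth header are assumptions, while the `{"key": ...}` body and the 202/204 success statuses come from the ApplyLicenseRequest struct and handlers later in this diff:

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	base := "http://localhost:8080" // assumed query-service address
	client := &http.Client{}

	// Apply a license (admin): POST /api/v3/licenses with {"key": "..."}.
	body := bytes.NewBufferString(`{"key": "<license-key>"}`)
	req, _ := http.NewRequest(http.MethodPost, base+"/api/v3/licenses", body)
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer <jwt>") // assumed auth scheme
	if resp, err := client.Do(req); err == nil {
		fmt.Println("apply:", resp.Status) // handler responds 202 Accepted on success
		resp.Body.Close()
	}

	// Refresh licenses (admin): PUT /api/v3/licenses, 204 No Content on success.
	req, _ = http.NewRequest(http.MethodPut, base+"/api/v3/licenses", nil)
	req.Header.Set("Authorization", "Bearer <jwt>")
	if resp, err := client.Do(req); err == nil {
		fmt.Println("refresh:", resp.Status)
		resp.Body.Close()
	}

	// List licenses (viewer): GET /api/v3/licenses.
	req, _ = http.NewRequest(http.MethodGet, base+"/api/v3/licenses", nil)
	req.Header.Set("Authorization", "Bearer <jwt>")
	if resp, err := client.Do(req); err == nil {
		fmt.Println("list:", resp.Status)
		resp.Body.Close()
	}
}
```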
```diff
@@ -9,6 +9,7 @@ import (

 	"go.signoz.io/signoz/ee/query-service/constants"
 	"go.signoz.io/signoz/ee/query-service/model"
+	"go.signoz.io/signoz/pkg/http/render"
 	"go.uber.org/zap"
 )
```
```diff
@@ -59,6 +60,21 @@ type billingDetails struct {
 	} `json:"data"`
 }

+type ApplyLicenseRequest struct {
+	LicenseKey string `json:"key"`
+}
+
+type ListLicenseResponse map[string]interface{}
+
+func convertLicenseV3ToListLicenseResponse(licensesV3 []*model.LicenseV3) []ListLicenseResponse {
+	listLicenses := []ListLicenseResponse{}
+
+	for _, license := range licensesV3 {
+		listLicenses = append(listLicenses, license.Data)
+	}
+	return listLicenses
+}
+
 func (ah *APIHandler) listLicenses(w http.ResponseWriter, r *http.Request) {
 	licenses, apiError := ah.LM().GetLicenses(context.Background())
 	if apiError != nil {
```
```diff
@@ -88,6 +104,51 @@ func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) {
 	ah.Respond(w, license)
 }

+func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) {
+	licenses, apiError := ah.LM().GetLicensesV3(r.Context())
+
+	if apiError != nil {
+		RespondError(w, apiError, nil)
+		return
+	}
+
+	ah.Respond(w, convertLicenseV3ToListLicenseResponse(licenses))
+}
+
+// this function is called by zeus when inserting licenses in the query-service
+func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
+	var licenseKey ApplyLicenseRequest
+
+	if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil {
+		RespondError(w, model.BadRequest(err), nil)
+		return
+	}
+
+	if licenseKey.LicenseKey == "" {
+		RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
+		return
+	}
+
+	_, apiError := ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
+	if apiError != nil {
+		RespondError(w, apiError, nil)
+		return
+	}
+
+	render.Success(w, http.StatusAccepted, nil)
+}
+
+func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) {
+
+	apiError := ah.LM().RefreshLicense(r.Context())
+	if apiError != nil {
+		RespondError(w, apiError, nil)
+		return
+	}
+
+	render.Success(w, http.StatusNoContent, nil)
+}
+
 func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {

 	type checkoutResponse struct {
```
```diff
@@ -154,11 +215,45 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
 	ah.Respond(w, billingResponse.Data)
 }

+func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License {
+	licensesV2 := []model.License{}
+	for _, l := range licenses {
+		licenseV2 := model.License{
+			Key:               l.Key,
+			ActivationId:      "",
+			PlanDetails:       "",
+			FeatureSet:        l.Features,
+			ValidationMessage: "",
+			IsCurrent:         l.IsCurrent,
+			LicensePlan: model.LicensePlan{
+				PlanKey:    l.PlanName,
+				ValidFrom:  l.ValidFrom,
+				ValidUntil: l.ValidUntil,
+				Status:     l.Status},
+		}
+		licensesV2 = append(licensesV2, licenseV2)
+	}
+	return licensesV2
+}
+
 func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {

-	licenses, apiError := ah.LM().GetLicenses(context.Background())
-	if apiError != nil {
-		RespondError(w, apiError, nil)
+	var licenses []model.License
+
+	if ah.UseLicensesV3 {
+		licensesV3, err := ah.LM().GetLicensesV3(r.Context())
+		if err != nil {
+			RespondError(w, err, nil)
+			return
+		}
+		licenses = convertLicenseV3ToLicenseV2(licensesV3)
+	} else {
+		_licenses, apiError := ah.LM().GetLicenses(r.Context())
+		if apiError != nil {
+			RespondError(w, apiError, nil)
+			return
+		}
+		licenses = _licenses
 	}

 	resp := model.Licenses{
```
```diff
@@ -2,31 +2,32 @@ package api

 import (
 	"net/http"
-
-	"go.signoz.io/signoz/ee/query-service/app/db"
-	"go.signoz.io/signoz/ee/query-service/model"
-	baseapp "go.signoz.io/signoz/pkg/query-service/app"
-	basemodel "go.signoz.io/signoz/pkg/query-service/model"
-	"go.uber.org/zap"
 )

 func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {

-	if !ah.CheckFeature(basemodel.SmartTraceDetail) {
-		zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
-		ah.APIHandler.SearchTraces(w, r)
-		return
-	}
-	searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
-	if err != nil {
-		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
-		return
-	}
-
-	result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
-	if ah.HandleError(w, err, http.StatusBadRequest) {
-		return
-	}
-
-	ah.WriteJSON(w, r, result)
+	ah.APIHandler.SearchTraces(w, r)
+	return
+
+	// This is commented since this will be taken care by new trace API
+	// if !ah.CheckFeature(basemodel.SmartTraceDetail) {
+	// 	zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
+	// 	ah.APIHandler.SearchTraces(w, r)
+	// 	return
+	// }
+	// searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
+	// if err != nil {
+	// 	RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
+	// 	return
+	// }
+
+	// result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
+	// if ah.HandleError(w, err, http.StatusBadRequest) {
+	// 	return
+	// }
+
+	// ah.WriteJSON(w, r, result)

 }
```
```diff
@@ -26,9 +26,8 @@ func NewDataConnector(
 	dialTimeout time.Duration,
 	cluster string,
 	useLogsNewSchema bool,
-	useTraceNewSchema bool,
 ) *ClickhouseReader {
-	ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster, useLogsNewSchema, useTraceNewSchema)
+	ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster, useLogsNewSchema)
 	return &ClickhouseReader{
 		conn:  ch.GetConn(),
 		appdb: localDB,
```
```diff
@@ -31,7 +31,6 @@ import (
 	"go.signoz.io/signoz/ee/query-service/rules"
 	baseauth "go.signoz.io/signoz/pkg/query-service/auth"
 	"go.signoz.io/signoz/pkg/query-service/migrate"
-	"go.signoz.io/signoz/pkg/query-service/model"
 	v3 "go.signoz.io/signoz/pkg/query-service/model/v3"

 	licensepkg "go.signoz.io/signoz/ee/query-service/license"
@@ -78,7 +77,7 @@ type ServerOptions struct {
 	Cluster           string
 	GatewayUrl        string
 	UseLogsNewSchema  bool
-	UseTraceNewSchema bool
+	UseLicensesV3     bool
 }

 // Server runs HTTP api service
@@ -135,7 +134,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	}

 	// initiate license manager
-	lm, err := licensepkg.StartManager("sqlite", localDB)
+	lm, err := licensepkg.StartManager("sqlite", localDB, serverOptions.UseLicensesV3)
 	if err != nil {
 		return nil, err
 	}
@@ -157,7 +156,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 		serverOptions.DialTimeout,
 		serverOptions.Cluster,
 		serverOptions.UseLogsNewSchema,
-		serverOptions.UseTraceNewSchema,
 	)
 	go qb.Start(readerReady)
 	reader = qb
@@ -191,7 +189,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 		serverOptions.DisableRules,
 		lm,
 		serverOptions.UseLogsNewSchema,
-		serverOptions.UseTraceNewSchema,
 	)

 	if err != nil {
@@ -273,7 +270,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 		FluxInterval:     fluxInterval,
 		Gateway:          gatewayProxy,
 		UseLogsNewSchema: serverOptions.UseLogsNewSchema,
-		UseTraceNewSchema: serverOptions.UseTraceNewSchema,
+		UseLicensesV3:    serverOptions.UseLicensesV3,
 	}

 	apiHandler, err := api.NewAPIHandler(apiOpts)
@@ -352,7 +349,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
 	}

 	if user.User.OrgId == "" {
-		return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims"))
+		return nil, basemodel.UnauthorizedError(errors.New("orgId is missing in the claims"))
 	}

 	return user, nil
```
```diff
@@ -740,8 +737,7 @@ func makeRulesManager(
 	cache cache.Cache,
 	disableRules bool,
 	fm baseint.FeatureLookup,
-	useLogsNewSchema bool,
-	useTraceNewSchema bool) (*baserules.Manager, error) {
+	useLogsNewSchema bool) (*baserules.Manager, error) {

 	// create engine
 	pqle, err := pqle.FromConfigPath(promConfigPath)
@@ -770,9 +766,9 @@ func makeRulesManager(
 		Cache:     cache,
 		EvalDelay: baseconst.GetEvalDelay(),

-		PrepareTaskFunc:   rules.PrepareTaskFunc,
-		UseLogsNewSchema:  useLogsNewSchema,
-		UseTraceNewSchema: useTraceNewSchema,
+		PrepareTaskFunc:     rules.PrepareTaskFunc,
+		PrepareTestRuleFunc: rules.TestNotification,
+		UseLogsNewSchema:    useLogsNewSchema,
 	}

 	// create Manager
```
```diff
@@ -13,6 +13,7 @@ var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
 var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
 var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
 var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
+var ZeusURL = GetOrDefaultEnv("ZEUS_URL", "ZeusURL")

 func GetOrDefaultEnv(key string, fallback string) string {
 	v := os.Getenv(key)
```
```diff
@@ -13,3 +13,8 @@ type ActivationResponse struct {
 	ActivationId string `json:"ActivationId"`
 	PlanDetails  string `json:"PlanDetails"`
 }
+
+type ValidateLicenseResponse struct {
+	Status status                 `json:"status"`
+	Data   map[string]interface{} `json:"data"`
+}
```
```diff
@@ -7,6 +7,7 @@ import (
 	"fmt"
 	"io"
 	"net/http"
+	"time"

 	"github.com/pkg/errors"
 	"go.uber.org/zap"
@@ -23,12 +24,14 @@ const (
 )

 type Client struct {
-	Prefix string
+	Prefix     string
+	GatewayUrl string
 }

 func New() *Client {
 	return &Client{
-		Prefix: constants.LicenseSignozIo,
+		Prefix:     constants.LicenseSignozIo,
+		GatewayUrl: constants.ZeusURL,
 	}
 }
@@ -116,6 +119,60 @@ func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError)

 }

+func ValidateLicenseV3(licenseKey string) (*model.LicenseV3, *model.ApiError) {
+
+	// Creating an HTTP client with a timeout for better control
+	client := &http.Client{
+		Timeout: 10 * time.Second,
+	}
+
+	req, err := http.NewRequest("GET", C.GatewayUrl+"/v2/licenses/me", nil)
+	if err != nil {
+		return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to create request: %v", err)))
+	}
+
+	// Setting the custom header
+	req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
+
+	response, err := client.Do(req)
+	if err != nil {
+		return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to make request: %v", err)))
+	}
+
+	body, err := io.ReadAll(response.Body)
+	if err != nil {
+		return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read validation response from %v", C.GatewayUrl)))
+	}
+
+	defer response.Body.Close()
+
+	switch response.StatusCode {
+	case 200:
+		a := ValidateLicenseResponse{}
+		err = json.Unmarshal(body, &a)
+		if err != nil {
+			return nil, model.BadRequest(errors.Wrap(err, "failed to unmarshal license validation response"))
+		}
+
+		license, err := model.NewLicenseV3(a.Data)
+		if err != nil {
+			return nil, model.BadRequest(errors.Wrap(err, "failed to generate new license v3"))
+		}
+
+		return license, nil
+	case 400:
+		return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
+			fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
+	case 401:
+		return nil, model.Unauthorized(errors.Wrap(fmt.Errorf(string(body)),
+			fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
+	default:
+		return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
+			fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
+	}
+
+}
+
 func NewPostRequestWithCtx(ctx context.Context, url string, contentType string, body io.Reader) (*http.Request, error) {
 	req, err := http.NewRequestWithContext(ctx, POST, url, body)
 	if err != nil {
```
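The shape `ValidateLicenseV3` expects back from the gateway follows from `ValidateLicenseResponse` and the `NewLicenseV3` parser added below: a status string plus a `data` object carrying `id`, `key`, `status`, `plan.name`, `valid_from`, and `valid_until`. A self-contained decoding sketch; the literal values are illustrative, mirroring the license_test.go fixtures later in this diff, and the outer status being a plain string is an assumption:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-in for ValidateLicenseResponse; the real Status type is unexported.
type validateLicenseResponse struct {
	Status string                 `json:"status"`
	Data   map[string]interface{} `json:"data"`
}

func main() {
	// Illustrative payload only; field names mirror the test fixtures.
	body := []byte(`{
		"status": "success",
		"data": {
			"id": "<uuid>",
			"key": "<license-key>",
			"status": "ACTIVE",
			"plan": {"name": "TEAMS"},
			"valid_from": 1730899309,
			"valid_until": -1
		}
	}`)

	var resp validateLicenseResponse
	if err := json.Unmarshal(body, &resp); err != nil {
		panic(err)
	}
	// resp.Data is what gets handed to model.NewLicenseV3.
	fmt.Println(resp.Data["plan"], resp.Data["status"])
}
```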
```diff
@@ -3,6 +3,7 @@ package license

 import (
 	"context"
 	"database/sql"
 	"encoding/json"
 	"fmt"
+	"time"
```
```diff
@@ -48,6 +49,34 @@ func (r *Repo) GetLicenses(ctx context.Context) ([]model.License, error) {
 	return licenses, nil
 }

+func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) {
+	licensesData := []model.LicenseDB{}
+	licenseV3Data := []*model.LicenseV3{}
+
+	query := "SELECT id,key,data FROM licenses_v3"
+
+	err := r.db.Select(&licensesData, query)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get licenses from db: %v", err)
+	}
+
+	for _, l := range licensesData {
+		var licenseData map[string]interface{}
+		err := json.Unmarshal([]byte(l.Data), &licenseData)
+		if err != nil {
+			return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
+		}
+
+		license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
+		if err != nil {
+			return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
+		}
+		licenseV3Data = append(licenseV3Data, license)
+	}
+
+	return licenseV3Data, nil
+}
+
 // GetActiveLicense fetches the latest active license from DB.
 // If the license is not present, expect a nil license and a nil error in the output.
 func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
```
```diff
@@ -79,6 +108,45 @@ func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel
 	return active, nil
 }

+func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) {
+	var err error
+	licenses := []model.LicenseDB{}
+
+	query := "SELECT id,key,data FROM licenses_v3"
+
+	err = r.db.Select(&licenses, query)
+	if err != nil {
+		return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
+	}
+
+	var active *model.LicenseV3
+	for _, l := range licenses {
+		var licenseData map[string]interface{}
+		err := json.Unmarshal([]byte(l.Data), &licenseData)
+		if err != nil {
+			return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err)
+		}
+
+		license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData)
+		if err != nil {
+			return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err)
+		}
+
+		if active == nil &&
+			(license.ValidFrom != 0) &&
+			(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
+			active = license
+		}
+		if active != nil &&
+			license.ValidFrom > active.ValidFrom &&
+			(license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) {
+			active = license
+		}
+	}
+
+	return active, nil
+}
+
 // InsertLicense inserts a new license in db
 func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error {
```
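`GetActiveLicenseV3` scans all stored licenses and keeps the still-valid one with the greatest `ValidFrom` (where `ValidUntil == -1` means no expiry). A self-contained sketch of that selection rule on dummy data, with simplified local types rather than the real model structs:

```go
package main

import (
	"fmt"
	"time"
)

type lic struct {
	ID         string
	ValidFrom  int64
	ValidUntil int64 // -1 means no expiry
}

// pickActive mirrors the selection logic in GetActiveLicenseV3:
// among currently valid licenses, prefer the one that became valid last.
func pickActive(licenses []lic) *lic {
	now := time.Now().Unix()
	var active *lic
	for i := range licenses {
		l := &licenses[i]
		valid := l.ValidUntil == -1 || l.ValidUntil > now
		if active == nil && l.ValidFrom != 0 && valid {
			active = l
		}
		if active != nil && l.ValidFrom > active.ValidFrom && valid {
			active = l
		}
	}
	return active
}

func main() {
	ls := []lic{
		{ID: "old", ValidFrom: 1700000000, ValidUntil: -1},
		{ID: "new", ValidFrom: 1730899309, ValidUntil: -1},
	}
	fmt.Println(pickActive(ls).ID) // "new": the later ValidFrom wins
}
```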
```diff
@@ -204,3 +272,53 @@ func (r *Repo) InitFeatures(req basemodel.FeatureSet) error {
 	}
 	return nil
 }
+
+// InsertLicenseV3 inserts a new license v3 in db
+func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) error {
+
+	query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)`
+
+	// license is the entity of zeus so putting the entire license here without defining schema
+	licenseData, err := json.Marshal(l.Data)
+	if err != nil {
+		return fmt.Errorf("insert license failed: license marshal error")
+	}
+
+	_, err = r.db.ExecContext(ctx,
+		query,
+		l.ID,
+		l.Key,
+		string(licenseData),
+	)
+
+	if err != nil {
+		zap.L().Error("error in inserting license data: ", zap.Error(err))
+		return fmt.Errorf("failed to insert license in db: %v", err)
+	}
+
+	return nil
+}
+
+// UpdateLicenseV3 updates an existing license v3 in db
+func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error {
+
+	// the key and id for the license can't change so only update the data here!
+	query := `UPDATE licenses_v3 SET data=$1 WHERE id=$2;`
+
+	license, err := json.Marshal(l.Data)
+	if err != nil {
+		return fmt.Errorf("update license failed: license marshal error")
+	}
+	_, err = r.db.ExecContext(ctx,
+		query,
+		license,
+		l.ID,
+	)
+
+	if err != nil {
+		zap.L().Error("error in updating license data: ", zap.Error(err))
+		return fmt.Errorf("failed to update license in db: %v", err)
+	}
+
+	return nil
+}
```
```diff
@@ -7,6 +7,7 @@ import (
 	"time"

 	"github.com/jmoiron/sqlx"
+	"github.com/pkg/errors"

 	"sync"

@@ -45,11 +46,12 @@ type Manager struct {
 	failedAttempts uint64

 	// keep track of active license and features
-	activeLicense  *model.License
-	activeFeatures basemodel.FeatureSet
+	activeLicense   *model.License
+	activeLicenseV3 *model.LicenseV3
+	activeFeatures  basemodel.FeatureSet
 }

-func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*Manager, error) {
+func StartManager(dbType string, db *sqlx.DB, useLicensesV3 bool, features ...basemodel.Feature) (*Manager, error) {
 	if LM != nil {
 		return LM, nil
 	}
@@ -65,7 +67,7 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
 		repo: &repo,
 	}

-	if err := m.start(features...); err != nil {
+	if err := m.start(useLicensesV3, features...); err != nil {
 		return m, err
 	}
 	LM = m
@@ -73,8 +75,14 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
 }

 // start loads active license in memory and initiates validator
-func (lm *Manager) start(features ...basemodel.Feature) error {
-	err := lm.LoadActiveLicense(features...)
+func (lm *Manager) start(useLicensesV3 bool, features ...basemodel.Feature) error {
+
+	var err error
+	if useLicensesV3 {
+		err = lm.LoadActiveLicenseV3(features...)
+	} else {
+		err = lm.LoadActiveLicense(features...)
+	}

 	return err
 }
```
```diff
@@ -108,6 +116,31 @@ func (lm *Manager) SetActive(l *model.License, features ...basemodel.Feature) {
 		go lm.Validator(context.Background())
 	}

 }
+
+func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) {
+	lm.mutex.Lock()
+	defer lm.mutex.Unlock()
+
+	if l == nil {
+		return
+	}
+
+	lm.activeLicenseV3 = l
+	lm.activeFeatures = append(l.Features, features...)
+	// set default features
+	setDefaultFeatures(lm)
+
+	err := lm.InitFeatures(lm.activeFeatures)
+	if err != nil {
+		zap.L().Panic("Couldn't activate features", zap.Error(err))
+	}
+	if !lm.validatorRunning {
+		// we want to make sure only one validator runs,
+		// we already have lock() so good to go
+		lm.validatorRunning = true
+		go lm.ValidatorV3(context.Background())
+	}
+
+}

 func setDefaultFeatures(lm *Manager) {
@@ -137,6 +170,28 @@ func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error {
 	return nil
 }

+func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error {
+	active, err := lm.repo.GetActiveLicenseV3(context.Background())
+	if err != nil {
+		return err
+	}
+	if active != nil {
+		lm.SetActiveV3(active, features...)
+	} else {
+		zap.L().Info("No active license found, defaulting to basic plan")
+		// if no active license is found, we default to basic (free) plan with all default features
+		lm.activeFeatures = model.BasicPlan
+		setDefaultFeatures(lm)
+		err := lm.InitFeatures(lm.activeFeatures)
+		if err != nil {
+			zap.L().Error("Couldn't initialize features", zap.Error(err))
+			return err
+		}
+	}
+
+	return nil
+}
+
 func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *model.ApiError) {

 	licenses, err := lm.repo.GetLicenses(ctx)
```
```diff
@@ -163,6 +218,23 @@ func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, a
 	return
 }

+func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.LicenseV3, apiError *model.ApiError) {
+
+	licenses, err := lm.repo.GetLicensesV3(ctx)
+	if err != nil {
+		return nil, model.InternalError(err)
+	}
+
+	for _, l := range licenses {
+		if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key {
+			l.IsCurrent = true
+		}
+		response = append(response, l)
+	}
+
+	return response, nil
+}
+
 // Validator validates license after an epoch of time
 func (lm *Manager) Validator(ctx context.Context) {
 	defer close(lm.terminated)
```
```diff
@@ -187,6 +259,30 @@ func (lm *Manager) Validator(ctx context.Context) {
 	}
 }

+// ValidatorV3 validates the license after an epoch of time
+func (lm *Manager) ValidatorV3(ctx context.Context) {
+	defer close(lm.terminated)
+	tick := time.NewTicker(validationFrequency)
+	defer tick.Stop()
+
+	lm.ValidateV3(ctx)
+
+	for {
+		select {
+		case <-lm.done:
+			return
+		default:
+			select {
+			case <-lm.done:
+				return
+			case <-tick.C:
+				lm.ValidateV3(ctx)
+			}
+		}
+	}
+}
+
 // Validate validates the current active license
 func (lm *Manager) Validate(ctx context.Context) (reterr error) {
 	zap.L().Info("License validation started")
```
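The doubled `select` in `ValidatorV3` is a common Go idiom: the outer `default` falls through immediately, so a pending shutdown signal is checked at the top of every iteration before the inner `select` can pick the ticker case at random when both channels are ready. A minimal standalone demonstration of the pattern:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	done := make(chan struct{})
	tick := time.NewTicker(10 * time.Millisecond)
	defer tick.Stop()

	go func() {
		time.Sleep(35 * time.Millisecond)
		close(done)
	}()

	for {
		select {
		case <-done: // checked first on every iteration
			fmt.Println("stopped")
			return
		default:
			select {
			case <-done:
				fmt.Println("stopped")
				return
			case <-tick.C:
				fmt.Println("validate") // stands in for lm.ValidateV3(ctx)
			}
		}
	}
}
```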
```diff
@@ -254,6 +350,54 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
 	return nil
 }

+// todo[vikrantgupta25]: check the comparison here between old and new license!
+func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
+
+	license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key)
+	if apiError != nil {
+		zap.L().Error("failed to validate license", zap.Error(apiError.Err))
+		return apiError
+	}
+
+	err := lm.repo.UpdateLicenseV3(ctx, license)
+	if err != nil {
+		return model.BadRequest(errors.Wrap(err, "failed to update the new license"))
+	}
+	lm.SetActiveV3(license)
+
+	return nil
+}
+
+func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
+	zap.L().Info("License validation started")
+	if lm.activeLicenseV3 == nil {
+		return nil
+	}
+
+	defer func() {
+		lm.mutex.Lock()
+
+		lm.lastValidated = time.Now().Unix()
+		if reterr != nil {
+			zap.L().Error("License validation completed with error", zap.Error(reterr))
+			atomic.AddUint64(&lm.failedAttempts, 1)
+			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
+				map[string]interface{}{"err": reterr.Error()}, "", true, false)
+		} else {
+			zap.L().Info("License validation completed with no errors")
+		}
+
+		lm.mutex.Unlock()
+	}()
+
+	err := lm.RefreshLicense(ctx)
+
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
 // Activate activates a license key with signoz server
 func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) {
 	defer func() {
@@ -298,6 +442,35 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
 	return l, nil
 }

+func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseResponse *model.LicenseV3, errResponse *model.ApiError) {
+	defer func() {
+		if errResponse != nil {
+			userEmail, err := auth.GetEmailFromJwt(ctx)
+			if err == nil {
+				telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
+					map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false)
+			}
+		}
+	}()
+
+	license, apiError := validate.ValidateLicenseV3(licenseKey)
+	if apiError != nil {
+		zap.L().Error("failed to get the license", zap.Error(apiError.Err))
+		return nil, apiError
+	}
+
+	// insert the new license to the sqlite db
+	err := lm.repo.InsertLicenseV3(ctx, license)
+	if err != nil {
+		zap.L().Error("failed to activate license", zap.Error(err))
+		return nil, model.InternalError(err)
+	}
+
+	// license is valid, activate it
+	lm.SetActiveV3(license)
+	return license, nil
+}
+
 // CheckFeature will be internally used by backend routines
 // for feature gating
 func (lm *Manager) CheckFeature(featureKey string) error {
```
```diff
@@ -48,5 +48,16 @@ func InitDB(db *sqlx.DB) error {
 		return fmt.Errorf("error in creating feature_status table: %s", err.Error())
 	}

+	table_schema = `CREATE TABLE IF NOT EXISTS licenses_v3 (
+		id TEXT PRIMARY KEY,
+		key TEXT NOT NULL UNIQUE,
+		data TEXT
+	);`
+
+	_, err = db.Exec(table_schema)
+	if err != nil {
+		return fmt.Errorf("error in creating licenses_v3 table: %s", err.Error())
+	}
+
 	return nil
 }
```
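Each row in the new table is just the license identity plus the raw gateway payload stored as an opaque JSON blob. A sketch of what a stored row holds, using the `LicenseDB` shape from the model changes below; the values are illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors model.LicenseDB: licenses_v3 keeps the zeus payload as an opaque JSON string.
type licenseDB struct {
	ID   string `json:"id"`
	Key  string `json:"key"`
	Data string `json:"data"`
}

func main() {
	payload := map[string]interface{}{ // illustrative license body
		"status":      "ACTIVE",
		"plan":        map[string]string{"name": "TEAMS"},
		"valid_from":  1730899309,
		"valid_until": -1,
	}
	data, _ := json.Marshal(payload)

	row := licenseDB{ID: "<uuid>", Key: "<license-key>", Data: string(data)}
	fmt.Printf("%+v\n", row)
}
```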
```diff
@@ -94,7 +94,7 @@ func main() {
 	var cluster string

 	var useLogsNewSchema bool
-	var useTraceNewSchema bool
+	var useLicensesV3 bool
 	var cacheConfigPath, fluxInterval string
 	var enableQueryServiceLogOTLPExport bool
 	var preferSpanMetrics bool
@@ -105,7 +105,7 @@ func main() {
 	var gatewayUrl string

 	flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
-	flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces")
+	flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
 	flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
 	flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
 	flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
@@ -145,7 +145,7 @@ func main() {
 		Cluster:           cluster,
 		GatewayUrl:        gatewayUrl,
 		UseLogsNewSchema:  useLogsNewSchema,
-		UseTraceNewSchema: useTraceNewSchema,
+		UseLicensesV3:     useLicensesV3,
 	}

 	// Read the jwt secret key
```
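So the whole v3 path is opt-in behind a single boolean CLI flag. A minimal standalone illustration of the same `flag.BoolVar` pattern (run with `-use-licenses-v3` to flip it):

```go
package main

import (
	"flag"
	"fmt"
)

func main() {
	var useLicensesV3 bool
	// Defaults to false, so existing deployments keep the v2 license path.
	flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses")
	flag.Parse()
	fmt.Println("licenses v3 enabled:", useLicensesV3)
}
```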
```diff
@@ -46,6 +46,13 @@ func BadRequest(err error) *ApiError {
 	}
 }

+func Unauthorized(err error) *ApiError {
+	return &ApiError{
+		Typ: basemodel.ErrorUnauthorized,
+		Err: err,
+	}
+}
+
 // BadRequestStr returns a ApiError object of bad request for string input
 func BadRequestStr(s string) *ApiError {
 	return &ApiError{
```
```diff
@@ -3,6 +3,8 @@ package model

 import (
 	"encoding/base64"
 	"encoding/json"
 	"fmt"
+	"reflect"
+	"time"

 	"github.com/pkg/errors"
@@ -104,3 +106,144 @@ type SubscriptionServerResp struct {
 	Status string   `json:"status"`
 	Data   Licenses `json:"data"`
 }

+type Plan struct {
+	Name string `json:"name"`
+}
+
+type LicenseDB struct {
+	ID   string `json:"id"`
+	Key  string `json:"key"`
+	Data string `json:"data"`
+}
+
+type LicenseV3 struct {
+	ID         string
+	Key        string
+	Data       map[string]interface{}
+	PlanName   string
+	Features   basemodel.FeatureSet
+	Status     string
+	IsCurrent  bool
+	ValidFrom  int64
+	ValidUntil int64
+}
+
+func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
+	var zeroValue T
+	if val, ok := data[key]; ok {
+		if value, ok := val.(T); ok {
+			return value, nil
+		}
+		return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
+	}
+	return zeroValue, fmt.Errorf("%s key is missing", key)
+}
```
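`extractKeyFromMapStringInterface` is a small generic helper: it type-asserts `data[key]` to `T` and reports a missing key and a wrong type as distinct errors (the same messages the test file below asserts on). A self-contained usage sketch, with the helper copied verbatim from the hunk above:

```go
package main

import (
	"fmt"
	"reflect"
)

func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) {
	var zeroValue T
	if val, ok := data[key]; ok {
		if value, ok := val.(T); ok {
			return value, nil
		}
		return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue))
	}
	return zeroValue, fmt.Errorf("%s key is missing", key)
}

func main() {
	data := map[string]interface{}{"id": "lic-1", "valid_from": 1730899309.0}

	id, err := extractKeyFromMapStringInterface[string](data, "id")
	fmt.Println(id, err) // lic-1 <nil>

	// JSON numbers decode to float64, so extracting one as string fails:
	_, err = extractKeyFromMapStringInterface[string](data, "valid_from")
	fmt.Println(err) // valid_from key is not a valid string

	_, err = extractKeyFromMapStringInterface[string](data, "missing")
	fmt.Println(err) // missing key is missing
}
```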
```diff
+func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) {
+	var features basemodel.FeatureSet
+
+	// extract id from data
+	licenseID, err := extractKeyFromMapStringInterface[string](data, "id")
+	if err != nil {
+		return nil, err
+	}
+	delete(data, "id")
+
+	// extract key from data
+	licenseKey, err := extractKeyFromMapStringInterface[string](data, "key")
+	if err != nil {
+		return nil, err
+	}
+	delete(data, "key")
+
+	// extract status from data
+	status, err := extractKeyFromMapStringInterface[string](data, "status")
+	if err != nil {
+		return nil, err
+	}
+
+	planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan")
+	if err != nil {
+		return nil, err
+	}
+
+	planName, err := extractKeyFromMapStringInterface[string](planMap, "name")
+	if err != nil {
+		return nil, err
+	}
+	// if license status is inactive then default it to basic
+	if status == LicenseStatusInactive {
+		planName = PlanNameBasic
+	}
+
+	featuresFromZeus := basemodel.FeatureSet{}
+	if _features, ok := data["features"]; ok {
+		featuresData, err := json.Marshal(_features)
+		if err != nil {
+			return nil, errors.Wrap(err, "failed to marshal features data")
+		}
+
+		if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil {
+			return nil, errors.Wrap(err, "failed to unmarshal features data")
+		}
+	}
+
+	switch planName {
+	case PlanNameTeams:
+		features = append(features, ProPlan...)
+	case PlanNameEnterprise:
+		features = append(features, EnterprisePlan...)
+	case PlanNameBasic:
+		features = append(features, BasicPlan...)
+	default:
+		features = append(features, BasicPlan...)
+	}
+
+	if len(featuresFromZeus) > 0 {
+		for _, feature := range featuresFromZeus {
+			exists := false
+			for i, existingFeature := range features {
+				if existingFeature.Name == feature.Name {
+					features[i] = feature // Replace existing feature
+					exists = true
+					break
+				}
+			}
+			if !exists {
+				features = append(features, feature) // Append if it doesn't exist
+			}
+		}
+	}
+	data["features"] = features
+
+	_validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from")
+	if err != nil {
+		_validFrom = 0
+	}
+	validFrom := int64(_validFrom)
+
+	_validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until")
+	if err != nil {
+		_validUntil = 0
+	}
+	validUntil := int64(_validUntil)
+
+	return &LicenseV3{
+		ID:         licenseID,
+		Key:        licenseKey,
+		Data:       data,
+		PlanName:   planName,
+		Features:   features,
+		ValidFrom:  validFrom,
+		ValidUntil: validUntil,
+		Status:     status,
+	}, nil
+
+}
+
+func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) {
+	licenseDataWithIdAndKey := data
+	licenseDataWithIdAndKey["id"] = id
+	licenseDataWithIdAndKey["key"] = key
+	return NewLicenseV3(licenseDataWithIdAndKey)
+}
```
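Note the `float64` extraction for `valid_from`/`valid_until`: `encoding/json` decodes every JSON number in a `map[string]interface{}` as `float64`, so the parser converts to `int64` afterward, truncating toward zero. That is exactly the behavior the `1234.456 → 1234` test case in the new test file below pins down. A standalone illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var data map[string]interface{}
	_ = json.Unmarshal([]byte(`{"valid_from": 1234.456, "valid_until": -1}`), &data)

	// All JSON numbers arrive as float64...
	vf := data["valid_from"].(float64)
	vu := data["valid_until"].(float64)

	// ...and int64 conversion truncates toward zero, as in NewLicenseV3.
	fmt.Println(int64(vf), int64(vu)) // 1234 -1
}
```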
New file: ee/query-service/model/license_test.go (170 lines)

```go
package model

import (
	"encoding/json"
	"testing"

	"github.com/pkg/errors"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"go.signoz.io/signoz/pkg/query-service/model"
)

func TestNewLicenseV3(t *testing.T) {
	testCases := []struct {
		name     string
		data     []byte
		pass     bool
		expected *LicenseV3
		error    error
	}{
		{
			name:  "Error for missing license id",
			data:  []byte(`{}`),
			pass:  false,
			error: errors.New("id key is missing"),
		},
		{
			name:  "Error for license id not being a valid string",
			data:  []byte(`{"id": 10}`),
			pass:  false,
			error: errors.New("id key is not a valid string"),
		},
		{
			name:  "Error for missing license key",
			data:  []byte(`{"id":"does-not-matter"}`),
			pass:  false,
			error: errors.New("key key is missing"),
		},
		{
			name:  "Error for invalid string license key",
			data:  []byte(`{"id":"does-not-matter","key":10}`),
			pass:  false,
			error: errors.New("key key is not a valid string"),
		},
		{
			name:  "Error for missing license status",
			data:  []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`),
			pass:  false,
			error: errors.New("status key is missing"),
		},
		{
			name:  "Error for invalid string license status",
			data:  []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`),
			pass:  false,
			error: errors.New("status key is not a valid string"),
		},
		{
			name:  "Error for missing license plan",
			data:  []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`),
			pass:  false,
			error: errors.New("plan key is missing"),
		},
		{
			name:  "Error for invalid json license plan",
			data:  []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`),
			pass:  false,
			error: errors.New("plan key is not a valid map[string]interface {}"),
		},
		{
			name:  "Error for invalid license plan",
			data:  []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`),
			pass:  false,
			error: errors.New("name key is missing"),
		},
		{
			name: "Parse the entire license properly",
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
			pass: true,
			expected: &LicenseV3{
				ID:  "does-not-matter",
				Key: "does-not-matter-key",
				Data: map[string]interface{}{
					"plan": map[string]interface{}{
						"name": "TEAMS",
					},
					"category":    "FREE",
					"status":      "ACTIVE",
					"valid_from":  float64(1730899309),
					"valid_until": float64(-1),
				},
				PlanName:   PlanNameTeams,
				ValidFrom:  1730899309,
				ValidUntil: -1,
				Status:     "ACTIVE",
				IsCurrent:  false,
				Features:   model.FeatureSet{},
			},
		},
		{
			name: "Fallback to basic plan if license status is inactive",
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INACTIVE","plan":{"name":"TEAMS"},"valid_from": 1730899309,"valid_until": -1}`),
			pass: true,
			expected: &LicenseV3{
				ID:  "does-not-matter",
				Key: "does-not-matter-key",
				Data: map[string]interface{}{
					"plan": map[string]interface{}{
						"name": "TEAMS",
					},
					"category":    "FREE",
					"status":      "INACTIVE",
					"valid_from":  float64(1730899309),
					"valid_until": float64(-1),
				},
				PlanName:   PlanNameBasic,
				ValidFrom:  1730899309,
				ValidUntil: -1,
				Status:     "INACTIVE",
				IsCurrent:  false,
				Features:   model.FeatureSet{},
			},
		},
		{
			name: "fallback states for validFrom and validUntil",
			data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"TEAMS"},"valid_from":1234.456,"valid_until":5678.567}`),
			pass: true,
			expected: &LicenseV3{
				ID:  "does-not-matter",
				Key: "does-not-matter-key",
				Data: map[string]interface{}{
					"plan": map[string]interface{}{
						"name": "TEAMS",
					},
					"valid_from":  1234.456,
					"valid_until": 5678.567,
					"category":    "FREE",
					"status":      "ACTIVE",
				},
				PlanName:   PlanNameTeams,
				ValidFrom:  1234,
				ValidUntil: 5678,
				Status:     "ACTIVE",
				IsCurrent:  false,
				Features:   model.FeatureSet{},
			},
		},
	}

	for _, tc := range testCases {
		var licensePayload map[string]interface{}
		err := json.Unmarshal(tc.data, &licensePayload)
		require.NoError(t, err)
		license, err := NewLicenseV3(licensePayload)
		if license != nil {
			license.Features = make(model.FeatureSet, 0)
			delete(license.Data, "features")
		}

		if tc.pass {
			require.NoError(t, err)
			require.NotNil(t, license)
			assert.Equal(t, tc.expected, license)
		} else {
			require.Error(t, err)
			assert.EqualError(t, err, tc.error.Error())
			require.Nil(t, license)
		}

	}
}
```
```diff
@@ -9,6 +9,17 @@ const SSO = "SSO"
 const Basic = "BASIC_PLAN"
 const Pro = "PRO_PLAN"
 const Enterprise = "ENTERPRISE_PLAN"

+var (
+	PlanNameEnterprise = "ENTERPRISE"
+	PlanNameTeams      = "TEAMS"
+	PlanNameBasic      = "BASIC"
+)
+
+var (
+	LicenseStatusInactive = "INACTIVE"
+)
+
 const DisableUpsell = "DISABLE_UPSELL"
 const Onboarding = "ONBOARDING"
 const ChatSupport = "CHAT_SUPPORT"
```
```diff
@@ -1,10 +1,15 @@
 package rules

 import (
+	"context"
+	"fmt"
 	"time"

+	"github.com/google/uuid"
+	basemodel "go.signoz.io/signoz/pkg/query-service/model"
 	baserules "go.signoz.io/signoz/pkg/query-service/rules"
+	"go.signoz.io/signoz/pkg/query-service/utils/labels"
 	"go.uber.org/zap"
 )

 func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
@@ -21,7 +26,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 		opts.FF,
 		opts.Reader,
 		opts.UseLogsNewSchema,
-		opts.UseTraceNewSchema,
 		baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
 	)

@@ -80,6 +84,106 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
 	return task, nil
 }

+// TestNotification prepares a dummy rule for given rule parameters and
+// sends a test notification. returns alert count and error (if any)
+func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.ApiError) {
+
+	ctx := context.Background()
+
+	if opts.Rule == nil {
+		return 0, basemodel.BadRequest(fmt.Errorf("rule is required"))
+	}
+
+	parsedRule := opts.Rule
+	var alertname = parsedRule.AlertName
+	if alertname == "" {
+		// alertname is not mandatory for testing, so picking
+		// a random string here
+		alertname = uuid.New().String()
+	}
+
+	// append name to indicate this is test alert
+	parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, baserules.TestAlertPostFix)
+
+	var rule baserules.Rule
+	var err error
+
+	if parsedRule.RuleType == baserules.RuleTypeThreshold {
+
+		// add special labels for test alerts
+		parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
+		parsedRule.Labels[labels.RuleSourceLabel] = ""
+		parsedRule.Labels[labels.AlertRuleIdLabel] = ""
+
+		// create a threshold rule
+		rule, err = baserules.NewThresholdRule(
+			alertname,
+			parsedRule,
+			opts.FF,
+			opts.Reader,
+			opts.UseLogsNewSchema,
+			baserules.WithSendAlways(),
+			baserules.WithSendUnmatched(),
+		)
+
+		if err != nil {
+			zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", rule.Name()), zap.Error(err))
+			return 0, basemodel.BadRequest(err)
+		}
+
+	} else if parsedRule.RuleType == baserules.RuleTypeProm {
+
+		// create promql rule
+		rule, err = baserules.NewPromRule(
+			alertname,
+			parsedRule,
+			opts.Logger,
+			opts.Reader,
+			opts.ManagerOpts.PqlEngine,
+			baserules.WithSendAlways(),
+			baserules.WithSendUnmatched(),
+		)
+
+		if err != nil {
+			zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
+			return 0, basemodel.BadRequest(err)
+		}
+	} else if parsedRule.RuleType == baserules.RuleTypeAnomaly {
+		// create anomaly rule
+		rule, err = NewAnomalyRule(
+			alertname,
+			parsedRule,
+			opts.FF,
+			opts.Reader,
+			opts.Cache,
+			baserules.WithSendAlways(),
+			baserules.WithSendUnmatched(),
+		)
+		if err != nil {
+			zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", rule.Name()), zap.Error(err))
+			return 0, basemodel.BadRequest(err)
+		}
+	} else {
+		return 0, basemodel.BadRequest(fmt.Errorf("failed to derive ruletype with given information"))
+	}
+
+	// set timestamp to current utc time
+	ts := time.Now().UTC()
+
+	count, err := rule.Eval(ctx, ts)
+	if err != nil {
+		zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
+		return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
+	}
+	alertsFound, ok := count.(int)
+	if !ok {
+		return 0, basemodel.InternalError(fmt.Errorf("something went wrong"))
+	}
+	rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), opts.NotifyFunc)
+
+	return alertsFound, nil
+}
+
 // newTask returns an appropriate group for
 // rule type
 func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task {
```
```diff
@@ -42,7 +42,7 @@
 		"@radix-ui/react-tooltip": "1.0.7",
 		"@sentry/react": "7.102.1",
 		"@sentry/webpack-plugin": "2.16.0",
-		"@signozhq/design-tokens": "0.0.8",
+		"@signozhq/design-tokens": "1.1.4",
 		"@uiw/react-md-editor": "3.23.5",
 		"@visx/group": "3.3.0",
 		"@visx/shape": "3.5.0",
@@ -87,6 +87,8 @@
 		"lodash-es": "^4.17.21",
 		"lucide-react": "0.379.0",
 		"mini-css-extract-plugin": "2.4.5",
+		"overlayscrollbars": "^2.8.1",
+		"overlayscrollbars-react": "^0.5.6",
 		"papaparse": "5.4.1",
 		"posthog-js": "1.160.3",
 		"rc-tween-one": "3.0.6",
@@ -107,11 +109,10 @@
 		"react-query": "3.39.3",
 		"react-redux": "^7.2.2",
 		"react-router-dom": "^5.2.0",
+		"react-router-dom-v5-compat": "6.27.0",
 		"react-syntax-highlighter": "15.5.0",
 		"react-use": "^17.3.2",
 		"react-virtuoso": "4.0.3",
-		"overlayscrollbars-react": "^0.5.6",
-		"overlayscrollbars": "^2.8.1",
 		"redux": "^4.0.5",
 		"redux-thunk": "^2.3.0",
 		"rehype-raw": "7.0.0",
```
New file: frontend/public/locales/en-GB/messagingQueues.json (24 lines)

```json
{
	"metricGraphCategory": {
		"brokerMetrics": {
			"title": "Broker Metrics",
			"description": "The Kafka Broker metrics here inform you of data loss/delay through unclean leader elections and network throughputs, as well as request fails through request purgatories and timeouts metrics"
		},
		"consumerMetrics": {
			"title": "Consumer Metrics",
			"description": "Kafka Consumer metrics provide insights into lag between message production and consumption, success rates and latency of message delivery, and the volume of data consumed."
		},
		"producerMetrics": {
			"title": "Producer Metrics",
			"description": "Kafka Producers send messages to brokers for storage and distribution by topic. These metrics inform you of the volume and rate of data sent, and the success rate of message delivery."
		},
		"brokerJVMMetrics": {
			"title": "Broker JVM Metrics",
			"description": "Kafka brokers are Java applications that expose JVM metrics to inform on the broker's system health. Garbage collection metrics like those below provide key insights into free memory, broker performance, and heap size. You need to enable new_gc_metrics for this section to populate."
		},
		"partitionMetrics": {
			"title": "Partition Metrics",
			"description": "Kafka partitions are the unit of parallelism in Kafka. These metrics inform you of the number of partitions per topic, the current offset of each partition, the oldest offset, and the number of in-sync replicas."
		}
	}
}
```
```diff
@@ -1,30 +1,54 @@
 {
 	"breadcrumb": "Messaging Queues",
 	"header": "Kafka / Overview",
 	"overview": {
 		"title": "Start sending data in as little as 20 minutes",
 		"subtitle": "Connect and Monitor Your Data Streams"
 	},
 	"configureConsumer": {
 		"title": "Configure Consumer",
 		"description": "Add consumer data sources to gain insights and enhance monitoring.",
 		"button": "Get Started"
 	},
 	"configureProducer": {
 		"title": "Configure Producer",
 		"description": "Add producer data sources to gain insights and enhance monitoring.",
 		"button": "Get Started"
 	},
 	"monitorKafka": {
 		"title": "Monitor kafka",
 		"description": "Add your Kafka source to gain insights and enhance activity tracking.",
 		"button": "Get Started"
 	},
 	"summarySection": {
-		"viewDetailsButton": "View Details"
+		"viewDetailsButton": "View Details",
+		"consumer": {
+			"title": "Consumer lag view",
+			"description": "Connect and Monitor Your Data Streams"
+		},
+		"producer": {
+			"title": "Producer latency view",
+			"description": "Connect and Monitor Your Data Streams"
+		},
+		"partition": {
+			"title": "Partition Latency view",
+			"description": "Connect and Monitor Your Data Streams"
+		},
+		"dropRate": {
+			"title": "Drop Rate view",
+			"description": "Connect and Monitor Your Data Streams"
+		},
+		"metricPage": {
+			"title": "Metric View",
+			"description": "Connect and Monitor Your Data Streams"
+		}
 	},
 	"confirmModal": {
 		"content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.",
 		"okText": "Proceed"
-	}
+	},
+	"overviewSummarySection": {
+		"title": "Monitor Your Data Streams",
+		"subtitle": "Monitor key Kafka metrics like consumer lag and latency to ensure efficient data flow and troubleshoot in real time."
+	}
 }
```
New file: frontend/public/locales/en/messagingQueues.json (24 lines), identical in content to the en-GB metricGraphCategory file above.
The en locale's overview messagingQueues.json receives the identical @@ -1,30 +1,54 @@ change shown above for en-GB: the summarySection gains consumer, producer, partition, dropRate, and metricPage entries, and a new overviewSummarySection block is added.
@@ -28,6 +28,7 @@ import { Suspense, useEffect, useState } from 'react';
import { useQuery } from 'react-query';
import { useDispatch, useSelector } from 'react-redux';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { Dispatch } from 'redux';
import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
@@ -292,36 +293,38 @@ function App(): JSX.Element {
return (
<ConfigProvider theme={themeConfig}>
<Router history={history}>
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<CompatRouter>
<NotificationProvider>
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}

<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</NotificationProvider>
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>
</NotificationProvider>
</CompatRouter>
</Router>
</ConfigProvider>
);

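The hunk above threads a `CompatRouter` between the v5 `Router` and the app's providers so v6 APIs become available during the migration. A minimal sketch of the nesting it lands on (the `history` object and route contents are the app's own; this is illustrative, not the full tree):

```tsx
import { createBrowserHistory } from 'history';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';

const history = createBrowserHistory();

function Shell(): JSX.Element {
	return (
		<Router history={history}>
			{/* CompatRouter sits directly under the v5 Router, so existing v5
			    routes keep working while v6 hooks become usable further down */}
			<CompatRouter>
				<Switch>
					<Route path="/" exact component={(): JSX.Element => <div>home</div>} />
				</Switch>
			</CompatRouter>
		</Router>
	);
}

export default Shell;
```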
@@ -16,11 +16,13 @@ export interface OnboardingStatusResponse {
const getOnboardingStatus = async (props: {
start: number;
end: number;
endpointService?: string;
}): Promise<SuccessResponse<OnboardingStatusResponse> | ErrorResponse> => {
const { endpointService, ...rest } = props;
try {
const response = await ApiBaseInstance.post(
'/messaging-queues/kafka/onboarding/consumers',
props,
`/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`,
rest,
);

return {

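The change above swaps the hard-coded consumers endpoint for a templated one, and destructuring `endpointService` into `rest` keeps it out of the POST body. A sketch of just the URL construction (the 'producers' value is illustrative):

```typescript
const endpointService: string | undefined = 'producers'; // hypothetical input

// Falls back to the previous behaviour when no service is supplied.
const url = `/messaging-queues/kafka/onboarding/${endpointService || 'consumers'}`;
console.log(url); // '/messaging-queues/kafka/onboarding/producers'
```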
@@ -17,6 +17,7 @@ describe('getLogIndicatorType', () => {
body: 'Sample log Message',
resources_string: {},
attributesString: {},
scope_string: {},
attributes_string: {},
attributesInt: {},
attributesFloat: {},
@@ -40,6 +41,7 @@ describe('getLogIndicatorType', () => {
body: 'Sample log Message',
resources_string: {},
attributesString: {},
scope_string: {},
attributes_string: {},
attributesInt: {},
attributesFloat: {},
@@ -62,6 +64,7 @@ describe('getLogIndicatorType', () => {
body: 'Sample log Message',
resources_string: {},
attributesString: {},
scope_string: {},
attributes_string: {},
attributesInt: {},
attributesFloat: {},
@@ -83,6 +86,7 @@ describe('getLogIndicatorType', () => {
body: 'Sample log',
resources_string: {},
attributesString: {},
scope_string: {},
attributes_string: {
log_level: 'INFO' as never,
},
@@ -112,6 +116,7 @@ describe('getLogIndicatorTypeForTable', () => {
attributesString: {},
attributes_string: {},
attributesInt: {},
scope_string: {},
attributesFloat: {},
severity_text: 'WARN',
};
@@ -130,6 +135,7 @@ describe('getLogIndicatorTypeForTable', () => {
severity_number: 0,
body: 'Sample log message',
resources_string: {},
scope_string: {},
attributesString: {},
attributes_string: {},
attributesInt: {},
@@ -166,6 +172,7 @@ describe('logIndicatorBySeverityNumber', () => {
body: 'Sample log Message',
resources_string: {},
attributesString: {},
scope_string: {},
attributes_string: {},
attributesInt: {},
attributesFloat: {},

@@ -40,4 +40,5 @@ export enum QueryParams {
configDetail = 'configDetail',
getStartedSource = 'getStartedSource',
getStartedSourceService = 'getStartedSourceService',
mqServiceView = 'mqServiceView',
}

@@ -18,4 +18,5 @@ export const REACT_QUERY_KEY = {
GET_ALL_ALLERTS: 'GET_ALL_ALLERTS',
REMOVE_ALERT_RULE: 'REMOVE_ALERT_RULE',
DUPLICATE_ALERT_RULE: 'DUPLICATE_ALERT_RULE',
UPDATE_ALERT_RULE: 'UPDATE_ALERT_RULE',
};

@@ -57,6 +57,7 @@ export const alertDefaults: AlertDef = {
},
annotations: defaultAnnotations,
evalWindow: defaultEvalWindow,
alert: '',
};

export const anamolyAlertDefaults: AlertDef = {
@@ -94,12 +95,14 @@ export const anamolyAlertDefaults: AlertDef = {
matchType: defaultMatchType,
algorithm: defaultAlgorithm,
seasonality: defaultSeasonality,
target: 3,
},
labels: {
severity: 'warning',
},
annotations: defaultAnnotations,
evalWindow: defaultEvalWindow,
alert: '',
};

export const logAlertDefaults: AlertDef = {
@@ -131,6 +134,7 @@ export const logAlertDefaults: AlertDef = {
},
annotations: defaultAnnotations,
evalWindow: defaultEvalWindow,
alert: '',
};

export const traceAlertDefaults: AlertDef = {
@@ -162,6 +166,7 @@ export const traceAlertDefaults: AlertDef = {
},
annotations: defaultAnnotations,
evalWindow: defaultEvalWindow,
alert: '',
};

export const exceptionAlertDefaults: AlertDef = {
@@ -193,6 +198,7 @@ export const exceptionAlertDefaults: AlertDef = {
},
annotations: defaultAnnotations,
evalWindow: defaultEvalWindow,
alert: '',
};

export const ALERTS_VALUES_MAP: Record<AlertTypes, AlertDef> = {

@@ -1,4 +1,4 @@
import { Color } from '@signozhq/design-tokens';
import { Color, ColorType } from '@signozhq/design-tokens';
import { showErrorNotification } from 'components/ExplorerCard/utils';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryParams } from 'constants/query';
@@ -8,7 +8,7 @@ import { DataSource } from 'types/common/queryBuilder';

import { SaveNewViewHandlerProps } from './types';

export const getRandomColor = (): Color => {
export const getRandomColor = (): ColorType => {
const colorKeys = Object.keys(Color) as (keyof typeof Color)[];
const randomKey = colorKeys[Math.floor(Math.random() * colorKeys.length)];
return Color[randomKey];

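The signature change above is a type-level fix: `Color` is the token object itself, so a function returning one of its values wants the union of those values, which is the role `ColorType` is assumed to play here. A self-contained sketch of the same idea with a hypothetical two-token palette:

```typescript
// Hypothetical stand-in for the design-tokens export, for illustration only.
const Color = {
	BG_ROBIN_400: '#7190F9',
	BG_CHERRY_400: '#E84749',
} as const;

// The union of token values -- what the diff uses ColorType for.
type ColorType = typeof Color[keyof typeof Color];

// Same body as the patched getRandomColor; only the return type changed.
function getRandomColor(): ColorType {
	const colorKeys = Object.keys(Color) as (keyof typeof Color)[];
	const randomKey = colorKeys[Math.floor(Math.random() * colorKeys.length)];
	return Color[randomKey];
}
```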
@@ -53,6 +53,7 @@ import {
QueryFunctionProps,
} from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';

import BasicInfo from './BasicInfo';
@@ -105,6 +106,11 @@ function FormAlertRules({
const location = useLocation();
const queryParams = new URLSearchParams(location.search);

const dataSource = useMemo(
() => urlQuery.get(QueryParams.alertType) as DataSource,
[urlQuery],
);

// In case of alert the panel types should always be "Graph" only
const panelType = PANEL_TYPES.TIME_SERIES;

@@ -114,13 +120,12 @@ function FormAlertRules({
handleSetQueryData,
handleRunQuery,
handleSetConfig,
initialDataSource,
redirectWithQueryBuilderData,
} = useQueryBuilder();

useEffect(() => {
handleSetConfig(panelType || PANEL_TYPES.TIME_SERIES, initialDataSource);
}, [handleSetConfig, initialDataSource, panelType]);
handleSetConfig(panelType || PANEL_TYPES.TIME_SERIES, dataSource);
}, [handleSetConfig, dataSource, panelType]);

// use query client
const ruleCache = useQueryClient();

@@ -138,6 +138,9 @@ function LabelSelect({
if (e.key === 'Enter' || e.code === 'Enter' || e.key === ':') {
send('NEXT');
}
if (state.value === 'Idle') {
send('NEXT');
}
}}
bordered={false}
value={currentVal as never}

@@ -157,6 +157,11 @@ export const getFieldAttributes = (field: string): IFieldAttributes => {
const stringWithoutPrefix = field.slice('resources_'.length);
const parts = splitOnce(stringWithoutPrefix, '.');
[dataType, newField] = parts;
} else if (field.startsWith('scope_string')) {
logType = MetricsType.Scope;
const stringWithoutPrefix = field.slice('scope_'.length);
const parts = splitOnce(stringWithoutPrefix, '.');
[dataType, newField] = parts;
}

return { dataType, newField, logType };
@@ -187,6 +192,7 @@ export const aggregateAttributesResourcesToString = (logData: ILog): string => {
traceId: logData.traceId,
attributes: {},
resources: {},
scope: {},
severity_text: logData.severity_text,
severity_number: logData.severity_number,
};
@@ -198,6 +204,9 @@ export const aggregateAttributesResourcesToString = (logData: ILog): string => {
} else if (key.startsWith('resources_')) {
outputJson.resources = outputJson.resources || {};
Object.assign(outputJson.resources, logData[key as keyof ILog]);
} else if (key.startsWith('scope_string')) {
outputJson.scope = outputJson.scope || {};
Object.assign(outputJson.scope, logData[key as keyof ILog]);
} else {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore

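The new `scope_string` branch mirrors the existing resource handling: strip the `scope_` prefix, then split once on the first dot to recover the data type and the field name. A runnable walk-through with a local stand-in for `splitOnce` (the helper's exact implementation is assumed):

```typescript
// Local stand-in for the app's splitOnce helper: split on the first match only.
const splitOnce = (s: string, sep: string): [string, string] => {
	const i = s.indexOf(sep);
	return i === -1 ? [s, ''] : [s.slice(0, i), s.slice(i + sep.length)];
};

const field = 'scope_string.version';
const [dataType, newField] = splitOnce(field.slice('scope_'.length), '.');
console.log(dataType, newField); // 'string' 'version'
```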
@@ -53,6 +53,7 @@ export enum KeyOperationTableHeader {
export enum MetricsType {
Tag = 'tag',
Resource = 'resource',
Scope = 'scope',
}

export enum WidgetKeys {

@@ -0,0 +1,32 @@

Once you are done instrumenting your Java application, you can run it using the below commands

**Note:**
- Ensure you have Java and Maven installed. Compile your Java consumer applications: Ensure your consumer apps are compiled and ready to run.

**Run Consumer App with Java Agent:**

```bash
java -javaagent:/path/to/opentelemetry-javaagent.jar \
-Dotel.service.name=consumer-svc \
-Dotel.traces.exporter=otlp \
-Dotel.metrics.exporter=otlp \
-Dotel.logs.exporter=otlp \
-Dotel.instrumentation.kafka.producer-propagation.enabled=true \
-Dotel.instrumentation.kafka.experimental-span-attributes=true \
-Dotel.instrumentation.kafka.metric-reporter.enabled=true \
-jar /path/to/your/consumer.jar
```

<path> - update it to the path where you downloaded the Java agent JAR in the previous step
<my-app> - JAR file of your application


**Note:**
- In case you're dockerising your application, make sure to dockerise it along with the OpenTelemetry instrumentation done in the previous step.

If you encounter any difficulties, please consult the [troubleshooting section](https://signoz.io/docs/instrumentation/springboot/#troubleshooting-your-installation) for assistance.
@@ -1,9 +1,9 @@

Once you are done intrumenting your Java application, you can run it using the below commands
Once you are done instrumenting your Java application, you can run it using the below commands

**Note:**
- Ensure you have Java and Maven installed. Compile your Java producer applications: Ensure your producer and consumer apps are compiled and ready to run.
- Ensure you have Java and Maven installed. Compile your Java producer applications: Ensure your producer apps are compiled and ready to run.

**Run Producer App with Java Agent:**

@@ -14,6 +14,7 @@ import { useQueryService } from 'hooks/useQueryService';
import useResourceAttribute from 'hooks/useResourceAttribute';
import { convertRawQueriesToTraceSelectedTags } from 'hooks/useResourceAttribute/utils';
import useUrlQuery from 'hooks/useUrlQuery';
import MessagingQueueHealthCheck from 'pages/MessagingQueues/MessagingQueueHealthCheck/MessagingQueueHealthCheck';
import { getAttributeDataFromOnboardingStatus } from 'pages/MessagingQueues/MessagingQueuesUtils';
import { useEffect, useMemo, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';
@@ -33,6 +34,9 @@ export default function ConnectionStatus(): JSX.Element {

const urlQuery = useUrlQuery();
const getStartedSource = urlQuery.get(QueryParams.getStartedSource);
const getStartedSourceService = urlQuery.get(
QueryParams.getStartedSourceService,
);

const {
serviceName,
@@ -74,10 +78,14 @@ export default function ConnectionStatus(): JSX.Element {
data: onbData,
error: onbErr,
isFetching: onbFetching,
} = useOnboardingStatus({
enabled: getStartedSource === 'kafka',
refetchInterval: pollInterval,
});
} = useOnboardingStatus(
{
enabled: getStartedSource === 'kafka',
refetchInterval: pollInterval,
},
getStartedSourceService || '',
'query-key-onboarding-status',
);

const [
shouldRetryOnboardingCall,
@@ -326,18 +334,30 @@ export default function ConnectionStatus(): JSX.Element {

<div className="status">
{isQueryServiceLoading && <LoadingOutlined />}
{!isQueryServiceLoading && isReceivingData && (
<>
<CheckCircleTwoTone twoToneColor="#52c41a" />
<span> Success </span>
</>
)}
{!isQueryServiceLoading && !isReceivingData && (
<>
<CloseCircleTwoTone twoToneColor="#e84749" />
<span> Failed </span>
</>
)}
{!isQueryServiceLoading &&
isReceivingData &&
(getStartedSource !== 'kafka' ? (
<>
<CheckCircleTwoTone twoToneColor="#52c41a" />
<span> Success </span>
</>
) : (
<MessagingQueueHealthCheck
serviceToInclude={[getStartedSourceService || '']}
/>
))}
{!isQueryServiceLoading &&
!isReceivingData &&
(getStartedSource !== 'kafka' ? (
<>
<CloseCircleTwoTone twoToneColor="#e84749" />
<span> Failed </span>
</>
) : (
<MessagingQueueHealthCheck
serviceToInclude={[getStartedSourceService || '']}
/>
))}
</div>
</div>
<div className="details-info">

@@ -9,7 +9,10 @@ import cx from 'classnames';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext';
import { useCases } from 'container/OnboardingContainer/OnboardingContainer';
import {
ModulesMap,
useCases,
} from 'container/OnboardingContainer/OnboardingContainer';
import {
getDataSources,
getSupportedFrameworks,
@@ -49,6 +52,9 @@ export default function DataSource(): JSX.Element {
updateSelectedFramework,
} = useOnboardingContext();

const isKafkaAPM =
getStartedSource === 'kafka' && selectedModule?.id === ModulesMap.APM;

const [supportedDataSources, setSupportedDataSources] = useState<
DataSourceType[]
>([]);
@@ -155,14 +161,14 @@ export default function DataSource(): JSX.Element {
className={cx(
'supported-language',
selectedDataSource?.name === dataSource.name ? 'selected' : '',
getStartedSource === 'kafka' &&
isKafkaAPM &&
!messagingQueueKakfaSupportedDataSources.includes(dataSource?.id || '')
? 'disabled'
: '',
)}
key={dataSource.name}
onClick={(): void => {
if (getStartedSource !== 'kafka') {
if (!isKafkaAPM) {
updateSelectedFramework(null);
updateSelectedEnvironment(null);
updateSelectedDataSource(dataSource);

@@ -252,7 +252,8 @@ import APM_java_springBoot_docker_recommendedSteps_runApplication from '../Modul
import APM_java_springBoot_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-installOtelCollector.md';
import APM_java_springBoot_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-instrumentApplication.md';
import APM_java_springBoot_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication.md';
import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producer from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producer.md';
import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_consumers from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-consumers.md';
import APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producers from '../Modules/APM/Java/md-docs/SpringBoot/Kubernetes/springBoot-kubernetes-runApplication-producers.md';
// SpringBoot-LinuxAMD64-quickstart
import APM_java_springBoot_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Java/md-docs/SpringBoot/LinuxAMD64/QuickStart/springBoot-linuxamd64-quickStart-instrumentApplication.md';
import APM_java_springBoot_linuxAMD64_quickStart_runApplication from '../Modules/APM/Java/md-docs/SpringBoot/LinuxAMD64/QuickStart/springBoot-linuxamd64-quickStart-runApplication.md';
@@ -1054,7 +1055,8 @@ export const ApmDocFilePaths = {
APM_java_springBoot_kubernetes_recommendedSteps_setupOtelCollector,
APM_java_springBoot_kubernetes_recommendedSteps_instrumentApplication,
APM_java_springBoot_kubernetes_recommendedSteps_runApplication,
APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producer,
APM_java_springBoot_kubernetes_recommendedSteps_runApplication_producers,
APM_java_springBoot_kubernetes_recommendedSteps_runApplication_consumers,

// SpringBoot-LinuxAMD64-recommended
APM_java_springBoot_linuxAMD64_recommendedSteps_setupOtelCollector,

@@ -82,7 +82,7 @@ export function AboutSigNozQuestions({
otherInterestInSignoz,
});

logEvent('User Onboarding: About SigNoz Questions Answered', {
logEvent('Org Onboarding: Answered', {
hearAboutSignoz,
otherAboutSignoz,
interestInSignoz,

@@ -161,6 +161,13 @@ function InviteTeamMembers({

setInviteUsersSuccessResponse(successfulInvites);

logEvent('Org Onboarding: Invite Team Members Success', {
teamMembers: teamMembersToInvite,
totalInvites: inviteUsersResponse.summary.total_invites,
successfulInvites: inviteUsersResponse.summary.successful_invites,
failedInvites: inviteUsersResponse.summary.failed_invites,
});

setTimeout(() => {
setDisableNextButton(false);
onNext();
@@ -172,6 +179,13 @@ function InviteTeamMembers({

setInviteUsersSuccessResponse(successfulInvites);

logEvent('Org Onboarding: Invite Team Members Partial Success', {
teamMembers: teamMembersToInvite,
totalInvites: inviteUsersResponse.summary.total_invites,
successfulInvites: inviteUsersResponse.summary.successful_invites,
failedInvites: inviteUsersResponse.summary.failed_invites,
});

if (inviteUsersResponse.failed_invites.length > 0) {
setHasErrors(true);

@@ -182,27 +196,21 @@ function InviteTeamMembers({
}
};

const {
mutate: sendInvites,
isLoading: isSendingInvites,
data: inviteUsersApiResponseData,
} = useMutation(inviteUsers, {
onSuccess: (response: SuccessResponse<InviteUsersResponse>): void => {
logEvent('User Onboarding: Invite Team Members Sent', {
teamMembers: teamMembersToInvite,
});
const { mutate: sendInvites, isLoading: isSendingInvites } = useMutation(
inviteUsers,
{
onSuccess: (response: SuccessResponse<InviteUsersResponse>): void => {
handleInviteUsersSuccess(response);
},
onError: (error: AxiosError): void => {
logEvent('Org Onboarding: Invite Team Members Failed', {
teamMembers: teamMembersToInvite,
});

handleInviteUsersSuccess(response);
handleError(error);
},
},
onError: (error: AxiosError): void => {
logEvent('User Onboarding: Invite Team Members Failed', {
teamMembers: teamMembersToInvite,
error,
});

handleError(error);
},
});
);

const handleNext = (): void => {
if (validateAllUsers()) {
@@ -254,9 +262,8 @@ function InviteTeamMembers({
};

const handleDoLater = (): void => {
logEvent('User Onboarding: Invite Team Members Skipped', {
teamMembers: teamMembersToInvite,
apiResponse: inviteUsersApiResponseData,
logEvent('Org Onboarding: Clicked Do Later', {
currentPageID: 4,
});

onNext();

@@ -122,7 +122,7 @@ function OptimiseSignozNeeds({
}, [services, hostsPerDay, logsPerDay]);

const handleOnNext = (): void => {
logEvent('User Onboarding: Optimise SigNoz Needs Answered', {
logEvent('Org Onboarding: Answered', {
logsPerDay,
hostsPerDay,
services,
@@ -144,10 +144,8 @@ function OptimiseSignozNeeds({

onWillDoLater();

logEvent('User Onboarding: Optimise SigNoz Needs Skipped', {
logsPerDay: 0,
hostsPerDay: 0,
services: 0,
logEvent('Org Onboarding: Clicked Do Later', {
currentPageID: 3,
});
};

@@ -94,6 +94,13 @@ function OrgQuestions({
organisationName === '' ||
orgDetails.organisationName === organisationName
) {
logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
familiarity,
});

onNext({
organisationName,
usesObservability,
@@ -121,10 +128,17 @@ function OrgQuestions({
},
});

logEvent('User Onboarding: Org Name Updated', {
logEvent('Org Onboarding: Org Name Updated', {
organisationName: orgDetails.organisationName,
});

logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
familiarity,
});

onNext({
organisationName,
usesObservability,
@@ -133,7 +147,7 @@ function OrgQuestions({
familiarity,
});
} else {
logEvent('User Onboarding: Org Name Update Failed', {
logEvent('Org Onboarding: Org Name Update Failed', {
organisationName: orgDetails.organisationName,
});

@@ -1,6 +1,7 @@
import './OnboardingQuestionaire.styles.scss';

import { NotificationInstance } from 'antd/es/notification/interface';
import logEvent from 'api/common/logEvent';
import updateProfileAPI from 'api/onboarding/updateProfile';
import getAllOrgPreferences from 'api/preferences/getAllOrgPreferences';
import updateOrgPreferenceAPI from 'api/preferences/updateOrgPreference';
@@ -61,6 +62,10 @@ const INITIAL_OPTIMISE_SIGNOZ_DETAILS: OptimiseSignozDetails = {
services: 0,
};

const BACK_BUTTON_EVENT_NAME = 'Org Onboarding: Back Button Clicked';
const NEXT_BUTTON_EVENT_NAME = 'Org Onboarding: Next Button Clicked';
const ONBOARDING_COMPLETE_EVENT_NAME = 'Org Onboarding: Complete';

function OnboardingQuestionaire(): JSX.Element {
const { notifications } = useNotifications();
const { org } = useSelector<AppState, AppReducer>((state) => state.app);
@@ -98,6 +103,13 @@ function OnboardingQuestionaire(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [org]);

useEffect(() => {
logEvent('Org Onboarding: Started', {
org_id: org?.[0]?.id,
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

const { refetch: refetchOrgPreferences } = useQuery({
queryFn: () => getAllOrgPreferences(),
queryKey: ['getOrgPreferences'],
@@ -120,6 +132,8 @@ function OnboardingQuestionaire(): JSX.Element {

setUpdatingOrgOnboardingStatus(false);

logEvent('Org Onboarding: Redirecting to Get Started', {});

history.push(ROUTES.GET_STARTED);
},
onError: () => {
@@ -156,6 +170,11 @@ function OnboardingQuestionaire(): JSX.Element {
});

const handleUpdateProfile = (): void => {
logEvent(NEXT_BUTTON_EVENT_NAME, {
currentPageID: 3,
nextPageID: 4,
});

updateProfile({
familiarity_with_observability: orgDetails?.familiarity as string,
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
@@ -180,6 +199,10 @@ function OnboardingQuestionaire(): JSX.Element {
};

const handleOnboardingComplete = (): void => {
logEvent(ONBOARDING_COMPLETE_EVENT_NAME, {
currentPageID: 4,
});

setUpdatingOrgOnboardingStatus(true);
updateOrgPreference({
preferenceID: 'ORG_ONBOARDING',
@@ -199,6 +222,11 @@ function OnboardingQuestionaire(): JSX.Element {
currentOrgData={currentOrgData}
orgDetails={orgDetails}
onNext={(orgDetails: OrgDetails): void => {
logEvent(NEXT_BUTTON_EVENT_NAME, {
currentPageID: 1,
nextPageID: 2,
});

setOrgDetails(orgDetails);
setCurrentStep(2);
}}
@@ -209,8 +237,20 @@ function OnboardingQuestionaire(): JSX.Element {
<AboutSigNozQuestions
signozDetails={signozDetails}
setSignozDetails={setSignozDetails}
onBack={(): void => setCurrentStep(1)}
onNext={(): void => setCurrentStep(3)}
onBack={(): void => {
logEvent(BACK_BUTTON_EVENT_NAME, {
currentPageID: 2,
prevPageID: 1,
});
setCurrentStep(1);
}}
onNext={(): void => {
logEvent(NEXT_BUTTON_EVENT_NAME, {
currentPageID: 2,
nextPageID: 3,
});
setCurrentStep(3);
}}
/>
)}

@@ -220,9 +260,15 @@ function OnboardingQuestionaire(): JSX.Element {
isUpdatingProfile={isUpdatingProfile}
optimiseSignozDetails={optimiseSignozDetails}
setOptimiseSignozDetails={setOptimiseSignozDetails}
onBack={(): void => setCurrentStep(2)}
onBack={(): void => {
logEvent(BACK_BUTTON_EVENT_NAME, {
currentPageID: 3,
prevPageID: 2,
});
setCurrentStep(2);
}}
onNext={handleUpdateProfile}
onWillDoLater={(): void => setCurrentStep(4)} // This is temporary, only to skip gateway api call as it's not setup on staging yet
onWillDoLater={(): void => setCurrentStep(4)}
/>
)}

@@ -231,7 +277,13 @@ function OnboardingQuestionaire(): JSX.Element {
isLoading={updatingOrgOnboardingStatus}
teamMembers={teamMembers}
setTeamMembers={setTeamMembers}
onBack={(): void => setCurrentStep(3)}
onBack={(): void => {
logEvent(BACK_BUTTON_EVENT_NAME, {
currentPageID: 4,
prevPageID: 3,
});
setCurrentStep(3);
}}
onNext={handleOnboardingComplete}
/>
)}

@@ -81,8 +81,10 @@ export const AggregatorFilter = memo(function AggregatorFilter({
prefix: item.type || '',
condition: !item.isColumn,
}),
!item.isColumn && item.type ? item.type : '',
)}
dataType={item.dataType}
type={item.type || ''}
/>
),
value: `${item.key}${selectValueDivider}${createIdFromObjectFields(
@@ -187,6 +189,9 @@ export const AggregatorFilter = memo(function AggregatorFilter({
prefix: query.aggregateAttribute.type || '',
condition: !query.aggregateAttribute.isColumn,
}),
!query.aggregateAttribute.isColumn && query.aggregateAttribute.type
? query.aggregateAttribute.type
: '',
);

return (

@@ -75,8 +75,10 @@ export const GroupByFilter = memo(function GroupByFilter({
prefix: item.type || '',
condition: !item.isColumn,
}),
!item.isColumn && item.type ? item.type : '',
)}
dataType={item.dataType || ''}
type={item.type || ''}
/>
),
value: `${item.id}`,
@@ -166,6 +168,7 @@ export const GroupByFilter = memo(function GroupByFilter({
prefix: item.type || '',
condition: !item.isColumn,
}),
!item.isColumn && item.type ? item.type : '',
)}`,
value: `${item.id}`,
}),

@@ -1,8 +1,9 @@
import { MetricsType } from 'container/MetricsApplication/constant';

export function removePrefix(str: string): string {
export function removePrefix(str: string, type: string): string {
const tagPrefix = `${MetricsType.Tag}_`;
const resourcePrefix = `${MetricsType.Resource}_`;
const scopePrefix = `${MetricsType.Scope}_`;

if (str.startsWith(tagPrefix)) {
return str.slice(tagPrefix.length);
@@ -10,5 +11,9 @@ export function removePrefix(str: string): string {
if (str.startsWith(resourcePrefix)) {
return str.slice(resourcePrefix.length);
}
if (str.startsWith(scopePrefix) && type === MetricsType.Scope) {
return str.slice(scopePrefix.length);
}

return str;
}

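Note the asymmetry the new parameter introduces: tag and resource prefixes are stripped unconditionally, but the scope prefix only comes off when the caller confirms the attribute really is scope-typed. Expected behaviour, given the lowercase strings in the `MetricsType` enum shown earlier:

```typescript
removePrefix('tag_http.method', 'tag'); // -> 'http.method'
removePrefix('scope_version', 'scope'); // -> 'version'
removePrefix('scope_version', 'tag');   // -> 'scope_version' (left intact)
```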
@@ -3,25 +3,23 @@ import './QueryBuilderSearch.styles.scss';
import { Tooltip } from 'antd';

import { TagContainer, TagLabel, TagValue } from './style';
import { getOptionType } from './utils';

function OptionRenderer({
label,
value,
dataType,
type,
}: OptionRendererProps): JSX.Element {
const optionType = getOptionType(label);

return (
<span className="option">
{optionType ? (
{type ? (
<Tooltip title={`${value}`} placement="topLeft">
<div className="selectOptionContainer">
<div className="option-value">{value}</div>
<div className="option-meta-data-container">
<TagContainer>
<TagLabel>Type: </TagLabel>
<TagValue>{optionType}</TagValue>
<TagValue>{type}</TagValue>
</TagContainer>
<TagContainer>
<TagLabel>Data type: </TagLabel>
@@ -43,6 +41,7 @@ interface OptionRendererProps {
label: string;
value: string;
dataType: string;
type: string;
}

export default OptionRenderer;

@@ -410,6 +410,7 @@ function QueryBuilderSearch({
label={option.label}
value={option.value}
dataType={option.dataType || ''}
type={option.type || ''}
/>
{option.selected && <StyledCheckOutlined />}
</Select.Option>

@@ -260,6 +260,20 @@
background: rgba(189, 153, 121, 0.1);
}
}

&.scope {
border: 1px solid rgba(113, 144, 249, 0.2);

.ant-typography {
color: var(--bg-robin-400);
background: rgba(113, 144, 249, 0.1);
font-size: 14px;
}

.ant-tag-close-icon {
background: rgba(113, 144, 249, 0.1);
}
}
}
}
}

@@ -94,6 +94,25 @@
letter-spacing: -0.06px;
}
}

&.scope {
border-radius: 50px;
background: rgba(113, 144, 249, 0.1) !important;
color: var(--bg-robin-400) !important;

.dot {
background-color: var(--bg-robin-400);
}
.text {
color: var(--bg-robin-400);
font-family: Inter;
font-size: 12px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 150% */
letter-spacing: -0.06px;
}
}
}
}
.option-meta-data-container {

@@ -16,4 +16,5 @@ export type Option = {
selected?: boolean;
dataType?: string;
isIndexed?: boolean;
type?: string;
};

@@ -8,15 +8,22 @@ type UseOnboardingStatus = (
options?: UseQueryOptions<
SuccessResponse<OnboardingStatusResponse> | ErrorResponse
>,
endpointService?: string,
queryKey?: string,
) => UseQueryResult<SuccessResponse<OnboardingStatusResponse> | ErrorResponse>;

export const useOnboardingStatus: UseOnboardingStatus = (options) =>
export const useOnboardingStatus: UseOnboardingStatus = (
options,
endpointService,
queryKey,
) =>
useQuery<SuccessResponse<OnboardingStatusResponse> | ErrorResponse>({
queryKey: ['onboardingStatus'],
queryKey: [queryKey || `onboardingStatus-${endpointService}`],
queryFn: () =>
getOnboardingStatus({
start: (Date.now() - 15 * 60 * 1000) * 1_000_000,
end: Date.now() * 1_000_000,
endpointService,
}),
...options,
});

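Call sites can now target a specific onboarding endpoint and isolate their cache entry, as the ConnectionStatus change earlier in this compare does. A sketch of the extended call (the option values here are illustrative):

```typescript
const { data, error, isFetching } = useOnboardingStatus(
	{ enabled: true, refetchInterval: 5000 },
	'producers', // endpointService; the API falls back to 'consumers' when empty
	'query-key-onboarding-status', // explicit key, avoids cache collisions between views
);
```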
@@ -46,6 +46,7 @@ export const useOptions = (
value: item.key,
dataType: item.dataType,
isIndexed: item?.isIndexed,
type: item?.type || '',
})),
[getLabel],
);

@@ -2,82 +2,90 @@ import './ActionButtons.styles.scss';

import { Color } from '@signozhq/design-tokens';
import { Divider, Dropdown, MenuProps, Switch, Tooltip } from 'antd';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { useIsDarkMode } from 'hooks/useDarkMode';
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { Copy, Ellipsis, PenLine, Trash2 } from 'lucide-react';
import {
useAlertRuleDelete,
useAlertRuleDuplicate,
useAlertRuleStatusToggle,
useAlertRuleUpdate,
} from 'pages/AlertDetails/hooks';
import CopyToClipboard from 'periscope/components/CopyToClipboard';
import { useAlertRule } from 'providers/Alert';
import React, { useEffect, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { CSSProperties } from 'styled-components';
import { AlertDef } from 'types/api/alerts/def';

import { AlertHeaderProps } from '../AlertHeader';
import RenameModal from './RenameModal';

const menuItemStyle: CSSProperties = {
fontSize: '14px',
letterSpacing: '0.14px',
};

function AlertActionButtons({
ruleId,
alertDetails,
setUpdatedName,
}: {
ruleId: string;
alertDetails: AlertHeaderProps['alertDetails'];
setUpdatedName: (name: string) => void;
}): JSX.Element {
const { alertRuleState, setAlertRuleState } = useAlertRule();
const { handleAlertStateToggle } = useAlertRuleStatusToggle({ ruleId });
const [intermediateName, setIntermediateName] = useState<string>(
alertDetails.alert,
);
const [isRenameAlertOpen, setIsRenameAlertOpen] = useState<boolean>(false);
const isDarkMode = useIsDarkMode();

const { handleAlertStateToggle } = useAlertRuleStatusToggle({ ruleId });
const { handleAlertDuplicate } = useAlertRuleDuplicate({
alertDetails: (alertDetails as unknown) as AlertDef,
});
const { handleAlertDelete } = useAlertRuleDelete({ ruleId: Number(ruleId) });
const { handleAlertUpdate, isLoading } = useAlertRuleUpdate({
alertDetails: (alertDetails as unknown) as AlertDef,
setUpdatedName,
intermediateName,
});

const params = useUrlQuery();
const handleRename = useCallback(() => {
setIsRenameAlertOpen(true);
}, []);

const handleRename = React.useCallback(() => {
params.set(QueryParams.ruleId, String(ruleId));
history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`);
}, [params, ruleId]);
const onNameChangeHandler = useCallback(() => {
handleAlertUpdate();
setIsRenameAlertOpen(false);
}, [handleAlertUpdate]);

const menu: MenuProps['items'] = React.useMemo(
() => [
{
key: 'rename-rule',
label: 'Rename',
icon: <PenLine size={16} color={Color.BG_VANILLA_400} />,
onClick: (): void => handleRename(),
style: menuItemStyle,
const menuItems: MenuProps['items'] = [
{
key: 'rename-rule',
label: 'Rename',
icon: <PenLine size={16} color={Color.BG_VANILLA_400} />,
onClick: handleRename,
style: menuItemStyle,
},
{
key: 'duplicate-rule',
label: 'Duplicate',
icon: <Copy size={16} color={Color.BG_VANILLA_400} />,
onClick: handleAlertDuplicate,
style: menuItemStyle,
},
{
key: 'delete-rule',
label: 'Delete',
icon: <Trash2 size={16} color={Color.BG_CHERRY_400} />,
onClick: handleAlertDelete,
style: {
...menuItemStyle,
color: Color.BG_CHERRY_400,
},
{
key: 'duplicate-rule',
label: 'Duplicate',
icon: <Copy size={16} color={Color.BG_VANILLA_400} />,
onClick: (): void => handleAlertDuplicate(),
style: menuItemStyle,
},
{ type: 'divider' },
{
key: 'delete-rule',
label: 'Delete',
icon: <Trash2 size={16} color={Color.BG_CHERRY_400} />,
onClick: (): void => handleAlertDelete(),
style: {
...menuItemStyle,
color: Color.BG_CHERRY_400,
},
},
],
[handleAlertDelete, handleAlertDuplicate, handleRename],
);
const isDarkMode = useIsDarkMode();
},
];

// state for immediate UI feedback rather than waiting for onSuccess of handleAlertStateToggle to update the alertRuleState
const [isAlertRuleDisabled, setIsAlertRuleDisabled] = useState<
@@ -95,35 +103,48 @@ function AlertActionButtons({
// eslint-disable-next-line react-hooks/exhaustive-deps
useEffect(() => (): void => setAlertRuleState(undefined), []);

const toggleAlertRule = useCallback(() => {
setIsAlertRuleDisabled((prev) => !prev);
handleAlertStateToggle();
}, [handleAlertStateToggle]);

return (
<div className="alert-action-buttons">
<Tooltip title={alertRuleState ? 'Enable alert' : 'Disable alert'}>
{isAlertRuleDisabled !== undefined && (
<Switch
size="small"
onChange={(): void => {
setIsAlertRuleDisabled((prev) => !prev);
handleAlertStateToggle();
}}
checked={!isAlertRuleDisabled}
/>
)}
</Tooltip>
<CopyToClipboard textToCopy={window.location.href} />

<Divider type="vertical" />

<Dropdown trigger={['click']} menu={{ items: menu }}>
<Tooltip title="More options">
<Ellipsis
size={16}
color={isDarkMode ? Color.BG_VANILLA_400 : Color.BG_INK_400}
cursor="pointer"
className="dropdown-icon"
/>
<>
<div className="alert-action-buttons">
<Tooltip title={alertRuleState ? 'Enable alert' : 'Disable alert'}>
{isAlertRuleDisabled !== undefined && (
<Switch
size="small"
onChange={toggleAlertRule}
checked={!isAlertRuleDisabled}
/>
)}
</Tooltip>
</Dropdown>
</div>
<CopyToClipboard textToCopy={window.location.href} />

<Divider type="vertical" />

<Dropdown trigger={['click']} menu={{ items: menuItems }}>
<Tooltip title="More options">
<Ellipsis
size={16}
color={isDarkMode ? Color.BG_VANILLA_400 : Color.BG_INK_400}
cursor="pointer"
className="dropdown-icon"
/>
</Tooltip>
</Dropdown>
</div>

<RenameModal
isOpen={isRenameAlertOpen}
setIsOpen={setIsRenameAlertOpen}
isLoading={isLoading}
onNameChangeHandler={onNameChangeHandler}
intermediateName={intermediateName}
setIntermediateName={setIntermediateName}
/>
</>
);
}

@@ -0,0 +1,138 @@
.rename-alert {
.ant-modal-content {
width: 384px;
flex-shrink: 0;
border-radius: 4px;
border: 1px solid var(--bg-slate-500);
background: var(--bg-ink-400);
box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);
padding: 0px;

.ant-modal-header {
height: 52px;
padding: 16px;
background: var(--bg-ink-400);
border-bottom: 1px solid var(--bg-slate-500);
margin-bottom: 0px;
.ant-modal-title {
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
width: 349px;
height: 20px;
}
}

.ant-modal-body {
padding: 16px;

.alert-content {
display: flex;
flex-direction: column;
gap: 8px;

.name-text {
color: var(--bg-vanilla-100);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 500;
line-height: 20px; /* 142.857% */
}

.alert-name-input {
display: flex;
padding: 6px 6px 6px 8px;
align-items: center;
gap: 4px;
align-self: stretch;
border-radius: 0px 2px 2px 0px;
border: 1px solid var(--bg-slate-400);
background: var(--bg-ink-300);
}
}
}

.ant-modal-footer {
padding: 16px;
margin-top: 0px;
.alert-rename {
display: flex;
flex-direction: row-reverse;
gap: 12px;

.cancel-btn {
display: flex;
padding: 4px 8px;
justify-content: center;
align-items: center;
gap: 4px;
border-radius: 2px;
background: var(--bg-slate-500);

.ant-btn-icon {
margin-inline-end: 0px;
}
}

.rename-btn {
display: flex;
align-items: center;
display: flex;
padding: 4px 8px;
justify-content: center;
align-items: center;
gap: 4px;
border-radius: 2px;
background: var(--bg-robin-500);

.ant-btn-icon {
margin-inline-end: 0px;
}
}
}
}
}

.lightMode {
.rename-alert {
.ant-modal-content {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);

.ant-modal-header {
background: var(--bg-vanilla-100);
border-bottom: 1px solid var(--bg-vanilla-300);

.ant-modal-title {
color: var(--bg-ink-300);
}
}

.ant-modal-body {
.alert-content {
.name-text {
color: var(--bg-ink-300);
}

.alert-name-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
}
}
}

.ant-modal-footer {
.alert-rename {
.cancel-btn {
background: var(--bg-vanilla-300);
}
}
}
}
}
@@ -0,0 +1,95 @@
import './RenameModal.styles.scss';

import { Button, Input, InputRef, Modal, Typography } from 'antd';
import { Check, X } from 'lucide-react';
import { useCallback, useEffect, useRef } from 'react';

type Props = {
isOpen: boolean;
setIsOpen: (isOpen: boolean) => void;
onNameChangeHandler: () => void;
isLoading: boolean;
intermediateName: string;
setIntermediateName: (name: string) => void;
};

function RenameModal({
isOpen,
setIsOpen,
onNameChangeHandler,
isLoading,
intermediateName,
setIntermediateName,
}: Props): JSX.Element {
const inputRef = useRef<InputRef>(null);

useEffect(() => {
if (isOpen && inputRef.current) {
inputRef.current.focus();
}
}, [isOpen]);

const handleClose = useCallback((): void => setIsOpen(false), [setIsOpen]);

useEffect(() => {
const handleKeyDown = (e: KeyboardEvent): void => {
if (isOpen) {
if (e.key === 'Enter') {
onNameChangeHandler();
} else if (e.key === 'Escape') {
handleClose();
}
}
};

document.addEventListener('keydown', handleKeyDown);

return (): void => {
document.removeEventListener('keydown', handleKeyDown);
};
}, [isOpen, onNameChangeHandler, handleClose]);

return (
<Modal
open={isOpen}
title="Rename Alert"
onOk={onNameChangeHandler}
onCancel={handleClose}
rootClassName="rename-alert"
footer={
<div className="alert-rename">
<Button
type="primary"
icon={<Check size={14} />}
className="rename-btn"
onClick={onNameChangeHandler}
disabled={isLoading}
>
Rename Alert
</Button>
<Button
type="text"
icon={<X size={14} />}
className="cancel-btn"
onClick={handleClose}
>
Cancel
</Button>
</div>
}
>
<div className="alert-content">
<Typography.Text className="name-text">Enter a new name</Typography.Text>
<Input
ref={inputRef}
data-testid="alert-name"
className="alert-name-input"
value={intermediateName}
onChange={(e): void => setIntermediateName(e.target.value)}
/>
</div>
</Modal>
);
}

export default RenameModal;
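The modal wires Enter and Escape through a document-level `keydown` listener rather than the input itself, so the shortcuts work wherever focus sits while the modal is open; the `isOpen` guard and the effect cleanup keep the listener from leaking into the rest of the page. The same pattern in isolation (a sketch, not the component itself):

```typescript
const onKeyDown = (e: KeyboardEvent): void => {
	if (e.key === 'Enter') {
		// confirm, mirroring onNameChangeHandler()
	} else if (e.key === 'Escape') {
		// dismiss, mirroring handleClose()
	}
};

document.addEventListener('keydown', onKeyDown);
// ...and on unmount, exactly as the effect's cleanup does:
document.removeEventListener('keydown', onKeyDown);
```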
@@ -2,7 +2,7 @@ import './AlertHeader.styles.scss';

import LineClampedText from 'periscope/components/LineClampedText/LineClampedText';
import { useAlertRule } from 'providers/Alert';
import { useMemo } from 'react';
import { useMemo, useState } from 'react';

import AlertActionButtons from './ActionButtons/ActionButtons';
import AlertLabels from './AlertLabels/AlertLabels';
@@ -19,7 +19,9 @@ export type AlertHeaderProps = {
};
};
function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
const { state, alert, labels } = alertDetails;
const { state, alert: alertName, labels } = alertDetails;
const { alertRuleState } = useAlertRule();
const [updatedName, setUpdatedName] = useState(alertName);

const labelsWithoutSeverity = useMemo(
() =>
@@ -29,8 +31,6 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
[labels],
);

const { alertRuleState } = useAlertRule();

return (
<div className="alert-info">
<div className="alert-info__info-wrapper">
@@ -38,7 +38,7 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
<div className="alert-title-wrapper">
<AlertState state={alertRuleState ?? state} />
<div className="alert-title">
<LineClampedText text={alert} />
<LineClampedText text={updatedName || alertName} />
</div>
</div>
</div>
@@ -54,7 +54,11 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
</div>
</div>
<div className="alert-info__action-buttons">
<AlertActionButtons alertDetails={alertDetails} ruleId={alertDetails.id} />
<AlertActionButtons
alertDetails={alertDetails}
ruleId={alertDetails.id}
setUpdatedName={setUpdatedName}
/>
</div>
</div>
);

@@ -57,8 +57,11 @@ export const useAlertHistoryQueryParams = (): {

const startTime = params.get(QueryParams.startTime);
const endTime = params.get(QueryParams.endTime);
const relativeTimeParam = params.get(QueryParams.relativeTime);

const relativeTime =
params.get(QueryParams.relativeTime) ?? RelativeTimeMap['6hr'];
(relativeTimeParam === 'null' ? null : relativeTimeParam) ??
RelativeTimeMap['6hr'];

const intStartTime = parseInt(startTime || '0', 10);
const intEndTime = parseInt(endTime || '0', 10);
@@ -464,6 +467,44 @@ export const useAlertRuleDuplicate = ({

return { handleAlertDuplicate };
};
export const useAlertRuleUpdate = ({
alertDetails,
setUpdatedName,
intermediateName,
}: {
alertDetails: AlertDef;
setUpdatedName: (name: string) => void;
intermediateName: string;
}): {
handleAlertUpdate: () => void;
isLoading: boolean;
} => {
const { notifications } = useNotifications();
const handleError = useAxiosError();

const { mutate: updateAlertRule, isLoading } = useMutation(
[REACT_QUERY_KEY.UPDATE_ALERT_RULE, alertDetails.id],
save,
{
onMutate: () => setUpdatedName(intermediateName),
onSuccess: () =>
notifications.success({ message: 'Alert renamed successfully' }),
onError: (error) => {
setUpdatedName(alertDetails.alert);
handleError(error);
},
},
);

const handleAlertUpdate = (): void => {
updateAlertRule({
data: { ...alertDetails, alert: intermediateName },
id: alertDetails.id,
});
};

return { handleAlertUpdate, isLoading };
};

export const useAlertRuleDelete = ({
ruleId,

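The new `useAlertRuleUpdate` hook renames optimistically: `onMutate` shows the candidate name before the request resolves, and `onError` rolls back to `alertDetails.alert`. The same sequencing outside react-query, just to make the state flow explicit (all names in this sketch are illustrative):

```typescript
// Illustrative only -- the real hook delegates this sequencing to react-query.
async function renameOptimistically(
	current: string,
	next: string,
	saveFn: (name: string) => Promise<void>,
	setName: (name: string) => void,
): Promise<void> {
	setName(next); // onMutate: reflect the new name immediately
	try {
		await saveFn(next); // the save() call the hook issues
	} catch (err) {
		setName(current); // onError: restore the previous name
		throw err;
	}
}
```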
@@ -1,9 +1,12 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import '../MessagingQueues.styles.scss';
|
||||
|
||||
import { Select, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import useUrlQuery from 'hooks/useUrlQuery';
import { ListMinus } from 'lucide-react';
import { useEffect, useState } from 'react';
import { useHistory } from 'react-router-dom';
@@ -13,8 +16,9 @@ import {
  MessagingQueuesViewTypeOptions,
  ProducerLatencyOptions,
} from '../MessagingQueuesUtils';
import { SelectLabelWithComingSoon } from '../MQCommon/MQCommon';
import DropRateView from '../MQDetails/DropRateView/DropRateView';
import MessagingQueueOverview from '../MQDetails/MessagingQueueOverview';
import MetricPage from '../MQDetails/MetricPage/MetricPage';
import MessagingQueuesDetails from '../MQDetails/MQDetails';
import MessagingQueuesConfigOptions from '../MQGraph/MQConfigOptions';
import MessagingQueuesGraph from '../MQGraph/MQGraph';
@@ -33,10 +37,34 @@ function MQDetailPage(): JSX.Element {
    setproducerLatencyOption,
  ] = useState<ProducerLatencyOptions>(ProducerLatencyOptions.Producers);

  const mqServiceView = useUrlQuery().get(
    QueryParams.mqServiceView,
  ) as MessagingQueuesViewTypeOptions;

  useEffect(() => {
    logEvent('Messaging Queues: Detail page visited', {});
  }, []);

  useEffect(() => {
    if (mqServiceView) {
      setSelectedView(mqServiceView);
    }
  }, [mqServiceView]);

  const updateUrlQuery = (query: Record<string, string | number>): void => {
    const searchParams = new URLSearchParams(history.location.search);
    Object.keys(query).forEach((key) => {
      searchParams.set(key, query[key].toString());
    });
    history.push({
      search: searchParams.toString(),
    });
  };

  const showMessagingQueueDetails =
    selectedView !== MessagingQueuesViewType.dropRate.value &&
    selectedView !== MessagingQueuesViewType.metricPage.value;

  return (
    <div className="messaging-queue-container">
      <div className="messaging-breadcrumb">
@@ -55,7 +83,11 @@ function MQDetailPage(): JSX.Element {
          className="messaging-queue-options"
          defaultValue={MessagingQueuesViewType.consumerLag.value}
          popupClassName="messaging-queue-options-popup"
          onChange={(value): void => setSelectedView(value)}
          onChange={(value): void => {
            setSelectedView(value);
            updateUrlQuery({ [QueryParams.mqServiceView]: value });
          }}
          value={selectedView}
          options={[
            {
              label: MessagingQueuesViewType.consumerLag.label,
@@ -70,37 +102,42 @@ function MQDetailPage(): JSX.Element {
              value: MessagingQueuesViewType.producerLatency.value,
            },
            {
              label: (
                <SelectLabelWithComingSoon
                  label={MessagingQueuesViewType.consumerLatency.label}
                />
              ),
              value: MessagingQueuesViewType.consumerLatency.value,
              disabled: true,
              label: MessagingQueuesViewType.dropRate.label,
              value: MessagingQueuesViewType.dropRate.value,
            },
            {
              label: MessagingQueuesViewType.metricPage.label,
              value: MessagingQueuesViewType.metricPage.value,
            },
          ]}
        />
      </div>
      <DateTimeSelectionV2 showAutoRefresh={false} hideShareModal />
    </div>
    <div className="messaging-queue-main-graph">
      <MessagingQueuesConfigOptions />
    {selectedView === MessagingQueuesViewType.consumerLag.value ? (
    {selectedView === MessagingQueuesViewType.consumerLag.value ? (
      <div className="messaging-queue-main-graph">
        <MessagingQueuesConfigOptions />
        <MessagingQueuesGraph />
    ) : (
      <MessagingQueueOverview
        selectedView={selectedView}
        option={producerLatencyOption}
        setOption={setproducerLatencyOption}
      />
    )}
    </div>
    <div className="messaging-queue-details">
      <MessagingQueuesDetails
      </div>
    ) : selectedView === MessagingQueuesViewType.dropRate.value ? (
      <DropRateView />
    ) : selectedView === MessagingQueuesViewType.metricPage.value ? (
      <MetricPage />
    ) : (
      <MessagingQueueOverview
        selectedView={selectedView}
        producerLatencyOption={producerLatencyOption}
        option={producerLatencyOption}
        setOption={setproducerLatencyOption}
      />
    </div>
    )}
    {showMessagingQueueDetails && (
      <div className="messaging-queue-details">
        <MessagingQueuesDetails
          selectedView={selectedView}
          producerLatencyOption={producerLatencyOption}
        />
      </div>
    )}
    </div>
  );
}
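For reference, a small standalone sketch of the URLSearchParams merge behaviour that the updateUrlQuery helper above relies on; the parameter values here are hypothetical:

// Hypothetical values, for illustration only.
const searchParams = new URLSearchParams('?mqServiceView=consumerLag&foo=1');
searchParams.set('mqServiceView', 'dropRate'); // overwrites only this key
console.log(searchParams.toString()); // 'mqServiceView=dropRate&foo=1'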
@@ -0,0 +1,43 @@
.evaluation-time-selector {
  display: flex;
  align-items: center;
  gap: 8px;

  .eval-title {
    font-family: Inter;
    font-size: 14px;
    font-style: normal;
    font-weight: 500;
    line-height: 28px;
    color: var(--bg-vanilla-200);
  }

  .ant-selector {
    background-color: var(--bg-ink-400);
    border-radius: 4px;
    border: 1px solid var(--bg-slate-400);
    box-shadow: none;
  }
}

.select-dropdown-render {
  padding: 8px;
  display: flex;
  justify-content: center;
  align-items: center;
  width: 200px;
  margin: 6px;
}

.lightMode {
  .evaluation-time-selector {
    .eval-title {
      color: var(--bg-ink-400);
    }

    .ant-selector {
      background-color: var(--bg-vanilla-200);
      border: 1px solid var(--bg-ink-400);
    }
  }
}
@@ -0,0 +1,249 @@
/* eslint-disable sonarjs/no-duplicate-string */
import '../MQDetails.style.scss';

import { Table, Typography } from 'antd';
import axios from 'axios';
import cx from 'classnames';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import ROUTES from 'constants/routes';
import { useNotifications } from 'hooks/useNotifications';
import { isNumber } from 'lodash-es';
import {
  convertToTitleCase,
  MessagingQueuesViewType,
  RowData,
} from 'pages/MessagingQueues/MessagingQueuesUtils';
import { useEffect, useMemo, useState } from 'react';
import { useMutation } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';

import { MessagingQueueServicePayload } from '../MQTables/getConsumerLagDetails';
import { getKafkaSpanEval } from '../MQTables/getKafkaSpanEval';
import {
  convertToMilliseconds,
  DropRateAPIResponse,
  DropRateResponse,
} from './dropRateViewUtils';
import EvaluationTimeSelector from './EvaluationTimeSelector';

export function getTableData(data: DropRateResponse[]): RowData[] {
  if (data?.length === 0) {
    return [];
  }

  const tableData: RowData[] =
    data?.map(
      (row: DropRateResponse, index: number): RowData => ({
        ...(row.data as any), // todo-sagar
        key: index,
      }),
    ) || [];

  return tableData;
}

// eslint-disable-next-line sonarjs/cognitive-complexity
export function getColumns(
  data: DropRateResponse[],
  visibleCounts: Record<number, number>,
  handleShowMore: (index: number) => void,
): any[] {
  if (data?.length === 0) {
    return [];
  }

  const columnsOrder = [
    'producer_service',
    'consumer_service',
    'breach_percentage',
    'top_traceIDs',
    'breached_spans',
    'total_spans',
  ];

  const columns: {
    title: string;
    dataIndex: string;
    key: string;
  }[] = columnsOrder.map((column) => ({
    title: convertToTitleCase(column),
    dataIndex: column,
    key: column,
    render: (
      text: string | string[],
      _record: any,
      index: number,
    ): JSX.Element => {
      if (Array.isArray(text)) {
        const visibleCount = visibleCounts[index] || 4;
        const visibleItems = text.slice(0, visibleCount);
        const remainingCount = (text || []).length - visibleCount;

        return (
          <div>
            <div className="trace-id-list">
              {visibleItems.map((item, idx) => {
                const shouldShowMore = remainingCount > 0 && idx === visibleCount - 1;
                return (
                  <div key={item} className="traceid-style">
                    <Typography.Text
                      key={item}
                      className="traceid-text"
                      onClick={(): void => {
                        window.open(`${ROUTES.TRACE}/${item}`, '_blank');
                      }}
                    >
                      {item}
                    </Typography.Text>
                    {shouldShowMore && (
                      <Typography
                        onClick={(): void => handleShowMore(index)}
                        className="remaing-count"
                      >
                        + {remainingCount} more
                      </Typography>
                    )}
                  </div>
                );
              })}
            </div>
          </div>
        );
      }

      if (column === 'consumer_service' || column === 'producer_service') {
        return (
          <Typography.Link
            onClick={(e): void => {
              e.preventDefault();
              e.stopPropagation();
              window.open(`/services/${encodeURIComponent(text)}`, '_blank');
            }}
          >
            {text}
          </Typography.Link>
        );
      }

      if (column === 'breach_percentage' && text) {
        if (!isNumber(text))
          return <Typography.Text>{text.toString()}</Typography.Text>;
        return (
          <Typography.Text>
            {(typeof text === 'string' ? parseFloat(text) : text).toFixed(2)} %
          </Typography.Text>
        );
      }

      return <Typography.Text>{text}</Typography.Text>;
    },
  }));

  return columns;
}

const showPaginationItem = (total: number, range: number[]): JSX.Element => (
  <>
    <Typography.Text className="numbers">
      {range[0]} — {range[1]}
    </Typography.Text>
    <Typography.Text className="total"> of {total}</Typography.Text>
  </>
);

function DropRateView(): JSX.Element {
  const [columns, setColumns] = useState<any[]>([]);
  const [tableData, setTableData] = useState<any[]>([]);
  const { notifications } = useNotifications();
  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );
  const [data, setData] = useState<
    DropRateAPIResponse['data']['result'][0]['list']
  >([]);
  const [interval, setInterval] = useState<string>('');

  const [visibleCounts, setVisibleCounts] = useState<Record<number, number>>({});

  const paginationConfig = useMemo(
    () =>
      tableData?.length > 10 && {
        pageSize: 10,
        showTotal: showPaginationItem,
        showSizeChanger: false,
        hideOnSinglePage: true,
      },
    [tableData],
  );

  const evaluationTime = useMemo(() => convertToMilliseconds(interval), [
    interval,
  ]);
  const tableApiPayload: MessagingQueueServicePayload = useMemo(
    () => ({
      start: minTime,
      end: maxTime,
      evalTime: evaluationTime * 1e6,
    }),
    [evaluationTime, maxTime, minTime],
  );

  const handleOnError = (error: Error): void => {
    notifications.error({
      message: axios.isAxiosError(error) ? error?.message : SOMETHING_WENT_WRONG,
    });
  };

  const handleShowMore = (index: number): void => {
    setVisibleCounts((prevCounts) => ({
      ...prevCounts,
      [index]: (prevCounts[index] || 4) + 4,
    }));
  };

  const { mutate: getViewDetails, isLoading } = useMutation(getKafkaSpanEval, {
    onSuccess: (data) => {
      if (data.payload) {
        setData(data.payload.result[0].list);
      }
    },
    onError: handleOnError,
  });

  useEffect(() => {
    if (data?.length > 0) {
      setColumns(getColumns(data, visibleCounts, handleShowMore));
      setTableData(getTableData(data));
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [data, visibleCounts]);

  useEffect(() => {
    if (evaluationTime) {
      getViewDetails(tableApiPayload);
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [minTime, maxTime, evaluationTime]);

  return (
    <div className={cx('mq-overview-container', 'droprate-view')}>
      <div className="mq-overview-title">
        {MessagingQueuesViewType.dropRate.label}
        <EvaluationTimeSelector setInterval={setInterval} />
      </div>
      <Table
        className={cx('mq-table', 'pagination-left')}
        pagination={paginationConfig}
        size="middle"
        columns={columns}
        dataSource={tableData}
        bordered={false}
        loading={isLoading}
      />
    </div>
  );
}

export default DropRateView;
@@ -0,0 +1,111 @@
import './DropRateView.styles.scss';

import { Input, Select, Typography } from 'antd';
import { Dispatch, SetStateAction, useEffect, useState } from 'react';

const { Option } = Select;

interface SelectDropdownRenderProps {
  menu: React.ReactNode;
  inputValue: string;
  handleInputChange: (e: React.ChangeEvent<HTMLInputElement>) => void;
  handleKeyDown: (e: React.KeyboardEvent<HTMLInputElement>) => void;
  handleAddCustomValue: () => void;
}

function SelectDropdownRender({
  menu,
  inputValue,
  handleInputChange,
  handleAddCustomValue,
  handleKeyDown,
}: SelectDropdownRenderProps): JSX.Element {
  return (
    <>
      {menu}
      <Input
        placeholder="Enter custom time (ms)"
        value={inputValue}
        onChange={handleInputChange}
        onKeyDown={handleKeyDown}
        onBlur={handleAddCustomValue}
        className="select-dropdown-render"
      />
    </>
  );
}

function EvaluationTimeSelector({
  setInterval,
}: {
  setInterval: Dispatch<SetStateAction<string>>;
}): JSX.Element {
  const [inputValue, setInputValue] = useState<string>('');
  const [selectedInterval, setSelectedInterval] = useState<string | null>('5ms');
  const [dropdownOpen, setDropdownOpen] = useState<boolean>(false);

  const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>): void => {
    setInputValue(e.target.value);
  };

  const handleSelectChange = (value: string): void => {
    setSelectedInterval(value);
    setInputValue('');
    setDropdownOpen(false);
  };

  const handleAddCustomValue = (): void => {
    setSelectedInterval(inputValue);
    setInputValue(inputValue);
    setDropdownOpen(false);
  };

  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>): void => {
    if (e.key === 'Enter') {
      e.preventDefault();
      e.stopPropagation();
      handleAddCustomValue();
    }
  };

  const renderDropdown = (menu: React.ReactNode): JSX.Element => (
    <SelectDropdownRender
      menu={menu}
      inputValue={inputValue}
      handleInputChange={handleInputChange}
      handleAddCustomValue={handleAddCustomValue}
      handleKeyDown={handleKeyDown}
    />
  );

  useEffect(() => {
    if (selectedInterval) {
      setInterval(() => selectedInterval);
    }
  }, [selectedInterval, setInterval]);

  return (
    <div className="evaluation-time-selector">
      <Typography.Text className="eval-title">
        Evaluation Interval:
      </Typography.Text>
      <Select
        style={{ width: 220 }}
        placeholder="Select time interval (ms)"
        value={selectedInterval}
        onChange={handleSelectChange}
        open={dropdownOpen}
        onDropdownVisibleChange={setDropdownOpen}
        dropdownRender={renderDropdown}
      >
        <Option value="1ms">1ms</Option>
        <Option value="2ms">2ms</Option>
        <Option value="5ms">5ms</Option>
        <Option value="10ms">10ms</Option>
        <Option value="15ms">15ms</Option>
      </Select>
    </div>
  );
}

export default EvaluationTimeSelector;
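A minimal usage sketch for the selector above, mirroring how DropRateView wires it; the parent component here is hypothetical, and the setInterval prop receives strings such as '5ms' or a custom value typed into the dropdown:

import { useState } from 'react';

import EvaluationTimeSelector from './EvaluationTimeSelector';

function ExampleParent(): JSX.Element {
  // interval later feeds convertToMilliseconds, as DropRateView does
  const [interval, setIntervalValue] = useState<string>('');
  return (
    <div>
      <EvaluationTimeSelector setInterval={setIntervalValue} />
      <span>selected: {interval}</span>
    </div>
  );
}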
@@ -0,0 +1,46 @@
export function convertToMilliseconds(timeInput: string): number {
  if (!timeInput.trim()) {
    return 0;
  }

  const match = timeInput.match(/^(\d+)(ms|s|ns)?$/); // Match number and optional unit
  if (!match) {
    throw new Error(`Invalid time format: ${timeInput}`);
  }

  const value = parseInt(match[1], 10);
  const unit = match[2] || 'ms'; // Default to 'ms' if no unit is provided

  switch (unit) {
    case 's':
      return value * 1e3;
    case 'ms':
      return value;
    case 'ns':
      return value / 1e6;
    default:
      throw new Error('Invalid time format');
  }
}

export interface DropRateResponse {
  timestamp: string;
  data: {
    breach_percentage: number;
    breached_spans: number;
    consumer_service: string;
    producer_service: string;
    top_traceIDs: string[];
    total_spans: number;
  };
}
export interface DropRateAPIResponse {
  status: string;
  data: {
    resultType: string;
    result: {
      queryName: string;
      list: DropRateResponse[];
    }[];
  };
}
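A few sample conversions for convertToMilliseconds, assuming the inputs shown (a bare number defaults to milliseconds, and 'ns' inputs come back as fractional milliseconds):

convertToMilliseconds('5ms'); // 5
convertToMilliseconds('2s'); // 2000
convertToMilliseconds('500000ns'); // 0.5
convertToMilliseconds('15'); // 15, unit defaults to 'ms'
convertToMilliseconds(''); // 0, blank input short-circuits
convertToMilliseconds('5 ms'); // throws: Invalid time format (space breaks the regex)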
@@ -17,6 +17,11 @@
  background: var(--bg-ink-500);

  .mq-overview-title {
    display: flex;
    justify-content: space-between;
    align-items: center;
    width: 100%;

    color: var(--bg-vanilla-200);

    font-family: Inter;
@@ -43,3 +48,133 @@
    }
  }
}

.droprate-view {
  .mq-table {
    width: 100%;

    .ant-table-content {
      border-radius: 6px;
      border: 1px solid var(--bg-slate-500);
      box-shadow: 0px 4px 12px 0px rgba(0, 0, 0, 0.1);
    }

    .ant-table-tbody {
      .ant-table-cell {
        max-width: 250px;
        border-bottom: none;
      }
    }

    .ant-table-thead {
      .ant-table-cell {
        background-color: var(--bg-ink-500);
        border-bottom: 1px solid var(--bg-slate-500);
      }
    }
  }

  .trace-id-list {
    display: flex;
    flex-direction: column;
    gap: 4px;
    width: max-content;

    .traceid-style {
      display: flex;
      gap: 8px;
      align-items: center;

      .traceid-text {
        border-radius: 2px;
        border: 1px solid var(--bg-slate-400);
        background: var(--bg-slate-400);
        padding: 2px;
        cursor: pointer;
      }

      .remaing-count {
        cursor: pointer;
        color: var(--bg-vanilla-100);
        font-family: Inter;
        font-size: 12px;
        font-style: normal;
        font-weight: 400;
        line-height: normal;
        letter-spacing: -0.06px;
      }
    }
  }
}

.pagination-left {
  &.mq-table {
    .ant-pagination {
      justify-content: flex-start;
    }
  }
}

.lightMode {
  .mq-overview-container {
    background: var(--bg-vanilla-200);
    border: 1px solid var(--bg-vanilla-300);

    .mq-overview-title {
      color: var(--bg-ink-400);
    }

    .mq-details-options {
      .ant-radio-button-wrapper {
        border-color: var(--bg-vanilla-300);
        color: var(--bg-slate-200);
      }
      .ant-radio-button-wrapper-checked {
        color: var(--bg-slate-200);
        background: var(--bg-vanilla-300);
      }
      .ant-radio-button-wrapper-disabled {
        background: var(--bg-vanilla-100);
        color: var(--bg-vanilla-400);
      }
    }
  }

  .droprate-view {
    .mq-table {
      .ant-table-content {
        border: 1px solid var(--bg-vanilla-300);
      }

      .ant-table-tbody {
        .ant-table-cell {
          background-color: var(--bg-vanilla-100);
        }
      }

      .ant-table-thead {
        .ant-table-cell {
          background-color: var(--bg-vanilla-100);
          border-bottom: 1px solid var(--bg-vanilla-300);
        }
      }
    }

    .no-data-style {
      border: 1px solid var(--bg-vanilla-300);
    }
  }

  .trace-id-list {
    .traceid-style {
      .traceid-text {
        border: 1px solid var(--bg-vanilla-300);
        background: var(--bg-vanilla-300);
      }

      .remaing-count {
        color: var(--bg-ink-400);
      }
    }
  }
}
@@ -18,7 +18,6 @@ import {
  ProducerLatencyOptions,
  SelectedTimelineQuery,
} from '../MessagingQueuesUtils';
import { ComingSoon } from '../MQCommon/MQCommon';
import MessagingQueuesTable from './MQTables/MQTables';

const MQServiceDetailTypePerView = (
@@ -28,7 +27,6 @@ const MQServiceDetailTypePerView = (
    MessagingQueueServiceDetailType.ConsumerDetails,
    MessagingQueueServiceDetailType.ProducerDetails,
    MessagingQueueServiceDetailType.NetworkLatency,
    MessagingQueueServiceDetailType.PartitionHostMetrics,
  ],
  [MessagingQueuesViewType.partitionLatency.value]: [
    MessagingQueueServiceDetailType.ConsumerDetails,
@@ -62,22 +60,8 @@ function MessagingQueuesOptions({
  const detailTypes =
    MQServiceDetailTypePerView(producerLatencyOption)[selectedView] || [];
  return detailTypes.map((detailType) => (
    <Radio.Button
      key={detailType}
      value={detailType}
      disabled={
        detailType === MessagingQueueServiceDetailType.PartitionHostMetrics
      }
      className={
        detailType === MessagingQueueServiceDetailType.PartitionHostMetrics
          ? 'disabled-option'
          : ''
      }
    >
    <Radio.Button key={detailType} value={detailType}>
      {ConsumerLagDetailTitle[detailType]}
      {detailType === MessagingQueueServiceDetailType.PartitionHostMetrics && (
        <ComingSoon />
      )}
    </Radio.Button>
  ));
};
@@ -116,12 +100,7 @@ const checkValidityOfDetailConfigs = (
    return false;
  }

  if (currentTab === MessagingQueueServiceDetailType.ConsumerDetails) {
    return Boolean(configDetails?.topic && configDetails?.partition);
  }
  return Boolean(
    configDetails?.group && configDetails?.topic && configDetails?.partition,
  );
  return Boolean(configDetails?.topic && configDetails?.partition);
}

if (selectedView === MessagingQueuesViewType.producerLatency.value) {
@@ -139,7 +118,7 @@ const checkValidityOfDetailConfigs = (
    return Boolean(configDetails?.topic && configDetails?.service_name);
  }

  return false;
  return selectedView === MessagingQueuesViewType.dropRate.value;
};

function MessagingQueuesDetails({
@@ -213,14 +192,14 @@ function MessagingQueuesDetails({
    <MessagingQueuesTable
      currentTab={currentTab}
      selectedView={selectedView}
      tableApi={serviceConfigDetails[selectedView].tableApi}
      tableApi={serviceConfigDetails[selectedView]?.tableApi}
      validConfigPresent={checkValidityOfDetailConfigs(
        timelineQueryData,
        selectedView,
        currentTab,
        configDetailQueryData,
      )}
      tableApiPayload={serviceConfigDetails[selectedView].tableApiPayload}
      tableApiPayload={serviceConfigDetails[selectedView]?.tableApiPayload}
    />
  </div>
);
@@ -1,3 +1,4 @@
/* eslint-disable no-nested-ternary */
/* eslint-disable react/require-default-props */
import './MQTables.styles.scss';

@@ -32,6 +33,8 @@ import {
  MessagingQueuesPayloadProps,
} from './getConsumerLagDetails';

const INITIAL_PAGE_SIZE = 10;

// eslint-disable-next-line sonarjs/cognitive-complexity
export function getColumns(
  data: MessagingQueuesPayloadProps['payload'],
@@ -154,8 +157,8 @@ function MessagingQueuesTable({

  const paginationConfig = useMemo(
    () =>
      tableData?.length > 20 && {
        pageSize: 20,
      tableData?.length > INITIAL_PAGE_SIZE && {
        pageSize: INITIAL_PAGE_SIZE,
        showTotal: showPaginationItem,
        showSizeChanger: false,
        hideOnSinglePage: true,
@@ -169,15 +172,18 @@ function MessagingQueuesTable({
    });
  };

  const { mutate: getViewDetails, isLoading } = useMutation(tableApi, {
    onSuccess: (data) => {
      if (data.payload) {
        setColumns(getColumns(data?.payload, history));
        setTableData(getTableData(data?.payload));
      }
  const { mutate: getViewDetails, isLoading, error, isError } = useMutation(
    tableApi,
    {
      onSuccess: (data) => {
        if (data.payload) {
          setColumns(getColumns(data?.payload, history));
          setTableData(getTableData(data?.payload));
        }
      },
      onError: handleConsumerDetailsOnError,
    },
    onError: handleConsumerDetailsOnError,
  });
  );

  useEffect(
    () => {
@@ -230,6 +236,10 @@ function MessagingQueuesTable({
      </Typography.Text>
      <Skeleton />
    </div>
  ) : isError ? (
    <div className="no-data-style">
      <Typography.Text>{error?.message || SOMETHING_WENT_WRONG}</Typography.Text>
    </div>
  ) : (
    <>
      {currentTab && (
@@ -241,7 +251,7 @@ function MessagingQueuesTable({
      <Table
        className={cx(
          'mq-table',
          type !== 'Detail' ? 'mq-overview-row-clickable' : '',
          type !== 'Detail' ? 'mq-overview-row-clickable' : 'pagination-left',
        )}
        pagination={paginationConfig}
        size="middle"
@@ -1,20 +1,18 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { MessagingQueueServiceDetailType } from 'pages/MessagingQueues/MessagingQueuesUtils';
import { ErrorResponse, SuccessResponse } from 'types/api';

export interface MessagingQueueServicePayload {
  start?: number | string;
  end?: number | string;
  variables: {
  variables?: {
    partition?: string;
    topic?: string;
    consumer_group?: string;
    service_name?: string;
  };
  detailType?: MessagingQueueServiceDetailType | 'producer' | 'consumer';
  evalTime?: number;
}

export interface MessagingQueuesPayloadProps {
@@ -42,21 +40,17 @@ export const getConsumerLagDetails = async (
  SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
> => {
  const { detailType, ...restProps } = props;
  try {
    const response = await axios.post(
      `/messaging-queues/kafka/consumer-lag/${props.detailType}`,
      {
        ...restProps,
      },
    );
  const response = await axios.post(
    `/messaging-queues/kafka/consumer-lag/${props.detailType}`,
    {
      ...restProps,
    },
  );

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
  } catch (error) {
    return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG);
  }
  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};
@@ -0,0 +1,23 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import { DropRateAPIResponse } from '../DropRateView/dropRateViewUtils';
import { MessagingQueueServicePayload } from './getConsumerLagDetails';

export const getKafkaSpanEval = async (
  props: Omit<MessagingQueueServicePayload, 'detailType' | 'variables'>,
): Promise<SuccessResponse<DropRateAPIResponse['data']> | ErrorResponse> => {
  const { start, end, evalTime } = props;
  const response = await axios.post(`messaging-queues/kafka/span/evaluation`, {
    start,
    end,
    eval_time: evalTime,
  });

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};
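A hedged call sketch for getKafkaSpanEval: DropRateView multiplies the millisecond evaluation time by 1e6 before sending it, which suggests eval_time travels in nanoseconds; the timestamps below are placeholder values, not real data:

import { getKafkaSpanEval } from './getKafkaSpanEval';

async function fetchDropRate(): Promise<void> {
  const response = await getKafkaSpanEval({
    start: '1730000000000000000', // placeholder start timestamp (ns)
    end: '1730000900000000000', // placeholder end timestamp (ns)
    evalTime: 5 * 1e6, // 5 ms expressed in ns, as DropRateView does
  });
  if (response.payload) {
    // each result entry carries a list of DropRateResponse rows
    console.log(response.payload.result[0].list);
  }
}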
@@ -1,7 +1,4 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { MessagingQueueServiceDetailType } from 'pages/MessagingQueues/MessagingQueuesUtils';
import { ErrorResponse, SuccessResponse } from 'types/api';

@@ -22,18 +19,15 @@ export const getPartitionLatencyDetails = async (
  } else {
    endpoint = `/messaging-queues/kafka/consumer-lag/producer-details`;
  }
  try {
    const response = await axios.post(endpoint, {
      ...rest,
    });

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG);
  }
  const response = await axios.post(endpoint, {
    ...rest,
  });

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};
@@ -1,7 +1,4 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import {
@@ -14,21 +11,17 @@ export const getPartitionLatencyOverview = async (
): Promise<
  SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
> => {
  try {
    const response = await axios.post(
      `/messaging-queues/kafka/partition-latency/overview`,
      {
        ...props,
      },
    );
  const response = await axios.post(
    `/messaging-queues/kafka/partition-latency/overview`,
    {
      ...props,
    },
  );

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
  } catch (error) {
    return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG);
  }
  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};
@@ -1,7 +1,4 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import {
@@ -16,18 +13,14 @@ export const getTopicThroughputDetails = async (
> => {
  const { detailType, ...rest } = props;
  const endpoint = `/messaging-queues/kafka/topic-throughput/${detailType}`;
  try {
    const response = await axios.post(endpoint, {
      ...rest,
    });
  const response = await axios.post(endpoint, {
    ...rest,
  });

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
  } catch (error) {
    return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG);
  }
  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};
@@ -1,7 +1,4 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { ErrorResponse, SuccessResponse } from 'types/api';

import {
@@ -15,23 +12,18 @@ export const getTopicThroughputOverview = async (
  SuccessResponse<MessagingQueuesPayloadProps['payload']> | ErrorResponse
> => {
  const { detailType, start, end } = props;
  console.log(detailType);
  try {
    const response = await axios.post(
      `messaging-queues/kafka/topic-throughput/${detailType}`,
      {
        start,
        end,
      },
    );
  const response = await axios.post(
    `messaging-queues/kafka/topic-throughput/${detailType}`,
    {
      start,
      end,
    },
  );

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
  } catch (error) {
    return ErrorResponseHandler((error as AxiosError) || SOMETHING_WENT_WRONG);
  }
  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};
@@ -12,6 +12,7 @@ import {
  ProducerLatencyOptions,
} from '../MessagingQueuesUtils';
import { MessagingQueueServicePayload } from './MQTables/getConsumerLagDetails';
import { getKafkaSpanEval } from './MQTables/getKafkaSpanEval';
import { getPartitionLatencyOverview } from './MQTables/getPartitionLatencyOverview';
import { getTopicThroughputOverview } from './MQTables/getTopicThroughputOverview';
import MessagingQueuesTable from './MQTables/MQTables';
@@ -51,6 +52,9 @@ const getTableApi = (selectedView: MessagingQueuesViewTypeOptions): any => {
  if (selectedView === MessagingQueuesViewType.producerLatency.value) {
    return getTopicThroughputOverview;
  }
  if (selectedView === MessagingQueuesViewType.dropRate.value) {
    return getKafkaSpanEval;
  }
  return getPartitionLatencyOverview;
};

@@ -78,6 +82,10 @@ function MessagingQueueOverview({
        ? 'producer'
        : 'consumer'
      : undefined,
    evalTime:
      selectedView === MessagingQueuesViewType.dropRate.value
        ? 2363404
        : undefined,
  };

  return (
@@ -0,0 +1,115 @@
import { Typography } from 'antd';
import { CardContainer } from 'container/GridCardLayout/styles';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useTranslation } from 'react-i18next';
import { Widgets } from 'types/api/dashboard/getAll';

import MetricPageGridGraph from './MetricPageGraph';
import {
  averageRequestLatencyWidgetData,
  brokerCountWidgetData,
  brokerNetworkThroughputWidgetData,
  bytesConsumedWidgetData,
  consumerFetchRateWidgetData,
  consumerGroupMemberWidgetData,
  consumerLagByGroupWidgetData,
  consumerOffsetWidgetData,
  ioWaitTimeWidgetData,
  kafkaProducerByteRateWidgetData,
  messagesConsumedWidgetData,
  producerFetchRequestPurgatoryWidgetData,
  requestResponseWidgetData,
  requestTimesWidgetData,
} from './MetricPageUtil';

interface MetricSectionProps {
  title: string;
  description: string;
  graphCount: Widgets[];
}

function MetricSection({
  title,
  description,
  graphCount,
}: MetricSectionProps): JSX.Element {
  const isDarkMode = useIsDarkMode();

  return (
    <div className="metric-column-graph">
      <CardContainer className="row-card" isDarkMode={isDarkMode}>
        <div className="row-panel">
          <Typography.Text className="section-title">{title}</Typography.Text>
        </div>
      </CardContainer>
      <Typography.Text className="graph-description">
        {description}
      </Typography.Text>
      <div className="metric-page-grid">
        {graphCount.map((widgetData) => (
          <MetricPageGridGraph
            key={`graph-${widgetData.id}`}
            widgetData={widgetData}
          />
        ))}
      </div>
    </div>
  );
}

function MetricColumnGraphs(): JSX.Element {
  const { t } = useTranslation('messagingQueues');

  const metricsData = [
    {
      title: t('metricGraphCategory.brokerMetrics.title'),
      description: t('metricGraphCategory.brokerMetrics.description'),
      graphCount: [
        brokerCountWidgetData,
        requestTimesWidgetData,
        producerFetchRequestPurgatoryWidgetData,
        brokerNetworkThroughputWidgetData,
      ],
      id: 'broker-metrics',
    },
    {
      title: t('metricGraphCategory.producerMetrics.title'),
      description: t('metricGraphCategory.producerMetrics.description'),
      graphCount: [
        ioWaitTimeWidgetData,
        requestResponseWidgetData,
        averageRequestLatencyWidgetData,
        kafkaProducerByteRateWidgetData,
        bytesConsumedWidgetData,
      ],
      id: 'producer-metrics',
    },
    {
      title: t('metricGraphCategory.consumerMetrics.title'),
      description: t('metricGraphCategory.consumerMetrics.description'),
      graphCount: [
        consumerOffsetWidgetData,
        consumerGroupMemberWidgetData,
        consumerLagByGroupWidgetData,
        consumerFetchRateWidgetData,
        messagesConsumedWidgetData,
      ],
      id: 'consumer-metrics',
    },
  ];

  return (
    <div className="metric-column-graph-container">
      {metricsData.map((metric) => (
        <MetricSection
          key={metric.id}
          title={metric.title}
          description={metric.description}
          graphCount={metric?.graphCount || []}
        />
      ))}
    </div>
  );
}

export default MetricColumnGraphs;
@@ -0,0 +1,128 @@
.metric-page {
  padding: 20px;
  display: flex;
  flex-direction: column;
  gap: 32px;

  .metric-page-container {
    display: flex;
    flex-direction: column;

    .row-panel {
      padding-left: 10px;
    }

    .metric-page-grid {
      display: grid;
      grid-template-columns: repeat(3, 1fr);
      align-items: flex-start;
      gap: 10px;

      .metric-graph {
        height: 320px;
        padding: 10px;
        width: 100%;
        box-sizing: border-box;
      }
    }

    @media (max-width: 768px) {
      .metric-page-grid {
        grid-template-columns: 1fr;
      }
    }

    .graph-description {
      padding: 16px 10px 16px 10px;
    }
  }

  .row-panel {
    border-radius: 4px;
    background: rgba(18, 19, 23, 0.4);
    padding: 8px;
    display: flex;
    gap: 6px;
    align-items: center;
    height: 48px !important;

    .ant-typography {
      font-size: 14px;
      font-weight: 500;
    }

    .row-panel-section {
      display: flex;
      gap: 6px;
      align-items: center;

      .row-icon {
        color: var(--bg-vanilla-400);
        cursor: pointer;
      }

      .section-title {
        color: var(--bg-vanilla-400);
        font-family: Inter;
        font-size: 14px;
        font-style: normal;
        font-weight: 400;
        line-height: 20px;
        letter-spacing: -0.07px;
      }
    }
  }

  .metric-column-graph-container {
    display: grid;
    grid-template-columns: 1fr 1fr 1fr;
    gap: 10px;

    .metric-column-graph {
      display: flex;
      flex-direction: column;
      gap: 10px;

      .row-panel {
        justify-content: center;
      }

      .metric-page-grid {
        display: flex;
        flex-direction: column;
        flex: 1;
        min-width: 0;
        gap: 10px;

        .metric-graph {
          height: 320px;
          padding: 10px;
          width: 100%;
          box-sizing: border-box;
        }
      }
    }
  }

  @media (max-width: 768px) {
    .metric-column-graph-container {
      grid-template-columns: 1fr;
    }
  }
}

.lightMode {
  .metric-page {
    .row-panel {
      .row-panel-section {
        .row-icon {
          color: var(--bg-ink-300);
        }

        .section-title {
          color: var(--bg-ink-300);
        }
      }
    }
  }
}
@@ -0,0 +1,134 @@
import './MetricPage.styles.scss';

import { Typography } from 'antd';
import cx from 'classnames';
import { CardContainer } from 'container/GridCardLayout/styles';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { ChevronDown, ChevronUp } from 'lucide-react';
import { useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Widgets } from 'types/api/dashboard/getAll';

import MetricColumnGraphs from './MetricColumnGraphs';
import MetricPageGridGraph from './MetricPageGraph';
import {
  cpuRecentUtilizationWidgetData,
  currentOffsetPartitionWidgetData,
  insyncReplicasWidgetData,
  jvmGcCollectionsElapsedWidgetData,
  jvmGCCountWidgetData,
  jvmMemoryHeapWidgetData,
  oldestOffsetWidgetData,
  partitionCountPerTopicWidgetData,
} from './MetricPageUtil';

interface CollapsibleMetricSectionProps {
  title: string;
  description: string;
  graphCount: Widgets[];
  isCollapsed: boolean;
  onToggle: () => void;
}

function CollapsibleMetricSection({
  title,
  description,
  graphCount,
  isCollapsed,
  onToggle,
}: CollapsibleMetricSectionProps): JSX.Element {
  const isDarkMode = useIsDarkMode();

  return (
    <div className="metric-page-container">
      <CardContainer className="row-card" isDarkMode={isDarkMode}>
        <div className={cx('row-panel')}>
          <div className="row-panel-section">
            <Typography.Text className="section-title">{title}</Typography.Text>
            {isCollapsed ? (
              <ChevronDown size={14} onClick={onToggle} className="row-icon" />
            ) : (
              <ChevronUp size={14} onClick={onToggle} className="row-icon" />
            )}
          </div>
        </div>
      </CardContainer>
      {!isCollapsed && (
        <>
          <Typography.Text className="graph-description">
            {description}
          </Typography.Text>
          <div className="metric-page-grid">
            {graphCount.map((widgetData) => (
              <MetricPageGridGraph
                key={`graph-${widgetData.id}`}
                widgetData={widgetData}
              />
            ))}
          </div>
        </>
      )}
    </div>
  );
}

function MetricPage(): JSX.Element {
  const [collapsedSections, setCollapsedSections] = useState<{
    [key: string]: boolean;
  }>({
    producerMetrics: false,
    consumerMetrics: false,
  });

  const toggleCollapse = (key: string): void => {
    setCollapsedSections((prev) => ({
      ...prev,
      [key]: !prev[key],
    }));
  };

  const { t } = useTranslation('messagingQueues');

  const metricSections = [
    {
      key: 'bokerJVMMetrics',
      title: t('metricGraphCategory.brokerJVMMetrics.title'),
      description: t('metricGraphCategory.brokerJVMMetrics.description'),
      graphCount: [
        jvmGCCountWidgetData,
        jvmGcCollectionsElapsedWidgetData,
        cpuRecentUtilizationWidgetData,
        jvmMemoryHeapWidgetData,
      ],
    },
    {
      key: 'partitionMetrics',
      title: t('metricGraphCategory.partitionMetrics.title'),
      description: t('metricGraphCategory.partitionMetrics.description'),
      graphCount: [
        partitionCountPerTopicWidgetData,
        currentOffsetPartitionWidgetData,
        oldestOffsetWidgetData,
        insyncReplicasWidgetData,
      ],
    },
  ];

  return (
    <div className="metric-page">
      <MetricColumnGraphs />
      {metricSections.map(({ key, title, description, graphCount }) => (
        <CollapsibleMetricSection
          key={key}
          title={title}
          description={description}
          graphCount={graphCount}
          isCollapsed={collapsedSections[key]}
          onToggle={(): void => toggleCollapse(key)}
        />
      ))}
    </div>
  );
}

export default MetricPage;
@@ -0,0 +1,59 @@
import './MetricPage.styles.scss';

import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { ViewMenuAction } from 'container/GridCardLayout/config';
import GridCard from 'container/GridCardLayout/GridCard';
import { Card } from 'container/GridCardLayout/styles';
import { useIsDarkMode } from 'hooks/useDarkMode';
import useUrlQuery from 'hooks/useUrlQuery';
import { useCallback } from 'react';
import { useDispatch } from 'react-redux';
import { useHistory, useLocation } from 'react-router-dom';
import { UpdateTimeInterval } from 'store/actions';
import { Widgets } from 'types/api/dashboard/getAll';

function MetricPageGridGraph({
  widgetData,
}: {
  widgetData: Widgets;
}): JSX.Element {
  const history = useHistory();
  const { pathname } = useLocation();
  const dispatch = useDispatch();
  const urlQuery = useUrlQuery();
  const isDarkMode = useIsDarkMode();

  const onDragSelect = useCallback(
    (start: number, end: number) => {
      const startTimestamp = Math.trunc(start);
      const endTimestamp = Math.trunc(end);

      urlQuery.set(QueryParams.startTime, startTimestamp.toString());
      urlQuery.set(QueryParams.endTime, endTimestamp.toString());
      const generatedUrl = `${pathname}?${urlQuery.toString()}`;
      history.push(generatedUrl);

      if (startTimestamp !== endTimestamp) {
        dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
      }
    },
    [dispatch, history, pathname, urlQuery],
  );

  return (
    <Card
      isDarkMode={isDarkMode}
      $panelType={PANEL_TYPES.TIME_SERIES}
      className="metric-graph"
    >
      <GridCard
        widget={widgetData}
        headerMenuList={[...ViewMenuAction]}
        onDragSelect={onDragSelect}
      />
    </Card>
  );
}

export default MetricPageGridGraph;
File diff suppressed because it is too large
@@ -0,0 +1,270 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
import './MessagingQueueHealthCheck.styles.scss';

import { CaretDownOutlined, LoadingOutlined } from '@ant-design/icons';
import {
  Modal,
  Select,
  Spin,
  Tooltip,
  Tree,
  TreeDataNode,
  Typography,
} from 'antd';
import { OnboardingStatusResponse } from 'api/messagingQueues/onboarding/getOnboardingStatus';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { History } from 'history';
import { Bolt, Check, OctagonAlert, X } from 'lucide-react';
import { ReactNode, useEffect, useState } from 'react';
import { useHistory } from 'react-router-dom';
import { isCloudUser } from 'utils/app';
import { v4 as uuid } from 'uuid';

import {
  KAFKA_SETUP_DOC_LINK,
  MessagingQueueHealthCheckService,
} from '../MessagingQueuesUtils';

interface AttributeCheckListProps {
  visible: boolean;
  onClose: () => void;
  onboardingStatusResponses: {
    title: string;
    data: OnboardingStatusResponse['data'];
    errorMsg?: string;
  }[];
  loading: boolean;
}

export enum AttributesFilters {
  ALL = 'all',
  SUCCESS = 'success',
  ERROR = 'error',
}

function ErrorTitleAndKey({
  title,
  parentTitle,
  history,
  isCloudUserVal,
  errorMsg,
  isLeaf,
}: {
  title: string;
  parentTitle: string;
  isCloudUserVal: boolean;
  history: History<unknown>;
  errorMsg?: string;
  isLeaf?: boolean;
}): TreeDataNode {
  const handleRedirection = (): void => {
    let link = '';

    switch (parentTitle) {
      case 'Consumers':
        link = `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Consumers}`;
        break;
      case 'Producers':
        link = `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Producers}`;
        break;
      case 'Kafka':
        link = `${ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Kafka}`;
        break;
      default:
        link = '';
    }

    if (isCloudUserVal && !!link) {
      history.push(link);
    } else {
      window.open(KAFKA_SETUP_DOC_LINK, '_blank');
    }
  };
  return {
    key: `${title}-key-${uuid()}`,
    title: (
      <div className="attribute-error-title">
        <Typography.Text className="tree-text" ellipsis={{ tooltip: title }}>
          {title}
        </Typography.Text>
        <Tooltip title={errorMsg}>
          <div
            className="attribute-error-warning"
            onClick={(e): void => {
              e.preventDefault();
              handleRedirection();
            }}
          >
            <OctagonAlert size={14} />
            Fix
          </div>
        </Tooltip>
      </div>
    ),
    isLeaf,
  };
}

function AttributeLabels({ title }: { title: ReactNode }): JSX.Element {
  return (
    <div className="attribute-label">
      <Bolt size={14} />
      {title}
    </div>
  );
}

function treeTitleAndKey({
  title,
  isLeaf,
}: {
  title: string;
  isLeaf?: boolean;
}): TreeDataNode {
  return {
    key: `${title}-key-${uuid()}`,
    title: (
      <div className="attribute-success-title">
        <Typography.Text className="tree-text" ellipsis={{ tooltip: title }}>
          {title}
        </Typography.Text>
        {isLeaf && (
          <div className="success-attribute-icon">
            <Tooltip title="Success">
              <Check size={14} />
            </Tooltip>
          </div>
        )}
      </div>
    ),
    isLeaf,
  };
}

function generateTreeDataNodes(
  response: OnboardingStatusResponse['data'],
  parentTitle: string,
  isCloudUserVal: boolean,
  history: History<unknown>,
): TreeDataNode[] {
  return response
    .map((item) => {
      if (item.attribute) {
        if (item.status === '1') {
          return treeTitleAndKey({ title: item.attribute, isLeaf: true });
        }
        if (item.status === '0') {
          return ErrorTitleAndKey({
            title: item.attribute,
            errorMsg: item.error_message || '',
            parentTitle,
            history,
            isCloudUserVal,
          });
        }
      }
      return null;
    })
    .filter(Boolean) as TreeDataNode[];
}

function AttributeCheckList({
  visible,
  onClose,
  onboardingStatusResponses,
  loading,
}: AttributeCheckListProps): JSX.Element {
  const [filter, setFilter] = useState<AttributesFilters>(AttributesFilters.ALL);
  const [treeData, setTreeData] = useState<TreeDataNode[]>([]);

  const handleFilterChange = (value: AttributesFilters): void => {
    setFilter(value);
  };
  const isCloudUserVal = isCloudUser();
  const history = useHistory();

  useEffect(() => {
    const filteredData = onboardingStatusResponses.map((response) => {
      if (response.errorMsg) {
        return ErrorTitleAndKey({
          title: response.title,
          errorMsg: response.errorMsg,
          isLeaf: true,
          parentTitle: response.title,
          history,
          isCloudUserVal,
        });
      }
      let filteredData = response.data;

      if (filter === AttributesFilters.SUCCESS) {
        filteredData = response.data.filter((item) => item.status === '1');
      } else if (filter === AttributesFilters.ERROR) {
        filteredData = response.data.filter((item) => item.status === '0');
      }

      return {
        ...treeTitleAndKey({ title: response.title }),
        children: generateTreeDataNodes(
          filteredData,
          response.title,
          isCloudUserVal,
          history,
        ),
      };
    });

    setTreeData(filteredData);
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [filter, onboardingStatusResponses]);

  return (
    <Modal
      title="Kafka Service Attributes"
      open={visible}
      onCancel={onClose}
      footer={false}
      className="mq-health-check-modal"
      closeIcon={<X size={14} />}
    >
      {loading ? (
        <div className="loader-container">
          <Spin indicator={<LoadingOutlined spin />} size="large" />
        </div>
      ) : (
        <div className="modal-content">
          <Select
            defaultValue={AttributesFilters.ALL}
            className="attribute-select"
            onChange={handleFilterChange}
            options={[
              {
                value: AttributesFilters.ALL,
                label: AttributeLabels({ title: 'Attributes: All' }),
              },
              {
                value: AttributesFilters.SUCCESS,
                label: AttributeLabels({ title: 'Attributes: Success' }),
              },
              {
                value: AttributesFilters.ERROR,
                label: AttributeLabels({ title: 'Attributes: Error' }),
              },
            ]}
          />
          <Tree
            showLine
            switcherIcon={<CaretDownOutlined />}
            treeData={treeData}
            height={450}
            className="attribute-tree"
          />
        </div>
      )}
    </Modal>
  );
}

export default AttributeCheckList;
@@ -0,0 +1,242 @@
.mq-health-check-modal {
  .ant-modal-content {
    border-radius: 4px;
    border: 1px solid var(--bg-slate-500);
    background: var(--bg-ink-400);
    box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);

    .ant-modal-close {
      margin-top: 4px;
    }

    .ant-modal-header {
      border-bottom: 1px solid var(--bg-slate-500);
      background: var(--bg-ink-400);
      margin-bottom: 16px;
      padding-bottom: 4px;

      .ant-modal-title {
        color: var(--bg-vanilla-100);
        font-family: Inter;
        font-size: 13px;
        font-style: normal;
        font-weight: 600;
        line-height: 22px;
        letter-spacing: 0.52px;
      }
    }

    .modal-content {
      display: flex;
      flex-direction: column;
      gap: 8px;
      padding: 8px;
      background: var(--bg-ink-300);

      .attribute-select {
        align-items: center;
        display: flex;
        gap: 8px;
        width: 170px;

        .ant-select-selector {
          display: flex;
          height: 28px !important;
          padding: 8px;
          align-items: center;
          gap: 4px;

          border-radius: 2px;
          border: 1px solid var(--bg-slate-400);
          background: var(--bg-ink-300);
        }
      }

      .attribute-tree {
        padding: 8px;
      }

      .tree-text {
        color: var(--bg-vanilla-400);
        font-family: 'Geist Mono';
        font-size: 12px;
        font-style: normal;
        font-weight: 400;
        line-height: 16px;
        width: 328px;
      }

      .ant-tree {
        .ant-tree-title {
          cursor: default;

          .attribute-error-title {
            display: flex;
            align-items: center;
            color: var(--bg-amber-400);
            height: 24px;

            .tree-text {
              color: var(--bg-amber-400);
            }

            .attribute-error-warning {
              display: flex;
              align-items: center;
              gap: 6px;

              font-family: 'Geist Mono';
              font-size: 12px;
              font-style: normal;
              font-weight: 400;
              line-height: 16px;
              cursor: pointer;
            }
          }

          .attribute-success-title {
            display: flex;
            align-items: center;
            height: 24px;

            .success-attribute-icon {
              width: 44px;
              color: var(--bg-vanilla-400);
              display: flex;

              > svg {
                margin-left: auto;
              }
            }
          }
        }

        .ant-tree-treenode {
          width: 100%;

          .ant-tree-node-content-wrapper {
            width: 100%;
            max-width: 380px;
          }
        }
      }
    }

    .loader-container {
      display: flex;
      justify-content: center;
      align-items: center;
      height: 100%;
      padding: 8px;
      background: var(--bg-ink-300);
      height: 156px;
    }
  }
}

.attribute-label {
  display: flex;
  align-items: center;
  gap: 8px;
}

.config-btn {
  display: flex;
  align-items: center;
  height: 28px;
  border-radius: 2px;
  border: none;
  box-shadow: none;
  background: var(--bg-slate-500);

  &.missing-config-btn {
    background: rgba(255, 205, 86, 0.1);
    color: var(--bg-amber-400);

    &:hover {
      color: var(--bg-amber-300) !important;
    }
  }

  .config-btn-content {
    display: flex;
    align-items: center;
    margin-right: 8px;
    border-right: 1px solid rgba(255, 215, 120, 0.1);
    padding-right: 8px;
  }
}

.lightMode {
  .mq-health-check-modal {
    .ant-modal-content {
      border: 1px solid var(--bg-vanilla-400);
      background: var(--bg-vanilla-200);

      .ant-modal-header {
        border-bottom: 1px solid var(--bg-vanilla-400);
        background: var(--bg-vanilla-200);

        .ant-modal-title {
          color: var(--bg-ink-300);
        }
      }

      .modal-content {
        background: var(--bg-vanilla-100);

        .attribute-select {
          .ant-select-selector {
            border: 1px solid var(--bg-vanilla-300);
            background: var(--bg-vanilla-200);
          }
        }

        .tree-text {
          color: var(--bg-ink-300);
        }

        .ant-tree {
          .ant-tree-title {
            .attribute-error-title {
              color: var(--bg-amber-500);

              .tree-text {
                color: var(--bg-amber-500);
              }
            }

            .attribute-success-title {
              .success-attribute-icon {
                color: var(--bg-ink-300);
              }
            }
          }
        }
      }

      .loader-container {
        background: var(--bg-ink-300);
      }
    }
  }

  .config-btn {
    background: var(--bg-vanilla-300);

    &.missing-config-btn {
      background: var(--bg-amber-100);
      color: var(--bg-amber-500);

      &:hover {
        color: var(--bg-amber-600) !important;
      }
    }

    .missing-config-btn {
      .config-btn-content {
        border-right: 1px solid var(--bg-amber-600);
      }
    }
  }
}
@@ -0,0 +1,133 @@
/* eslint-disable sonarjs/no-duplicate-string */
import './MessagingQueueHealthCheck.styles.scss';

import { Button } from 'antd';
import cx from 'classnames';
import { useOnboardingStatus } from 'hooks/messagingQueue/onboarding/useOnboardingStatus';
import { Bolt, FolderTree } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';

import { MessagingQueueHealthCheckService } from '../MessagingQueuesUtils';
import AttributeCheckList from './AttributeCheckList';

interface MessagingQueueHealthCheckProps {
  serviceToInclude: string[];
}

function MessagingQueueHealthCheck({
  serviceToInclude,
}: MessagingQueueHealthCheckProps): JSX.Element {
  const [loading, setLoading] = useState(false);
  const [checkListOpen, setCheckListOpen] = useState(false);

  // Consumer Data
  const {
    data: consumerData,
    error: consumerError,
    isFetching: consumerLoading,
  } = useOnboardingStatus(
    {
      enabled: !!serviceToInclude.filter(
        (service) => service === MessagingQueueHealthCheckService.Consumers,
      ).length,
    },
    MessagingQueueHealthCheckService.Consumers,
  );

  // Producer Data
  const {
    data: producerData,
    error: producerError,
    isFetching: producerLoading,
  } = useOnboardingStatus(
    {
      enabled: !!serviceToInclude.filter(
        (service) => service === MessagingQueueHealthCheckService.Producers,
      ).length,
    },
    MessagingQueueHealthCheckService.Producers,
  );

  // Kafka Data
  const {
    data: kafkaData,
    error: kafkaError,
    isFetching: kafkaLoading,
  } = useOnboardingStatus(
    {
      enabled: !!serviceToInclude.filter(
        (service) => service === MessagingQueueHealthCheckService.Kafka,
      ).length,
    },
    MessagingQueueHealthCheckService.Kafka,
  );

  // combined loading and update state
  useEffect(() => {
    setLoading(consumerLoading || producerLoading || kafkaLoading);
  }, [consumerLoading, producerLoading, kafkaLoading]);

  const missingConfiguration = useMemo(() => {
    const consumerMissing =
      (serviceToInclude.includes(MessagingQueueHealthCheckService.Consumers) &&
        consumerData?.payload?.data?.filter((item) => item.status === '0')
          .length) ||
      0;
    const producerMissing =
      (serviceToInclude.includes(MessagingQueueHealthCheckService.Producers) &&
        producerData?.payload?.data?.filter((item) => item.status === '0')
          .length) ||
      0;
    const kafkaMissing =
      (serviceToInclude.includes(MessagingQueueHealthCheckService.Kafka) &&
        kafkaData?.payload?.data?.filter((item) => item.status === '0').length) ||
      0;

    return consumerMissing + producerMissing + kafkaMissing;
  }, [consumerData, producerData, kafkaData, serviceToInclude]);

  return (
    <div>
      <Button
        onClick={(): void => setCheckListOpen(true)}
        loading={loading}
        className={cx(
          'config-btn',
          missingConfiguration ? 'missing-config-btn' : '',
        )}
        icon={<Bolt size={12} />}
      >
        <div className="config-btn-content">
          {missingConfiguration
            ? `Missing Configuration (${missingConfiguration})`
            : 'Configuration'}
        </div>
        <FolderTree size={14} />
      </Button>
      <AttributeCheckList
        visible={checkListOpen}
        onClose={(): void => setCheckListOpen(false)}
        onboardingStatusResponses={[
          {
            title: 'Consumers',
            data: consumerData?.payload?.data || [],
            errorMsg: (consumerError || consumerData?.error) as string,
          },
          {
            title: 'Producers',
            data: producerData?.payload?.data || [],
            errorMsg: (producerError || producerData?.error) as string,
          },
          {
            title: 'Kafka',
            data: kafkaData?.payload?.data || [],
            errorMsg: (kafkaError || kafkaData?.error) as string,
          },
        ].filter((item) => serviceToInclude.includes(item.title.toLowerCase()))}
        loading={loading}
      />
    </div>
  );
}

export default MessagingQueueHealthCheck;
@@ -47,7 +47,7 @@

  .header-config {
    display: flex;
    gap: 10px;
    gap: 12px;
    align-items: center;

    .messaging-queue-options {
@@ -221,6 +221,29 @@
      }
    }
  }

  :nth-child(2),
  :nth-child(4) {
    border-left: none !important;
    border-right: none !important;
  }

  &.summary-section {
    .overview-info-card {
      min-height: 144px;

      .card-title {
        color: var(--bg-vanilla-400);
        font-family: Inter;
        font-size: 13px;
        font-style: normal;
        font-weight: 500;
        line-height: 22px;
        letter-spacing: 0.52px;
        text-transform: uppercase;
      }
    }
  }
}

.summary-section {
@@ -314,6 +337,10 @@
  .messaging-breadcrumb {
    color: var(--bg-ink-400);
    border-bottom: 1px solid var(--bg-vanilla-300);

    .message-queue-text {
      color: var(--bg-ink-400);
    }
  }
  .messaging-header {
    color: var(--bg-ink-400);
@@ -1,48 +1,38 @@
/* eslint-disable sonarjs/no-duplicate-string */
import './MessagingQueues.styles.scss';

import { ExclamationCircleFilled } from '@ant-design/icons';
import { Color } from '@signozhq/design-tokens';
import { Button, Modal } from 'antd';
import { Button } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { Calendar, ListMinus } from 'lucide-react';
import { ListMinus } from 'lucide-react';
import { useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { useHistory } from 'react-router-dom';
import { isCloudUser } from 'utils/app';

import MessagingQueueHealthCheck from './MessagingQueueHealthCheck/MessagingQueueHealthCheck';
import {
  KAFKA_SETUP_DOC_LINK,
  MessagingQueueHealthCheckService,
  MessagingQueuesViewType,
} from './MessagingQueuesUtils';
import { ComingSoon } from './MQCommon/MQCommon';

function MessagingQueues(): JSX.Element {
  const history = useHistory();
  const { t } = useTranslation('messagingQueuesKafkaOverview');

  const { confirm } = Modal;

  const showConfirm = (): void => {
  const redirectToDetailsPage = (callerView?: string): void => {
    logEvent('Messaging Queues: View details clicked', {
      page: 'Messaging Queues Overview',
      source: 'Consumer Latency view',
      source: callerView,
    });

    confirm({
      icon: <ExclamationCircleFilled />,
      content: t('confirmModal.content'),
      className: 'overview-confirm-modal',
      onOk() {
        logEvent('Messaging Queues: Proceed button clicked', {
          page: 'Messaging Queues Overview',
        });
        history.push(ROUTES.MESSAGING_QUEUES_DETAIL);
      },
      okText: t('confirmModal.okText'),
    });
    history.push(
      `${ROUTES.MESSAGING_QUEUES_DETAIL}?${QueryParams.mqServiceView}=${callerView}`,
    );
  };

  const isCloudUserVal = isCloudUser();
@@ -70,7 +60,16 @@ function MessagingQueues(): JSX.Element {
        {t('breadcrumb')}
      </div>
      <div className="messaging-header">
        <div className="header-config">{t('header')}</div>
        <div className="header-config">
          {t('header')} /
          <MessagingQueueHealthCheck
            serviceToInclude={[
              MessagingQueueHealthCheckService.Consumers,
              MessagingQueueHealthCheckService.Producers,
              MessagingQueueHealthCheckService.Kafka,
            ]}
          />
        </div>
        <DateTimeSelectionV2 showAutoRefresh={false} hideShareModal />
      </div>
      <div className="messaging-overview">
@@ -87,7 +86,7 @@ function MessagingQueues(): JSX.Element {
            type="default"
            onClick={(): void =>
              getStartedRedirect(
                `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=consumer`,
                `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Consumers}`,
                'Configure Consumer',
              )
            }
@@ -106,7 +105,7 @@ function MessagingQueues(): JSX.Element {
            type="default"
            onClick={(): void =>
              getStartedRedirect(
                `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=producer`,
                `${ROUTES.GET_STARTED_APPLICATION_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Producers}`,
                'Configure Producer',
              )
            }
@@ -125,7 +124,7 @@ function MessagingQueues(): JSX.Element {
            type="default"
            onClick={(): void =>
              getStartedRedirect(
                `${ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=kafka`,
                `${ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING}?${QueryParams.getStartedSource}=kafka&${QueryParams.getStartedSourceService}=${MessagingQueueHealthCheckService.Kafka}`,
                'Monitor kafka',
              )
            }
@@ -135,55 +134,98 @@ function MessagingQueues(): JSX.Element {
          </div>
        </div>
      </div>
      <div className="summary-section">
        <div className="summary-card">
          <div className="summary-title">
            <p>{MessagingQueuesViewType.consumerLag.label}</p>
            <div className="time-value">
              <Calendar size={14} color={Color.BG_SLATE_200} />
              <p className="time-value">1D</p>
            </div>

      <p className="overview-text">{t('overviewSummarySection.title')}</p>
      <p className="overview-subtext">{t('overviewSummarySection.subtitle')}</p>
      <div className={cx('overview-doc-area', 'summary-section')}>
        <div className="overview-info-card">
          <div>
            <p className="card-title">{t('summarySection.consumer.title')}</p>
            <p className="card-info-text">
              {t('summarySection.consumer.description')}
            </p>
          </div>
          <div className="view-detail-btn">
            <Button type="primary" onClick={showConfirm}>
          <div className="button-grp">
            <Button
              type="default"
              onClick={(): void =>
                redirectToDetailsPage(MessagingQueuesViewType.consumerLag.value)
              }
            >
              {t('summarySection.viewDetailsButton')}
            </Button>
          </div>
        </div>
        <div className="summary-card coming-soon-card">
          <div className="summary-title">
            <p>{MessagingQueuesViewType.partitionLatency.label}</p>
            <div className="time-value">
              <Calendar size={14} color={Color.BG_SLATE_200} />
              <p className="time-value">1D</p>
            </div>
        <div className="overview-info-card">
          <div>
            <p className="card-title">{t('summarySection.producer.title')}</p>
            <p className="card-info-text">
              {t('summarySection.producer.description')}
            </p>
          </div>
          <div className="view-detail-btn">
            <ComingSoon />
          <div className="button-grp">
            <Button
              type="default"
              onClick={(): void =>
                redirectToDetailsPage(MessagingQueuesViewType.producerLatency.value)
              }
            >
              {t('summarySection.viewDetailsButton')}
            </Button>
          </div>
        </div>
        <div className="summary-card coming-soon-card">
          <div className="summary-title">
            <p>{MessagingQueuesViewType.producerLatency.label}</p>
            <div className="time-value">
              <Calendar size={14} color={Color.BG_SLATE_200} />
              <p className="time-value">1D</p>
            </div>
        <div className="overview-info-card">
          <div>
            <p className="card-title">{t('summarySection.partition.title')}</p>
            <p className="card-info-text">
              {t('summarySection.partition.description')}
            </p>
          </div>
          <div className="view-detail-btn">
            <ComingSoon />
          <div className="button-grp">
            <Button
              type="default"
              onClick={(): void =>
                redirectToDetailsPage(MessagingQueuesViewType.partitionLatency.value)
              }
            >
              {t('summarySection.viewDetailsButton')}
            </Button>
          </div>
        </div>
        <div className="summary-card coming-soon-card">
          <div className="summary-title">
            <p>{MessagingQueuesViewType.consumerLatency.label}</p>
            <div className="time-value">
              <Calendar size={14} color={Color.BG_SLATE_200} />
              <p className="time-value">1D</p>
            </div>
        <div className="overview-info-card">
          <div>
            <p className="card-title">{t('summarySection.dropRate.title')}</p>
            <p className="card-info-text">
              {t('summarySection.dropRate.description')}
            </p>
          </div>
          <div className="view-detail-btn">
            <ComingSoon />
          <div className="button-grp">
            <Button
              type="default"
              onClick={(): void =>
                redirectToDetailsPage(MessagingQueuesViewType.dropRate.value)
              }
            >
              {t('summarySection.viewDetailsButton')}
            </Button>
          </div>
        </div>
        <div className="overview-info-card">
          <div>
            <p className="card-title">{t('summarySection.metricPage.title')}</p>
            <p className="card-info-text">
              {t('summarySection.metricPage.description')}
            </p>
          </div>
          <div className="button-grp">
            <Button
              type="default"
              onClick={(): void =>
                redirectToDetailsPage(MessagingQueuesViewType.metricPage.value)
              }
            >
              {t('summarySection.viewDetailsButton')}
            </Button>
          </div>
        </div>
      </div>
@@ -222,7 +222,8 @@ export enum MessagingQueuesViewTypeOptions {
  ConsumerLag = 'consumerLag',
  PartitionLatency = 'partitionLatency',
  ProducerLatency = 'producerLatency',
  ConsumerLatency = 'consumerLatency',
  DropRate = 'dropRate',
  MetricPage = 'metricPage',
}

export const MessagingQueuesViewType = {
@@ -238,9 +239,13 @@ export const MessagingQueuesViewType = {
    label: 'Producer Latency view',
    value: MessagingQueuesViewTypeOptions.ProducerLatency,
  },
  consumerLatency: {
    label: 'Consumer latency view',
    value: MessagingQueuesViewTypeOptions.ConsumerLatency,
  dropRate: {
    label: 'Drop Rate view',
    value: MessagingQueuesViewTypeOptions.DropRate,
  },
  metricPage: {
    label: 'Metric view',
    value: MessagingQueuesViewTypeOptions.MetricPage,
  },
};

@@ -381,3 +386,9 @@ export const getAttributeDataFromOnboardingStatus = (
    attributeDataWithError,
  };
};

export enum MessagingQueueHealthCheckService {
  Consumers = 'consumers',
  Producers = 'producers',
  Kafka = 'kafka',
}
@@ -19,7 +19,7 @@ export const defaultSeasonality = 'hourly';

export interface AlertDef {
  id?: number;
  alertType?: string;
  alert?: string;
  alert: string;
  ruleType?: string;
  frequency?: string;
  condition: RuleCondition;
@@ -9,6 +9,7 @@ export interface ILog {
  severityNumber: number;
  body: string;
  resources_string: Record<string, never>;
  scope_string: Record<string, never>;
  attributesString: Record<string, never>;
  attributes_string: Record<string, never>;
  attributesInt: Record<string, never>;
@@ -22,6 +23,7 @@ type OmitAttributesResources = Pick<
  Exclude<
    keyof ILog,
    | 'resources_string'
    | 'scope_string'
    | 'attributesString'
    | 'attributes_string'
    | 'attributesInt'
@@ -32,4 +34,5 @@ type OmitAttributesResources = Pick<
export type ILogAggregateAttributesResources = OmitAttributesResources & {
  attributes: Record<string, never>;
  resources: Record<string, never>;
  scope: Record<string, never>;
};
File diff suppressed because it is too large
@@ -22,7 +22,6 @@ const (
    defaultTraceDB            string = "signoz_traces"
    defaultOperationsTable    string = "distributed_signoz_operations"
    defaultIndexTable         string = "distributed_signoz_index_v2"
    defaultLocalIndexTable    string = "signoz_index_v2"
    defaultErrorTable         string = "distributed_signoz_error_index_v2"
    defaultDurationTable      string = "distributed_durationSort"
    defaultUsageExplorerTable string = "distributed_usage_explorer"
@@ -46,11 +45,6 @@ const (
    defaultLogsTableV2              string = "distributed_logs_v2"
    defaultLogsResourceLocalTableV2 string = "logs_v2_resource"
    defaultLogsResourceTableV2      string = "distributed_logs_v2_resource"

    defaultTraceIndexTableV3    string = "distributed_signoz_index_v3"
    defaultTraceLocalTableName  string = "signoz_index_v3"
    defaultTraceResourceTableV3 string = "distributed_traces_v3_resource"
    defaultTraceSummaryTable    string = "distributed_trace_summary"
)

// NamespaceConfig is Clickhouse's internal configuration data
@@ -64,7 +58,6 @@ type namespaceConfig struct {
    TraceDB            string
    OperationsTable    string
    IndexTable         string
    LocalIndexTable    string
    DurationTable      string
    UsageExplorerTable string
    SpansTable         string
@@ -89,11 +82,6 @@ type namespaceConfig struct {
    LogsTableV2              string
    LogsResourceLocalTableV2 string
    LogsResourceTableV2      string

    TraceIndexTableV3     string
    TraceLocalTableNameV3 string
    TraceResourceTableV3  string
    TraceSummaryTable     string
}

// Connector defines how to connect to the database
@@ -162,7 +150,6 @@ func NewOptions(
    TraceDB:            defaultTraceDB,
    OperationsTable:    defaultOperationsTable,
    IndexTable:         defaultIndexTable,
    LocalIndexTable:    defaultLocalIndexTable,
    ErrorTable:         defaultErrorTable,
    DurationTable:      defaultDurationTable,
    UsageExplorerTable: defaultUsageExplorerTable,
@@ -187,11 +174,6 @@ func NewOptions(
    LogsLocalTableV2:         defaultLogsLocalTableV2,
    LogsResourceTableV2:      defaultLogsResourceTableV2,
    LogsResourceLocalTableV2: defaultLogsResourceLocalTableV2,

    TraceIndexTableV3:     defaultTraceIndexTableV3,
    TraceLocalTableNameV3: defaultTraceLocalTableName,
    TraceResourceTableV3:  defaultTraceResourceTableV3,
    TraceSummaryTable:     defaultTraceSummaryTable,
    },
    others: make(map[string]*namespaceConfig, len(otherNamespaces)),
}
@@ -145,16 +145,9 @@ type ClickHouseReader struct {
    liveTailRefreshSeconds int
    cluster                string

    useLogsNewSchema  bool
    useTraceNewSchema bool

    useLogsNewSchema   bool
    logsTableName      string
    logsLocalTableName string

    traceTableName       string
    traceLocalTableName  string
    traceResourceTableV3 string
    traceSummaryTable    string
}

// NewTraceReader returns a TraceReader for the database
@@ -167,7 +160,6 @@ func NewReader(
    dialTimeout time.Duration,
    cluster string,
    useLogsNewSchema bool,
    useTraceNewSchema bool,
) *ClickHouseReader {

    datasource := os.Getenv("ClickHouseUrl")
@@ -178,7 +170,7 @@ func NewReader(
        zap.L().Fatal("failed to initialize ClickHouse", zap.Error(err))
    }

    return NewReaderFromClickhouseConnection(db, options, localDB, configFile, featureFlag, cluster, useLogsNewSchema, useTraceNewSchema)
    return NewReaderFromClickhouseConnection(db, options, localDB, configFile, featureFlag, cluster, useLogsNewSchema)
}

func NewReaderFromClickhouseConnection(
@@ -189,7 +181,6 @@ func NewReaderFromClickhouseConnection(
    featureFlag interfaces.FeatureLookup,
    cluster string,
    useLogsNewSchema bool,
    useTraceNewSchema bool,
) *ClickHouseReader {
    alertManager, err := am.New()
    if err != nil {
@@ -227,13 +218,6 @@ func NewReaderFromClickhouseConnection(
        logsLocalTableName = options.primary.LogsLocalTableV2
    }

    traceTableName := options.primary.IndexTable
    traceLocalTableName := options.primary.LocalIndexTable
    if useTraceNewSchema {
        traceTableName = options.primary.TraceIndexTableV3
        traceLocalTableName = options.primary.TraceLocalTableNameV3
    }

    return &ClickHouseReader{
        db:      wrap,
        localDB: localDB,
@@ -261,8 +245,7 @@ func NewReaderFromClickhouseConnection(
        cluster:              cluster,
        queryProgressTracker: queryprogress.NewQueryProgressTracker(),

        useLogsNewSchema:  useLogsNewSchema,
        useTraceNewSchema: useTraceNewSchema,
        useLogsNewSchema: useLogsNewSchema,

        logsTableV2:      options.primary.LogsTableV2,
        logsLocalTableV2: options.primary.LogsLocalTableV2,
@@ -270,11 +253,6 @@ func NewReaderFromClickhouseConnection(
        logsResourceLocalTableV2: options.primary.LogsResourceLocalTableV2,
        logsTableName:            logsTableName,
        logsLocalTableName:       logsLocalTableName,

        traceLocalTableName:  traceLocalTableName,
        traceTableName:       traceTableName,
        traceResourceTableV3: options.primary.TraceResourceTableV3,
        traceSummaryTable:    options.primary.TraceSummaryTable,
    }
}
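
Note: the constructor keeps the analogous logs-side switch (the trace-side switch is what this diff deletes). For reference, a minimal, self-contained sketch of that flag-based table selection; the type is pared down and the table names are only illustrative defaults, not a definitive copy of the repo's config:

```go
package main

import "fmt"

// primaryConfig is a pared-down stand-in for options.primary; the real
// namespaceConfig carries many more table names.
type primaryConfig struct {
	LogsTable   string
	LogsTableV2 string
}

// pickLogsTable mirrors the flag check in NewReaderFromClickhouseConnection:
// the v2 table is used only when the new logs schema is enabled.
func pickLogsTable(p primaryConfig, useLogsNewSchema bool) string {
	if useLogsNewSchema {
		return p.LogsTableV2
	}
	return p.LogsTable
}

func main() {
	p := primaryConfig{LogsTable: "distributed_logs", LogsTableV2: "distributed_logs_v2"}
	fmt.Println(pickLogsTable(p, true)) // distributed_logs_v2
}
```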
@@ -485,8 +463,9 @@ func (r *ClickHouseReader) GetQueryRangeResult(ctx context.Context, query *model
}

func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, error) {

    services := []string{}
    query := fmt.Sprintf(`SELECT DISTINCT serviceName FROM %s.%s WHERE ts_bucket_start > (toUnixTimestamp(now()) - 1800) AND toDate(timestamp) > now() - INTERVAL 1 DAY`, r.TraceDB, r.traceLocalTableName)
    query := fmt.Sprintf(`SELECT DISTINCT serviceName FROM %s.%s WHERE toDate(timestamp) > now() - INTERVAL 1 DAY`, r.TraceDB, r.indexTable)

    rows, err := r.db.Query(ctx, query)

@@ -595,14 +574,14 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
            count(*) as numCalls
        FROM %s.%s
        WHERE serviceName = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end`,
        r.TraceDB, r.traceTableName,
        r.TraceDB, r.indexTable,
    )
    errorQuery := fmt.Sprintf(
        `SELECT
            count(*) as numErrors
        FROM %s.%s
        WHERE serviceName = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
        r.TraceDB, r.traceTableName,
        r.TraceDB, r.indexTable,
    )

    args := []interface{}{}
@@ -612,17 +591,6 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
        clickhouse.Named("serviceName", svc),
        clickhouse.Named("names", ops),
    )

    if r.useTraceNewSchema {
        bFilter := " AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket"
        query += bFilter
        errorQuery += bFilter
        args = append(args,
            clickhouse.Named("start_bucket", strconv.FormatInt(queryParams.Start.Unix()-1800, 10)),
            clickhouse.Named("end_bucket", strconv.FormatInt(queryParams.End.Unix(), 10)),
        )
    }

    // create TagQuery from TagQueryParams
    tags := createTagQueryFromTagQueryParams(queryParams.Tags)
    subQuery, argsSubQuery, errStatus := buildQueryWithTagParams(ctx, tags)
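
Note: the hunk above removes the bucket-window clause that the new trace schema appended to both queries. For reference, a small sketch of how such a clause is built; parameter names follow the removed code, while the base query text and the standalone helper are illustrative only:

```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

// appendBucketFilter widens the start bucket by 30 minutes (1800s), matching
// the removed code, so spans bucketed just before the window are still scanned.
func appendBucketFilter(query string, start, end time.Time) (string, map[string]string) {
	query += " AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket"
	args := map[string]string{
		"start_bucket": strconv.FormatInt(start.Unix()-1800, 10),
		"end_bucket":   strconv.FormatInt(end.Unix(), 10),
	}
	return query, args
}

func main() {
	base := "SELECT count(*) FROM signoz_traces.distributed_signoz_index_v3 WHERE serviceName = @serviceName"
	q, args := appendBucketFilter(base, time.Now().Add(-time.Hour), time.Now())
	fmt.Println(q)
	fmt.Println(args)
}
```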
@@ -705,7 +673,7 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *
            count(*) as numCalls
        FROM %s.%s
        WHERE serviceName = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end`,
        r.TraceDB, r.traceTableName,
        r.TraceDB, r.indexTable,
    )
    args := []interface{}{}
    args = append(args, namedArgs...)
@@ -736,7 +704,7 @@ func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *
            count(*) as numErrors
        FROM %s.%s
        WHERE serviceName = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
        r.TraceDB, r.traceTableName,
        r.TraceDB, r.indexTable,
    )
    args = []interface{}{}
    args = append(args, namedArgs...)
@@ -873,7 +841,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
        case constants.TraceID:
            continue
        case constants.ServiceName:
            finalQuery := fmt.Sprintf("SELECT serviceName, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT serviceName, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY serviceName"
            var dBResponse []model.DBResponseServiceName
@@ -890,7 +858,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                }
            }
        case constants.HttpRoute:
            finalQuery := fmt.Sprintf("SELECT httpRoute, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT httpRoute, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY httpRoute"
            var dBResponse []model.DBResponseHttpRoute
@@ -907,7 +875,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                }
            }
        case constants.HttpUrl:
            finalQuery := fmt.Sprintf("SELECT httpUrl, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT httpUrl, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY httpUrl"
            var dBResponse []model.DBResponseHttpUrl
@@ -924,7 +892,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                }
            }
        case constants.HttpMethod:
            finalQuery := fmt.Sprintf("SELECT httpMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT httpMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY httpMethod"
            var dBResponse []model.DBResponseHttpMethod
@@ -941,7 +909,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                }
            }
        case constants.HttpHost:
            finalQuery := fmt.Sprintf("SELECT httpHost, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT httpHost, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY httpHost"
            var dBResponse []model.DBResponseHttpHost
@@ -958,7 +926,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                }
            }
        case constants.OperationRequest:
            finalQuery := fmt.Sprintf("SELECT name, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT name, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY name"
            var dBResponse []model.DBResponseOperation
@@ -975,7 +943,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                }
            }
        case constants.Status:
            finalQuery := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = true", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = true", r.TraceDB, r.indexTable)
            finalQuery += query
            var dBResponse []model.DBResponseTotal
            err := r.db.Select(ctx, &dBResponse, finalQuery, args...)
@@ -986,7 +954,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query: %s", err)}
            }

            finalQuery2 := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = false", r.TraceDB, r.traceTableName)
            finalQuery2 := fmt.Sprintf("SELECT COUNT(*) as numTotal FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU AND hasError = false", r.TraceDB, r.indexTable)
            finalQuery2 += query
            var dBResponse2 []model.DBResponseTotal
            err = r.db.Select(ctx, &dBResponse2, finalQuery2, args...)
@@ -1011,7 +979,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
            finalQuery := ""
            if !durationSortEnabled {
                // if duration sort is not enabled, we need to get the min and max duration from the index table
                finalQuery = fmt.Sprintf("SELECT min(durationNano) as min, max(durationNano) as max FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
                finalQuery = fmt.Sprintf("SELECT min(durationNano) as min, max(durationNano) as max FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
                finalQuery += query
                var dBResponse []model.DBResponseMinMax
                err = r.db.Select(ctx, &dBResponse, finalQuery, args...)
@@ -1056,7 +1024,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
                }
            }
        case constants.RPCMethod:
            finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT rpcMethod, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY rpcMethod"
            var dBResponse []model.DBResponseRPCMethod
@@ -1074,7 +1042,7 @@ func (r *ClickHouseReader) GetSpanFilters(ctx context.Context, queryParams *mode
            }

        case constants.ResponseStatusCode:
            finalQuery := fmt.Sprintf("SELECT responseStatusCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.traceTableName)
            finalQuery := fmt.Sprintf("SELECT responseStatusCode, count() as count FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", r.TraceDB, r.indexTable)
            finalQuery += query
            finalQuery += " GROUP BY responseStatusCode"
            var dBResponse []model.DBResponseStatusCodeMethod
@@ -1122,7 +1090,7 @@ func getStatusFilters(query string, statusParams []string, excludeMap map[string

func (r *ClickHouseReader) GetFilteredSpans(ctx context.Context, queryParams *model.GetFilteredSpansParams) (*model.GetFilterSpansResponse, *model.ApiError) {

    queryTable := fmt.Sprintf("%s.%s", r.TraceDB, r.traceTableName)
    queryTable := fmt.Sprintf("%s.%s", r.TraceDB, r.indexTable)

    excludeMap := make(map[string]struct{})
    for _, e := range queryParams.Exclude {
@@ -1468,8 +1436,8 @@ func (r *ClickHouseReader) GetTagFilters(ctx context.Context, queryParams *model

    tagFilters := []model.TagFilters{}

    // Alternative finalQuery := fmt.Sprintf(`SELECT DISTINCT arrayJoin(tagMap.keys) as tagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, indexTable)
    finalQuery := fmt.Sprintf(`SELECT groupUniqArrayArray(mapKeys(stringTagMap)) as stringTagKeys, groupUniqArrayArray(mapKeys(numberTagMap)) as numberTagKeys, groupUniqArrayArray(mapKeys(boolTagMap)) as boolTagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.traceTableName)
    // Alternative finalQuery := fmt.Sprintf(`SELECT DISTINCT arrayJoin(tagMap.keys) as tagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
    finalQuery := fmt.Sprintf(`SELECT groupUniqArrayArray(mapKeys(stringTagMap)) as stringTagKeys, groupUniqArrayArray(mapKeys(numberTagMap)) as numberTagKeys, groupUniqArrayArray(mapKeys(boolTagMap)) as boolTagKeys FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
    finalQuery += query
    err := r.db.Select(ctx, &tagFilters, finalQuery, args...)

@@ -1580,7 +1548,7 @@ func (r *ClickHouseReader) GetTagValues(ctx context.Context, queryParams *model.

    tagValues := []model.TagValues{}

    finalQuery := fmt.Sprintf(`SELECT groupArray(DISTINCT attributes_string[@key]) as stringTagValues FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.traceTableName)
    finalQuery := fmt.Sprintf(`SELECT groupArray(DISTINCT stringTagMap[@key]) as stringTagValues FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU`, r.TraceDB, r.indexTable)
    finalQuery += query
    finalQuery += " LIMIT @limit"

@@ -1631,7 +1599,7 @@ func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *mo
            name
        FROM %s.%s
        WHERE serviceName = @serviceName AND timestamp>= @start AND timestamp<= @end`,
        r.TraceDB, r.traceTableName,
        r.TraceDB, r.indexTable,
    )
    args := []interface{}{}
    args = append(args, namedArgs...)
@@ -1698,137 +1666,10 @@ func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetU
    return &usageItems, nil
}

func (r *ClickHouseReader) SearchTracesV2(ctx context.Context, params *model.SearchTracesParams) (*[]model.SearchSpansResult, error) {
    searchSpansResult := []model.SearchSpansResult{
        {
            Columns:   []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
            IsSubTree: false,
            Events:    make([][]interface{}, 0),
        },
    }

    var traceSummary model.TraceSummary
    summaryQuery := fmt.Sprintf("SELECT * from %s.%s WHERE traceID=$1", r.TraceDB, r.traceSummaryTable)
    err := r.db.QueryRow(ctx, summaryQuery, params.TraceID).Scan(&traceSummary.TraceID, &traceSummary.FirstReported, &traceSummary.LastReported, &traceSummary.NumSpans)
    if err != nil {
        if err == sql.ErrNoRows {
            return &searchSpansResult, nil
        }
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return nil, fmt.Errorf("error in processing sql query")
    }

    if traceSummary.NumSpans > uint64(params.MaxSpansInTrace) {
        zap.L().Error("Max spans allowed in a trace limit reached", zap.Int("MaxSpansInTrace", params.MaxSpansInTrace),
            zap.Uint64("Count", traceSummary.NumSpans))
        userEmail, err := auth.GetEmailFromJwt(ctx)
        if err == nil {
            data := map[string]interface{}{
                "traceSize":            traceSummary.NumSpans,
                "maxSpansInTraceLimit": params.MaxSpansInTrace,
            }
            telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_MAX_SPANS_ALLOWED_LIMIT_REACHED, data, userEmail, true, false)
        }
        return nil, fmt.Errorf("max spans allowed in trace limit reached, please contact support for more details")
    }

    userEmail, err := auth.GetEmailFromJwt(ctx)
    if err == nil {
        data := map[string]interface{}{
            "traceSize": traceSummary.NumSpans,
        }
        telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_TRACE_DETAIL_API, data, userEmail, true, false)
    }

    var startTime, endTime, durationNano uint64
    var searchScanResponses []model.SearchSpanResponseItemV2

    query := fmt.Sprintf("SELECT timestamp, durationNano, spanID, traceID, hasError, kind, serviceName, name, references, attributes_string, events, statusMessage, statusCodeString, spanKind FROM %s.%s WHERE traceID=$1 and ts_bucket_start>=$2 and ts_bucket_start<=$3", r.TraceDB, r.traceTableName)

    start := time.Now()

    err = r.db.Select(ctx, &searchScanResponses, query, params.TraceID, strconv.FormatInt(traceSummary.FirstReported.Unix()-1800, 10), strconv.FormatInt(traceSummary.LastReported.Unix(), 10))

    zap.L().Info(query)

    if err != nil {
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return nil, fmt.Errorf("error in processing sql query")
    }
    end := time.Now()
    zap.L().Debug("getTraceSQLQuery took: ", zap.Duration("duration", end.Sub(start)))

    searchSpansResult[0].Events = make([][]interface{}, len(searchScanResponses))

    searchSpanResponses := []model.SearchSpanResponseItem{}
    start = time.Now()
    for _, item := range searchScanResponses {
        ref := []model.OtelSpanRef{}
        err := json.Unmarshal([]byte(item.References), &ref)
        if err != nil {
            zap.L().Error("Error unmarshalling references", zap.Error(err))
            return nil, err
        }

        // merge attributes_number and attributes_bool to attributes_string
        for k, v := range item.Attributes_bool {
            item.Attributes_string[k] = fmt.Sprintf("%v", v)
        }
        for k, v := range item.Attributes_number {
            item.Attributes_string[k] = fmt.Sprintf("%v", v)
        }

        jsonItem := model.SearchSpanResponseItem{
            SpanID:           item.SpanID,
            TraceID:          item.TraceID,
            ServiceName:      item.ServiceName,
            Name:             item.Name,
            Kind:             int32(item.Kind),
            DurationNano:     int64(item.DurationNano),
            HasError:         item.HasError,
            StatusMessage:    item.StatusMessage,
            StatusCodeString: item.StatusCodeString,
            SpanKind:         item.SpanKind,
            References:       ref,
            Events:           item.Events,
            TagMap:           item.Attributes_string,
        }

        jsonItem.TimeUnixNano = uint64(item.TimeUnixNano.UnixNano() / 1000000)

        searchSpanResponses = append(searchSpanResponses, jsonItem)
        if startTime == 0 || jsonItem.TimeUnixNano < startTime {
            startTime = jsonItem.TimeUnixNano
        }
        if endTime == 0 || jsonItem.TimeUnixNano > endTime {
            endTime = jsonItem.TimeUnixNano
        }
        if durationNano == 0 || uint64(jsonItem.DurationNano) > durationNano {
            durationNano = uint64(jsonItem.DurationNano)
        }
    }
    end = time.Now()
    zap.L().Debug("getTraceSQLQuery unmarshal took: ", zap.Duration("duration", end.Sub(start)))

    for i, item := range searchSpanResponses {
        spanEvents := item.GetValues()
        searchSpansResult[0].Events[i] = spanEvents
    }

    searchSpansResult[0].StartTimestampMillis = startTime - (durationNano / 1000000)
    searchSpansResult[0].EndTimestampMillis = endTime + (durationNano / 1000000)

    return &searchSpansResult, nil
}

func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.SearchTracesParams,
    smartTraceAlgorithm func(payload []model.SearchSpanResponseItem, targetSpanId string,
        levelUp int, levelDown int, spanLimit int) ([]model.SearchSpansResult, error)) (*[]model.SearchSpansResult, error) {

    if r.useTraceNewSchema {
        return r.SearchTracesV2(ctx, params)
    }

    var countSpans uint64
    countQuery := fmt.Sprintf("SELECT count() as count from %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
    err := r.db.QueryRow(ctx, countQuery, params.TraceID).Scan(&countSpans)
@@ -1905,7 +1746,6 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc

    err = r.featureFlags.CheckFeature(model.SmartTraceDetail)
    smartAlgoEnabled := err == nil
    // TODO(nitya): this will never run, remove it
    if len(searchScanResponses) > params.SpansRenderLimit && smartAlgoEnabled {
        start = time.Now()
        searchSpansResult, err = smartTraceAlgorithm(searchSpanResponses, params.SpanID, params.LevelUp, params.LevelDown, params.SpansRenderLimit)
@@ -1984,6 +1824,7 @@ func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *
}

func (r *ClickHouseReader) GetFilteredSpansAggregates(ctx context.Context, queryParams *model.GetFilteredSpanAggregatesParams) (*model.GetFilteredSpansAggregatesResponse, *model.ApiError) {

    excludeMap := make(map[string]struct{})
    for _, e := range queryParams.Exclude {
        if e == constants.OperationRequest {
@@ -2029,7 +1870,7 @@
    // Using %s for groupBy params as it can be a custom column and custom columns are not supported by clickhouse-go yet:
    // issue link: https://github.com/ClickHouse/clickhouse-go/issues/870
    if queryParams.GroupBy != "" && columnExists {
        query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, queryParams.GroupBy, aggregation_query, r.TraceDB, r.traceTableName)
        query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, queryParams.GroupBy, aggregation_query, r.TraceDB, r.indexTable)
        args = append(args, clickhouse.Named("groupByVar", queryParams.GroupBy))
    } else if queryParams.GroupBy != "" {
        customStr = strings.Split(queryParams.GroupBy, ".(")
@@ -2037,17 +1878,17 @@
            return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("GroupBy: %s not supported", queryParams.GroupBy)}
        }
        if customStr[1] == string(model.TagTypeString)+")" {
            query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, stringTagMap['%s'] as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.traceTableName)
            query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, stringTagMap['%s'] as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
        } else if customStr[1] == string(model.TagTypeNumber)+")" {
            query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(numberTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.traceTableName)
            query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(numberTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
        } else if customStr[1] == string(model.TagTypeBool)+")" {
            query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(boolTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.traceTableName)
            query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, toString(boolTagMap['%s']) as groupBy, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, customStr[0], aggregation_query, r.TraceDB, r.indexTable)
        } else {
            // return error for unsupported group by
            return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("GroupBy: %s not supported", queryParams.GroupBy)}
        }
    } else {
        query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.traceTableName)
        query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s FROM %s.%s WHERE timestamp >= @timestampL AND timestamp <= @timestampU", queryParams.StepSeconds/60, aggregation_query, r.TraceDB, r.indexTable)
    }

    if len(queryParams.TraceID) > 0 {
@@ -3215,10 +3056,11 @@ func (r *ClickHouseReader) GetLogsInfoInLastHeartBeatInterval(ctx context.Contex
}

func (r *ClickHouseReader) GetTagsInfoInLastHeartBeatInterval(ctx context.Context, interval time.Duration) (*model.TagsInfo, error) {

    queryStr := fmt.Sprintf(`select serviceName, stringTagMap['deployment.environment'] as env,
    stringTagMap['telemetry.sdk.language'] as language from %s.%s
    where timestamp > toUnixTimestamp(now()-toIntervalMinute(%d))
    group by serviceName, env, language;`, r.TraceDB, r.traceTableName, int(interval.Minutes()))
    group by serviceName, env, language;`, r.TraceDB, r.indexTable, int(interval.Minutes()))

    tagTelemetryDataList := []model.TagTelemetryData{}
    err := r.db.Select(ctx, &tagTelemetryDataList, queryStr)
@@ -4733,6 +4575,8 @@ func (r *ClickHouseReader) GetTraceAggregateAttributes(ctx context.Context, req
        if err := rows.Scan(&tagKey, &tagType, &dataType, &isColumn); err != nil {
            return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
        }
        // TODO: Remove this once the column name are updated in the table
        tagKey = tempHandleFixedColumns(tagKey)
        key := v3.AttributeKey{
            Key:      tagKey,
            DataType: v3.AttributeKeyDataType(dataType),
@@ -4772,6 +4616,8 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
        if err := rows.Scan(&tagKey, &tagType, &dataType, &isColumn); err != nil {
            return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
        }
        // TODO: Remove this once the column name are updated in the table
        tagKey = tempHandleFixedColumns(tagKey)
        key := v3.AttributeKey{
            Key:      tagKey,
            DataType: v3.AttributeKeyDataType(dataType),
@@ -4783,6 +4629,19 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
    return &response, nil
}

// tempHandleFixedColumns is a temporary function to handle the fixed columns whose name has been changed in AttributeKeys Table
func tempHandleFixedColumns(tagKey string) string {
    switch {
    case tagKey == "traceId":
        tagKey = "traceID"
    case tagKey == "spanId":
        tagKey = "spanID"
    case tagKey == "parentSpanId":
        tagKey = "parentSpanID"
    }
    return tagKey
}
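
Note: a quick illustration of the helper added above; the function body is verbatim from the diff, and the sample keys in main are hypothetical driver values:

```go
package main

import "fmt"

// tempHandleFixedColumns maps the old camelCase fixed-column names coming back
// from the attribute-keys table to their renamed counterparts.
func tempHandleFixedColumns(tagKey string) string {
	switch {
	case tagKey == "traceId":
		tagKey = "traceID"
	case tagKey == "spanId":
		tagKey = "spanID"
	case tagKey == "parentSpanId":
		tagKey = "parentSpanID"
	}
	return tagKey
}

func main() {
	// Fixed columns are renamed; everything else passes through untouched.
	for _, k := range []string{"traceId", "spanId", "parentSpanId", "httpRoute"} {
		fmt.Println(k, "->", tempHandleFixedColumns(k))
	}
}
```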
func (r *ClickHouseReader) GetTraceAttributeValues(ctx context.Context, req *v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) {

    var query string
@@ -4843,38 +4702,31 @@ func (r *ClickHouseReader) GetSpanAttributeKeys(ctx context.Context) (map[string
    var rows driver.Rows
    response := map[string]v3.AttributeKey{}

    query = fmt.Sprintf("SELECT DISTINCT(tagKey), tagType, dataType FROM %s.%s", r.TraceDB, r.spanAttributesKeysTable)
    query = fmt.Sprintf("SELECT DISTINCT(tagKey), tagType, dataType, isColumn FROM %s.%s", r.TraceDB, r.spanAttributesKeysTable)

    rows, err = r.db.Query(ctx, query)

    if err != nil {
        zap.L().Error("Error while executing query", zap.Error(err))
        return nil, fmt.Errorf("error while executing query: %s", err.Error())
    }
    defer rows.Close()

    statements := []model.ShowCreateTableStatement{}
    query = fmt.Sprintf("SHOW CREATE TABLE %s.%s", r.TraceDB, r.traceTableName)
    err = r.db.Select(ctx, &statements, query)
    if err != nil {
        return nil, fmt.Errorf("error while fetching logs schema: %s", err.Error())
    }

    var tagKey string
    var dataType string
    var tagType string
    var isColumn bool
    for rows.Next() {
        if err := rows.Scan(&tagKey, &tagType, &dataType); err != nil {
        if err := rows.Scan(&tagKey, &tagType, &dataType, &isColumn); err != nil {
            return nil, fmt.Errorf("error while scanning rows: %s", err.Error())
        }
        key := v3.AttributeKey{
            Key:      tagKey,
            DataType: v3.AttributeKeyDataType(dataType),
            Type:     v3.AttributeKeyType(tagType),
            IsColumn: isColumn(r.useLogsNewSchema, statements[0].Statement, tagType, tagKey, dataType),
            IsColumn: isColumn,
        }

        name := tagKey + "##" + tagType + "##" + strings.ToLower(dataType)
        response[name] = key
        response[tagKey] = key
    }
    return response, nil
}
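
Note: the hunk above changes both where IsColumn comes from (scanned directly instead of derived from a SHOW CREATE TABLE statement) and how results are keyed: the old code used a composite `tagKey##tagType##dataType` key, the new code keys by the bare tag key, so a tag that exists with several types now keeps only the last row scanned. A minimal sketch of the two keying schemes, with types simplified for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

// attributeKey is a pared-down stand-in for v3.AttributeKey.
type attributeKey struct {
	Key, DataType, Type string
	IsColumn            bool
}

func main() {
	k := attributeKey{Key: "http.method", DataType: "String", Type: "tag", IsColumn: false}

	// Old scheme: composite key, so the same tag can coexist per type/datatype.
	oldKey := k.Key + "##" + k.Type + "##" + strings.ToLower(k.DataType)

	// New scheme: bare key; later rows with the same tag key overwrite earlier ones.
	newKey := k.Key

	fmt.Println(oldKey, "->", newKey)
}
```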
@@ -39,7 +39,6 @@ import (
    querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
    "go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
    tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
    tracesV4 "go.signoz.io/signoz/pkg/query-service/app/traces/v4"
    "go.signoz.io/signoz/pkg/query-service/auth"
    "go.signoz.io/signoz/pkg/query-service/cache"
    "go.signoz.io/signoz/pkg/query-service/common"
@@ -111,8 +110,8 @@ type APIHandler struct {
    // Websocket connection upgrader
    Upgrader *websocket.Upgrader

    UseLogsNewSchema  bool
    UseTraceNewSchema bool
    UseLogsNewSchema bool
    UseLicensesV3    bool

    hostsRepo     *inframetrics.HostsRepo
    processesRepo *inframetrics.ProcessesRepo
@@ -159,7 +158,8 @@ type APIHandlerOpts struct {
    // Use Logs New schema
    UseLogsNewSchema bool

    UseTraceNewSchema bool
    // Use Licenses V3 structure
    UseLicensesV3 bool
}

// NewAPIHandler returns an APIHandler
@@ -171,23 +171,21 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
    }

    querierOpts := querier.QuerierOptions{
        Reader:            opts.Reader,
        Cache:             opts.Cache,
        KeyGenerator:      queryBuilder.NewKeyGenerator(),
        FluxInterval:      opts.FluxInterval,
        FeatureLookup:     opts.FeatureFlags,
        UseLogsNewSchema:  opts.UseLogsNewSchema,
        UseTraceNewSchema: opts.UseTraceNewSchema,
        Reader:           opts.Reader,
        Cache:            opts.Cache,
        KeyGenerator:     queryBuilder.NewKeyGenerator(),
        FluxInterval:     opts.FluxInterval,
        FeatureLookup:    opts.FeatureFlags,
        UseLogsNewSchema: opts.UseLogsNewSchema,
    }

    querierOptsV2 := querierV2.QuerierOptions{
        Reader:            opts.Reader,
        Cache:             opts.Cache,
        KeyGenerator:      queryBuilder.NewKeyGenerator(),
        FluxInterval:      opts.FluxInterval,
        FeatureLookup:     opts.FeatureFlags,
        UseLogsNewSchema:  opts.UseLogsNewSchema,
        UseTraceNewSchema: opts.UseTraceNewSchema,
        Reader:           opts.Reader,
        Cache:            opts.Cache,
        KeyGenerator:     queryBuilder.NewKeyGenerator(),
        FluxInterval:     opts.FluxInterval,
        FeatureLookup:    opts.FeatureFlags,
        UseLogsNewSchema: opts.UseLogsNewSchema,
    }

    querier := querier.NewQuerier(querierOpts)
@@ -217,7 +215,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
        querier:          querier,
        querierV2:        querierv2,
        UseLogsNewSchema: opts.UseLogsNewSchema,
        UseTraceNewSchema: opts.UseTraceNewSchema,
        UseLicensesV3:    opts.UseLicensesV3,
        hostsRepo:        hostsRepo,
        processesRepo:    processesRepo,
        podsRepo:         podsRepo,
@@ -231,14 +229,9 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
        logsQueryBuilder = logsv4.PrepareLogsQuery
    }

    tracesQueryBuilder := tracesV3.PrepareTracesQuery
    if opts.UseTraceNewSchema {
        tracesQueryBuilder = tracesV4.PrepareTracesQuery
    }

    builderOpts := queryBuilder.QueryBuilderOptions{
        BuildMetricQuery: metricsv3.PrepareMetricQuery,
        BuildTraceQuery:  tracesQueryBuilder,
        BuildTraceQuery:  tracesV3.PrepareTracesQuery,
        BuildLogQuery:    logsQueryBuilder,
    }
    aH.queryBuilder = queryBuilder.NewQueryBuilder(builderOpts, aH.featureFlags)
@@ -3229,16 +3222,16 @@ func (aH *APIHandler) getProducerThroughputOverview(
    }

    for _, res := range result {
        for _, series := range res.Series {
            serviceName, serviceNameOk := series.Labels["service_name"]
            topicName, topicNameOk := series.Labels["topic"]
            params := []string{serviceName, topicName}
        for _, list := range res.List {
            serviceName, serviceNameOk := list.Data["service_name"].(*string)
            topicName, topicNameOk := list.Data["topic"].(*string)
            params := []string{*serviceName, *topicName}
            hashKey := uniqueIdentifier(params, "#")
            _, ok := attributeCache.Hash[hashKey]
            if topicNameOk && serviceNameOk && !ok {
                attributeCache.Hash[hashKey] = struct{}{}
                attributeCache.TopicName = append(attributeCache.TopicName, topicName)
                attributeCache.ServiceName = append(attributeCache.ServiceName, serviceName)
                attributeCache.TopicName = append(attributeCache.TopicName, *topicName)
                attributeCache.ServiceName = append(attributeCache.ServiceName, *serviceName)
            }
        }
    }
@@ -3263,25 +3256,23 @@ func (aH *APIHandler) getProducerThroughputOverview(
    }

    latencyColumn := &v3.Result{QueryName: "latency"}
    var latencySeries []*v3.Series
    var latencySeries []*v3.Row
    for _, res := range resultFetchLatency {
        for _, series := range res.Series {
            topic, topicOk := series.Labels["topic"]
            serviceName, serviceNameOk := series.Labels["service_name"]
            params := []string{topic, serviceName}
        for _, list := range res.List {
            topic, topicOk := list.Data["topic"].(*string)
            serviceName, serviceNameOk := list.Data["service_name"].(*string)
            params := []string{*serviceName, *topic}
            hashKey := uniqueIdentifier(params, "#")
            _, ok := attributeCache.Hash[hashKey]
            if topicOk && serviceNameOk && ok {
                latencySeries = append(latencySeries, series)
                latencySeries = append(latencySeries, list)
            }
        }
    }

    latencyColumn.Series = latencySeries
    latencyColumn.List = latencySeries
    result = append(result, latencyColumn)

    resultFetchLatency = postprocess.TransformToTableForBuilderQueries(result, queryRangeParams)

    resp := v3.QueryRangeResponse{
        Result: resultFetchLatency,
    }
|
||||
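The latency pass only keeps rows whose hash key was recorded by the throughput pass above, so the key must be composed with the same field order on both sides; the new code builds the key from serviceName then topic, matching the insert. A toy illustration, assuming uniqueIdentifier simply joins its params with the separator:

package main

import (
	"fmt"
	"strings"
)

// uniqueIdentifier is assumed to join its params with the given separator,
// matching how the handler builds cache keys.
func uniqueIdentifier(params []string, sep string) string {
	return strings.Join(params, sep)
}

func main() {
	// Keys are order-sensitive: the lookup must use the same field order
	// as the insert, which is what the hunk above aligns.
	stored := uniqueIdentifier([]string{"svc", "orders"}, "#")    // service first
	lookupOld := uniqueIdentifier([]string{"orders", "svc"}, "#") // topic first: never matches
	lookupNew := uniqueIdentifier([]string{"svc", "orders"}, "#")
	fmt.Println(stored == lookupOld, stored == lookupNew) // false true
}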
@@ -4407,12 +4398,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
RespondError(w, apiErrObj, errQuriesByName)
return
}
if aH.UseTraceNewSchema {
tracesV4.Enrich(queryRangeParams, spanKeys)
} else {
tracesV3.Enrich(queryRangeParams, spanKeys)
}

tracesV3.Enrich(queryRangeParams, spanKeys)
}

// WARN: Only works for AND operator in traces query

@@ -284,7 +284,7 @@ func BuildQRParamsWithCache(messagingQueue *MessagingQueue, queryContext string,
cq = &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
BuilderQueries: bhq,
PanelType: v3.PanelTypeTable,
PanelType: v3.PanelTypeList,
}
}

@@ -364,7 +364,7 @@ func BuildClickHouseQuery(messagingQueue *MessagingQueue, queueType string, quer

func buildCompositeQuery(chq *v3.ClickHouseQuery, queryContext string) (*v3.CompositeQuery, error) {

if queryContext == "producer-consumer-eval" {
if queryContext == "producer-consumer-eval" || queryContext == "producer-throughput-overview" {
return &v3.CompositeQuery{
QueryType: v3.QueryTypeClickHouseSQL,
ClickHouseQueries: map[string]*v3.ClickHouseQuery{queryContext: chq},
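As more queryContext values route to raw ClickHouse SQL, the chained equality check grows awkward; a set-membership lookup keeps the dispatch flat. A sketch, not the committed code:

package main

import "fmt"

// clickHouseSQLContexts lists the query contexts served by raw ClickHouse SQL.
// "producer-consumer-eval" and "producer-throughput-overview" come from the
// hunk above; any further contexts would just be new map entries.
var clickHouseSQLContexts = map[string]struct{}{
	"producer-consumer-eval":       {},
	"producer-throughput-overview": {},
}

func usesClickHouseSQL(queryContext string) bool {
	_, ok := clickHouseSQLContexts[queryContext]
	return ok
}

func main() {
	fmt.Println(usesClickHouseSQL("producer-throughput-overview")) // true
	fmt.Println(usesClickHouseSQL("some-other-context"))           // false
}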
@@ -69,7 +69,7 @@ func EnrichmentRequired(params *v3.QueryRangeParamsV3) bool {
// but the query should work regardless and shouldn't fail
func isEnriched(field v3.AttributeKey) bool {
// if it is timestamp/id, don't check
if field.Key == "timestamp" || field.Key == "id" || field.Key == constants.SigNozOrderByValue {
if field.Key == "timestamp" || field.Key == "id" || field.Key == constants.SigNozOrderByValue || field.Type == v3.AttributeKeyTypeInstrumentationScope {
return true
}
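The extra clause makes isEnriched treat instrumentation-scope attributes like the intrinsic columns, presumably because they always live in the fixed scope columns and need no metadata lookup. A reduced sketch of the predicate with simplified types (constants.SigNozOrderByValue omitted):

package main

import "fmt"

type attributeKeyType string

const typeInstrumentationScope attributeKeyType = "scope"

type attributeKey struct {
	Key  string
	Type attributeKeyType
}

// isEnriched mirrors the updated predicate: intrinsic fields and
// instrumentation-scope attributes never need metadata enrichment.
func isEnriched(field attributeKey) bool {
	if field.Key == "timestamp" || field.Key == "id" || field.Type == typeInstrumentationScope {
		return true
	}
	return false
}

func main() {
	fmt.Println(isEnriched(attributeKey{Key: "version", Type: typeInstrumentationScope})) // true
	fmt.Println(isEnriched(attributeKey{Key: "method"}))                                  // false
}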
@@ -142,7 +142,7 @@ func enrichFieldWithMetadata(field v3.AttributeKey, fields map[string]v3.Attribu
}

// check if the field is present in the fields map
for _, key := range utils.GenerateEnrichmentKeys(field) {
for _, key := range utils.GenerateLogEnrichmentKeys(field) {
if val, ok := fields[key]; ok {
return val
}

@@ -57,6 +57,9 @@ func GetClickhouseLogsColumnType(columnType v3.AttributeKeyType) string {
if columnType == v3.AttributeKeyTypeTag {
return "attributes"
}
if columnType == v3.AttributeKeyTypeInstrumentationScope {
return "scope"
}
return "resources"
}
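GetClickhouseLogsColumnType picks the column family ("attributes", "scope", or "resources"); the full selector then takes the shape family_datatype_value[indexOf(family_datatype_key, 'key')], which is exactly what the test expectations below assert, e.g. scope_string_value[indexOf(scope_string_key, 'version')]. A condensed sketch of that assembly, simplified from the real getClickhouseColumnName:

package main

import "fmt"

// columnFamily mirrors GetClickhouseLogsColumnType for the three families.
func columnFamily(keyType string) string {
	switch keyType {
	case "tag":
		return "attributes"
	case "scope":
		return "scope"
	default:
		return "resources"
	}
}

// selector builds the map-lookup expression used for non-materialized keys,
// e.g. scope_string_value[indexOf(scope_string_key, 'version')].
func selector(keyType, dataType, key string) string {
	family := columnFamily(keyType)
	return fmt.Sprintf("%s_%s_value[indexOf(%s_%s_key, '%s')]", family, dataType, family, dataType, key)
}

func main() {
	fmt.Println(selector("scope", "string", "version"))
	// scope_string_value[indexOf(scope_string_key, 'version')]
}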
@@ -416,9 +419,6 @@ func orderBy(panelType v3.PanelType, items []v3.OrderBy, tagLookup map[string]st
} else if panelType == v3.PanelTypeList {
attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn}
name := getClickhouseColumnName(attr)
if item.IsColumn {
name = "`" + name + "`"
}
orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order))
}
}
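Dropping the if-block suggests the backtick quoting now happens inside getClickhouseColumnName itself, which also explains the updated expectation further down: ordering by a materialized tag column yields `attribute_string_bytes` rather than the bare bytes name. A sketch of quoting at the source, with a hypothetical helper rather than the committed code:

package main

import "fmt"

// quotedColumn returns the ClickHouse identifier for a materialized
// attribute column, quoting it once at the source so callers such as
// orderBy no longer need their own quoting branch.
func quotedColumn(dataType, key string) string {
	return fmt.Sprintf("`attribute_%s_%s`", dataType, key)
}

func main() {
	// Matches the updated expectation: `attribute_string_bytes` asc.
	fmt.Printf("%s asc\n", quotedColumn("string", "bytes"))
}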
@@ -53,6 +53,11 @@ var testGetClickhouseColumnNameData = []struct {
AttributeKey: v3.AttributeKey{Key: "test-attr", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, IsColumn: true},
ExpectedColumnName: "`attribute_string_test-attr`",
},
{
Name: "instrumentation scope attribute",
AttributeKey: v3.AttributeKey{Key: "version", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeInstrumentationScope, IsColumn: false},
ExpectedColumnName: "scope_string_value[indexOf(scope_string_key, 'version')]",
},
}

func TestGetClickhouseColumnName(t *testing.T) {
@@ -130,6 +135,13 @@ var timeSeriesFilterQueryData = []struct {
}},
ExpectedFilter: "attributes_string_value[indexOf(attributes_string_key, 'user_name')] = 'john' AND resources_string_value[indexOf(resources_string_key, 'k8s_namespace')] != 'my_service'",
},
{
Name: "Test instrumentation scope attribute",
FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "version", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeInstrumentationScope}, Value: "v1", Operator: "="},
}},
ExpectedFilter: "scope_string_value[indexOf(scope_string_key, 'version')] = 'v1'",
},
{
Name: "Test attribute and resource attribute with different case",
FilterSet: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
@@ -758,10 +770,11 @@ var testBuildLogsQueryData = []struct {
Expression: "A",
Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}},
},
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by timestamp DESC",
},
{
@@ -775,13 +788,14 @@ var testBuildLogsQueryData = []struct {
AggregateOperator: v3.AggregateOperatorNoOp,
Expression: "A",
Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}},
OrderBy: []v3.OrderBy{{ColumnName: "method", DataType: v3.AttributeKeyDataTypeString, Order: "ASC", IsColumn: true}},
OrderBy: []v3.OrderBy{{ColumnName: "method", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag, Order: "ASC", IsColumn: true}},
},
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by `method` ASC",
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) order by `attribute_string_method` ASC",
},
{
Name: "Test Noop with filter",
@@ -797,10 +811,11 @@ var testBuildLogsQueryData = []struct {
{Key: v3.AttributeKey{Key: "severity_number", DataType: v3.AttributeKeyDataTypeInt64, IsColumn: true}, Operator: "!=", Value: 0},
}},
},
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND severity_number != 0 order by timestamp DESC",
},
{
@@ -1333,7 +1348,7 @@ var testPrepLogsQueryData = []struct {
},
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND ",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' AND ",
Options: v3.QBOptions{IsLivetailQuery: true},
},
{
@@ -1352,7 +1367,7 @@ var testPrepLogsQueryData = []struct {
},
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] ILIKE '%GET%' AND ",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where attributes_string_value[indexOf(attributes_string_key, 'method')] ILIKE '%GET%' AND ",
Options: v3.QBOptions{IsLivetailQuery: true},
},
{
@@ -1368,7 +1383,7 @@ var testPrepLogsQueryData = []struct {
Filters: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{}},
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where ",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where ",
Options: v3.QBOptions{IsLivetailQuery: true},
},
{
@@ -1428,9 +1443,10 @@ var testPrepLogsQueryData = []struct {
PageSize: 100,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
"attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as " +
"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < 'logid' order by " +
"timestamp DESC LIMIT 100",
},
@@ -1458,9 +1474,10 @@ var testPrepLogsQueryData = []struct {
PageSize: 100,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as " +
"attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as " +
"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string " +
"attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope " +
"from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND attributes_string_value[indexOf(attributes_string_key, 'method')] = 'GET' order by " +
"resources_string_value[indexOf(resources_string_key, 'mycolumn')] DESC LIMIT 100 OFFSET 100",
},
@@ -1507,7 +1524,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 5,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by `timestamp` desc LIMIT 1",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by timestamp desc LIMIT 1",
},
{
Name: "Test limit greater than pageSize - order by ts",
@@ -1528,7 +1545,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 10,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by `timestamp` desc LIMIT 10",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by timestamp desc LIMIT 10",
},
{
Name: "Test limit less than pageSize - order by custom",
@@ -1547,7 +1564,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 5,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 1 OFFSET 0",
},
{
Name: "Test limit greater than pageSize - order by custom",
@@ -1568,7 +1585,7 @@ var testPrepLogsQueryLimitOffsetData = []struct {
PageSize: 50,
},
TableName: "logs",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50",
ExpectedQuery: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body,CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string,CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64,CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64,CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool,CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string,CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope from signoz_logs.distributed_logs where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string_value[indexOf(attributes_string_key, 'method')] desc LIMIT 50 OFFSET 50",
},
}
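The test churn above is one mechanical change repeated: every NoOp logs SELECT now includes scope_name and scope_version plus a scope map cast from scope_string_key/scope_string_value. Hoisting the shared column list into a constant would confine future column additions to one place; a refactoring sketch, not the committed test code:

package main

import "fmt"

// noopSelectColumns is the column list the updated tests repeat for every
// NoOp logs query; keeping it in one constant localizes the scope_name,
// scope_version, and scope additions.
const noopSelectColumns = "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body," +
	"CAST((attributes_string_key, attributes_string_value), 'Map(String, String)') as attributes_string," +
	"CAST((attributes_int64_key, attributes_int64_value), 'Map(String, Int64)') as attributes_int64," +
	"CAST((attributes_float64_key, attributes_float64_value), 'Map(String, Float64)') as attributes_float64," +
	"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
	"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
	"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope "

func main() {
	// Each expectation then becomes the shared prefix plus its own WHERE clause.
	fmt.Println(noopSelectColumns + "from signoz_logs.distributed_logs where ...")
}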
@@ -255,9 +255,6 @@ func orderBy(panelType v3.PanelType, items []v3.OrderBy, tagLookup map[string]st
} else if panelType == v3.PanelTypeList {
attr := v3.AttributeKey{Key: item.ColumnName, DataType: item.DataType, Type: item.Type, IsColumn: item.IsColumn}
name := getClickhouseKey(attr)
if item.IsColumn {
name = "`" + name + "`"
}
orderBy = append(orderBy, fmt.Sprintf("%s %s", name, item.Order))
}
}

@@ -520,14 +520,16 @@ func Test_orderByAttributeKeyTags(t *testing.T) {
{
ColumnName: "bytes",
Order: "asc",
IsColumn: true,
Type: v3.AttributeKeyTypeTag,
DataType: v3.AttributeKeyDataTypeString,
},
},
tags: []v3.AttributeKey{
{Key: "name"},
{Key: "bytes"},
},
},
want: "`name` asc,value asc,`bytes` asc",
want: "`name` asc,value asc,`attribute_string_bytes` asc",
},
{
name: "test 4",
@@ -722,7 +724,7 @@ func Test_buildLogsQuery(t *testing.T) {
},
},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string " +
"from signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726210000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
"AND attributes_string['service.name'] = 'test' AND mapContains(attributes_string, 'service.name') order by timestamp desc",
},
@@ -931,7 +933,7 @@ func TestPrepareLogsQuery(t *testing.T) {
},
options: v3.QBOptions{IsLivetailQuery: true},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string " +
"from signoz_logs.distributed_logs_v2 where attributes_string['method'] = 'GET' AND mapContains(attributes_string, 'method') AND ",
},
{
@@ -954,7 +956,7 @@ func TestPrepareLogsQuery(t *testing.T) {
},
options: v3.QBOptions{IsLivetailQuery: true},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
"signoz_logs.distributed_logs_v2 where attributes_string['method'] = 'GET' AND mapContains(attributes_string, 'method') AND " +
"(resource_fingerprint GLOBAL IN (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE simpleJSONExtractString(lower(labels), 'service.name') LIKE '%app%' AND lower(labels) like '%service.name%app%' AND ",
},
@@ -974,7 +976,7 @@ func TestPrepareLogsQuery(t *testing.T) {
},
options: v3.QBOptions{IsLivetailQuery: true},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string " +
"from signoz_logs.distributed_logs_v2 where ",
},
{
@@ -1014,9 +1016,9 @@ func TestPrepareLogsQuery(t *testing.T) {
PageSize: 5,
},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
"order by `timestamp` desc LIMIT 1",
"order by timestamp desc LIMIT 1",
},
{
name: "Test limit greater than pageSize - order by ts",
@@ -1039,9 +1041,9 @@ func TestPrepareLogsQuery(t *testing.T) {
PageSize: 10,
},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
"AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by `timestamp` desc LIMIT 10",
"AND id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by timestamp desc LIMIT 10",
},
{
name: "Test limit less than pageSize - order by custom",
@@ -1062,7 +1064,7 @@ func TestPrepareLogsQuery(t *testing.T) {
PageSize: 5,
},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) " +
"order by attributes_string['method'] desc LIMIT 1 OFFSET 0",
},
@@ -1087,7 +1089,7 @@ func TestPrepareLogsQuery(t *testing.T) {
PageSize: 50,
},
},
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, body, attributes_string, attributes_number, attributes_bool, resources_string from " +
want: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string from " +
"signoz_logs.distributed_logs_v2 where (timestamp >= 1680066360726000000 AND timestamp <= 1680066458000000000) AND (ts_bucket_start >= 1680064560 AND ts_bucket_start <= 1680066458) AND " +
"id < '2TNh4vp2TpiWyLt3SzuadLJF2s4' order by attributes_string['method'] desc LIMIT 50 OFFSET 50",
},

@@ -10,7 +10,6 @@ import (
logsV4 "go.signoz.io/signoz/pkg/query-service/app/logs/v4"
metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
tracesV4 "go.signoz.io/signoz/pkg/query-service/app/traces/v4"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@@ -159,16 +158,11 @@ func (q *querier) runBuilderQuery(

if builderQuery.DataSource == v3.DataSourceTraces {

tracesQueryBuilder := tracesV3.PrepareTracesQuery
if q.UseTraceNewSchema {
tracesQueryBuilder = tracesV4.PrepareTracesQuery
}

var query string
var err error
// for ts query with group by and limit form two queries
if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
limitQuery, err := tracesQueryBuilder(
limitQuery, err := tracesV3.PrepareTracesQuery(
start,
end,
params.CompositeQuery.PanelType,
@@ -179,7 +173,7 @@ func (q *querier) runBuilderQuery(
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
return
}
placeholderQuery, err := tracesQueryBuilder(
placeholderQuery, err := tracesV3.PrepareTracesQuery(
start,
end,
params.CompositeQuery.PanelType,
@@ -192,7 +186,7 @@ func (q *querier) runBuilderQuery(
}
query = fmt.Sprintf(placeholderQuery, limitQuery)
} else {
query, err = tracesQueryBuilder(
query, err = tracesV3.PrepareTracesQuery(
start,
end,
params.CompositeQuery.PanelType,
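The group-by-with-limit path above builds two statements: a limit query that finds the top-N group values, and a placeholder query with a %s slot that the limit query is substituted into via fmt.Sprintf. A minimal sketch of that substitution pattern, with illustrative SQL rather than the generated queries:

package main

import "fmt"

func main() {
	// First query: pick the top-N group values.
	limitQuery := "SELECT service_name FROM spans GROUP BY service_name ORDER BY count() DESC LIMIT 10"

	// Second query: full aggregation, restricted to those groups via a %s slot.
	placeholderQuery := "SELECT service_name, toStartOfMinute(timestamp) AS ts, count() " +
		"FROM spans WHERE service_name GLOBAL IN (%s) GROUP BY service_name, ts"

	// The final statement embeds the limit query into the placeholder,
	// mirroring query = fmt.Sprintf(placeholderQuery, limitQuery) above.
	query := fmt.Sprintf(placeholderQuery, limitQuery)
	fmt.Println(query)
}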
@@ -11,8 +11,6 @@ import (
metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
tracesV4 "go.signoz.io/signoz/pkg/query-service/app/traces/v4"

"go.signoz.io/signoz/pkg/query-service/common"
chErrors "go.signoz.io/signoz/pkg/query-service/errors"
"go.signoz.io/signoz/pkg/query-service/querycache"
@@ -54,8 +52,7 @@ type querier struct {
returnedSeries []*v3.Series
returnedErr error

UseLogsNewSchema bool
UseTraceNewSchema bool
UseLogsNewSchema bool
}

type QuerierOptions struct {
@@ -66,11 +63,10 @@ type QuerierOptions struct {
FeatureLookup interfaces.FeatureLookup

// used for testing
TestingMode bool
ReturnedSeries []*v3.Series
ReturnedErr error
UseLogsNewSchema bool
UseTraceNewSchema bool
TestingMode bool
ReturnedSeries []*v3.Series
ReturnedErr error
UseLogsNewSchema bool
}

func NewQuerier(opts QuerierOptions) interfaces.Querier {
@@ -78,10 +74,6 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
if opts.UseLogsNewSchema {
logsQueryBuilder = logsV4.PrepareLogsQuery
}
tracesQueryBuilder := tracesV3.PrepareTracesQuery
if opts.UseTraceNewSchema {
tracesQueryBuilder = tracesV4.PrepareTracesQuery
}

qc := querycache.NewQueryCache(querycache.WithCache(opts.Cache), querycache.WithFluxInterval(opts.FluxInterval))

@@ -93,17 +85,16 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
fluxInterval: opts.FluxInterval,

builder: queryBuilder.NewQueryBuilder(queryBuilder.QueryBuilderOptions{
BuildTraceQuery: tracesQueryBuilder,
BuildTraceQuery: tracesV3.PrepareTracesQuery,
BuildLogQuery: logsQueryBuilder,
BuildMetricQuery: metricsV3.PrepareMetricQuery,
}, opts.FeatureLookup),
featureLookUp: opts.FeatureLookup,

testingMode: opts.TestingMode,
returnedSeries: opts.ReturnedSeries,
returnedErr: opts.ReturnedErr,
UseLogsNewSchema: opts.UseLogsNewSchema,
UseTraceNewSchema: opts.UseTraceNewSchema,
testingMode: opts.TestingMode,
returnedSeries: opts.ReturnedSeries,
returnedErr: opts.ReturnedErr,
UseLogsNewSchema: opts.UseLogsNewSchema,
}
}

@@ -317,7 +308,7 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
return results, errQueriesByName, err
}

func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
res := make([]*v3.Result, 0)
qName := ""
pageSize := uint64(0)
@@ -378,18 +369,14 @@ func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryR

func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, map[string]error, error) {
// List query has support for only one query.
if q.UseLogsNewSchema && params.CompositeQuery != nil &&
len(params.CompositeQuery.BuilderQueries) == 1 &&
params.CompositeQuery.PanelType != v3.PanelTypeTrace {
if q.UseLogsNewSchema && params.CompositeQuery != nil && len(params.CompositeQuery.BuilderQueries) == 1 {
for _, v := range params.CompositeQuery.BuilderQueries {
// only allow logs queries with timestamp ordering desc
// TODO(nitya): allow for timestamp asc
if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
len(v.OrderBy) == 1 &&
v.OrderBy[0].ColumnName == "timestamp" &&
v.OrderBy[0].Order == "desc" {
if v.DataSource == v3.DataSourceLogs && len(v.OrderBy) == 1 && v.OrderBy[0].ColumnName == "timestamp" && v.OrderBy[0].Order == "desc" {
startEndArr := utils.GetLogsListTsRanges(params.Start, params.End)
return q.runWindowBasedListQuery(ctx, params, startEndArr)
if len(startEndArr) > 0 {
return q.runLogsListQuery(ctx, params, startEndArr)
}
}
}
}
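runLogsListQuery (formerly runWindowBasedListQuery) pages through time windows rather than scanning the whole range at once, and the new len(startEndArr) > 0 guard ensures the dispatch only happens when windowing actually produced ranges. A rough sketch of the windowing idea, assuming GetLogsListTsRanges splits [start, end] into growing slices; the doubling heuristic here is illustrative, not the library's exact logic:

package main

import "fmt"

type tsRange struct{ Start, End int64 }

// getListTsRanges splits [start, end] into windows that grow as they move
// further back in time, so recent data (most likely to fill the page) is
// scanned first. Doubling is an illustrative heuristic only.
func getListTsRanges(start, end int64) []tsRange {
	var ranges []tsRange
	window := int64(60 * 60 * 1000) // start with one hour, in ms
	for cur := end; cur > start; {
		from := cur - window
		if from < start {
			from = start
		}
		ranges = append(ranges, tsRange{Start: from, End: cur})
		cur = from
		window *= 2
	}
	return ranges
}

func main() {
	// Ten hours of history split into four progressively wider windows.
	for _, r := range getListTsRanges(0, 10*60*60*1000) {
		fmt.Printf("[%d, %d]\n", r.Start, r.End)
	}
}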
@@ -11,7 +11,6 @@ import (
metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
tracesV4 "go.signoz.io/signoz/pkg/query-service/app/traces/v4"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@@ -159,16 +158,11 @@ func (q *querier) runBuilderQuery(

if builderQuery.DataSource == v3.DataSourceTraces {

tracesQueryBuilder := tracesV3.PrepareTracesQuery
if q.UseTraceNewSchema {
tracesQueryBuilder = tracesV4.PrepareTracesQuery
}

var query string
var err error
// for ts query with group by and limit form two queries
if params.CompositeQuery.PanelType == v3.PanelTypeGraph && builderQuery.Limit > 0 && len(builderQuery.GroupBy) > 0 {
limitQuery, err := tracesQueryBuilder(
limitQuery, err := tracesV3.PrepareTracesQuery(
start,
end,
params.CompositeQuery.PanelType,
@@ -179,7 +173,7 @@ func (q *querier) runBuilderQuery(
ch <- channelResult{Err: err, Name: queryName, Query: limitQuery, Series: nil}
return
}
placeholderQuery, err := tracesQueryBuilder(
placeholderQuery, err := tracesV3.PrepareTracesQuery(
start,
end,
params.CompositeQuery.PanelType,
@@ -192,7 +186,7 @@ func (q *querier) runBuilderQuery(
}
query = fmt.Sprintf(placeholderQuery, limitQuery)
} else {
query, err = tracesQueryBuilder(
query, err = tracesV3.PrepareTracesQuery(
start,
end,
params.CompositeQuery.PanelType,

@@ -11,7 +11,6 @@ import (
metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
tracesV4 "go.signoz.io/signoz/pkg/query-service/app/traces/v4"
"go.signoz.io/signoz/pkg/query-service/common"
chErrors "go.signoz.io/signoz/pkg/query-service/errors"
"go.signoz.io/signoz/pkg/query-service/querycache"
@@ -49,11 +48,10 @@ type querier struct {
testingMode bool
queriesExecuted []string
// tuple of start and end time in milliseconds
timeRanges [][]int
returnedSeries []*v3.Series
returnedErr error
UseLogsNewSchema bool
UseTraceNewSchema bool
timeRanges [][]int
returnedSeries []*v3.Series
returnedErr error
UseLogsNewSchema bool
}

type QuerierOptions struct {
@@ -64,11 +62,10 @@ type QuerierOptions struct {
FeatureLookup interfaces.FeatureLookup

// used for testing
TestingMode bool
ReturnedSeries []*v3.Series
ReturnedErr error
UseLogsNewSchema bool
UseTraceNewSchema bool
TestingMode bool
ReturnedSeries []*v3.Series
ReturnedErr error
UseLogsNewSchema bool
}

func NewQuerier(opts QuerierOptions) interfaces.Querier {
@@ -77,11 +74,6 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
logsQueryBuilder = logsV4.PrepareLogsQuery
}

tracesQueryBuilder := tracesV3.PrepareTracesQuery
if opts.UseTraceNewSchema {
tracesQueryBuilder = tracesV4.PrepareTracesQuery
}

qc := querycache.NewQueryCache(querycache.WithCache(opts.Cache), querycache.WithFluxInterval(opts.FluxInterval))

return &querier{
@@ -92,17 +84,16 @@ func NewQuerier(opts QuerierOptions) interfaces.Querier {
fluxInterval: opts.FluxInterval,

builder: queryBuilder.NewQueryBuilder(queryBuilder.QueryBuilderOptions{
BuildTraceQuery: tracesQueryBuilder,
BuildTraceQuery: tracesV3.PrepareTracesQuery,
BuildLogQuery: logsQueryBuilder,
BuildMetricQuery: metricsV4.PrepareMetricQuery,
}, opts.FeatureLookup),
featureLookUp: opts.FeatureLookup,

testingMode: opts.TestingMode,
returnedSeries: opts.ReturnedSeries,
returnedErr: opts.ReturnedErr,
UseLogsNewSchema: opts.UseLogsNewSchema,
UseTraceNewSchema: opts.UseTraceNewSchema,
testingMode: opts.TestingMode,
returnedSeries: opts.ReturnedSeries,
returnedErr: opts.ReturnedErr,
UseLogsNewSchema: opts.UseLogsNewSchema,
}
}

@@ -317,7 +308,7 @@ func (q *querier) runClickHouseQueries(ctx context.Context, params *v3.QueryRang
return results, errQueriesByName, err
}

func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
func (q *querier) runLogsListQuery(ctx context.Context, params *v3.QueryRangeParamsV3, tsRanges []utils.LogsListTsRange) ([]*v3.Result, map[string]error, error) {
res := make([]*v3.Result, 0)
qName := ""
pageSize := uint64(0)
@@ -378,18 +369,14 @@ func (q *querier) runWindowBasedListQuery(ctx context.Context, params *v3.QueryR

func (q *querier) runBuilderListQueries(ctx context.Context, params *v3.QueryRangeParamsV3) ([]*v3.Result, map[string]error, error) {
// List query has support for only one query.
if q.UseLogsNewSchema && params.CompositeQuery != nil &&
len(params.CompositeQuery.BuilderQueries) == 1 &&
params.CompositeQuery.PanelType != v3.PanelTypeTrace {
if q.UseLogsNewSchema && params.CompositeQuery != nil && len(params.CompositeQuery.BuilderQueries) == 1 {
for _, v := range params.CompositeQuery.BuilderQueries {
// only allow logs queries with timestamp ordering desc
// TODO(nitya): allow for timestamp asc
if (v.DataSource == v3.DataSourceLogs || v.DataSource == v3.DataSourceTraces) &&
len(v.OrderBy) == 1 &&
v.OrderBy[0].ColumnName == "timestamp" &&
v.OrderBy[0].Order == "desc" {
if v.DataSource == v3.DataSourceLogs && len(v.OrderBy) == 1 && v.OrderBy[0].ColumnName == "timestamp" && v.OrderBy[0].Order == "desc" {
startEndArr := utils.GetLogsListTsRanges(params.Start, params.End)
return q.runWindowBasedListQuery(ctx, params, startEndArr)
if len(startEndArr) > 0 {
return q.runLogsListQuery(ctx, params, startEndArr)
}
}
}
}