Compare commits

27 Commits

Author SHA1 Message Date
Prashant Shahi
6c192f1242 Merge branch 'develop' into release/v0.48.x 2024-06-20 18:46:14 +05:30
Yunus M
adfeaaa1f0 feat: pass fill gaps to query range api (#5276) 2024-06-20 18:34:05 +05:30
Srikanth Chekuri
6ee9705599 chore: bump SigNoz/signoz-otel-collector and SigNoz/prometheus (#5294) 2024-06-20 18:33:45 +05:30
Vikrant Gupta
67965c8e4d fix: dependent variable panel not updating (#5283)
* fix: dependent variable panel not updating

* fix: build issues
2024-06-20 17:21:04 +05:30
Yunus M
38b1de5ccc feat: [5038] enable list view - add to dashboard (#5268)
* feat: [5038] enable list view - add to dashboard

* feat: pass page size for list view export
2024-06-20 17:05:18 +05:30
Yunus M
64e06ab3f9 fix: update from typography link to react router dom link component to maintain global state (#5279) 2024-06-20 11:34:27 +05:30
Prashant Shahi
537641000d chore(signoz): 📌 pin versions: SigNoz 0.48.0, SigNoz OtelCollector 0.102.0
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-06-19 21:14:45 +05:30
Prashant Shahi
4916cf5083 Merge branch 'main' into release/v0.48.x 2024-06-19 20:57:30 +05:30
Vikrant Gupta
f3c2fb0246 fix: dashboard empty state learn more link not working (#5287) 2024-06-19 20:31:36 +05:30
Nityananda Gohain
a4e98e565d feat: sanitize query and remove groupBy for list panel query (#5285) 2024-06-19 15:40:34 +05:30
Srikanth Chekuri
faa1728b8c chore: threshold rule panel type to graph (#5284) 2024-06-19 14:19:30 +05:30
SagarRajput-7
b69e97d7b0 fix: fixed flakiness in alert list actions - delete, edit, clone & toggle (#5237)
* fix: fixed flakiness in alert list actions - delete, edit, clone & toggle

* fix: added onhover dropdown open and close
2024-06-19 12:10:43 +05:30
Vikrant Gupta
c0195e9dc9 fix: added null checks for stacked bar chart with fallbacks (#5282) 2024-06-19 11:50:18 +05:30
Vishal Sharma
b69545a771 fix: update companyDomain in before firing every event (#5275) 2024-06-19 10:49:57 +05:30
Vikrant Gupta
9a6db272c1 fix: update the error boundary components with sentry error boundary components (#5271)
* fix: update the error boundary components with sentry error boundary components

* fix: update fallback components
2024-06-18 19:04:06 +05:30
SagarRajput-7
45d6430ab3 fix: fixed panelType when creating alerts from histogram dashboard (#5251)
* fix: fixed panelType when creating alerts from histogram dashboard

* fix: added PANEL_TYPES.TIME_SERIES always for all type in case of alerts
2024-06-18 13:48:28 +05:30
SagarRajput-7
cf7bf32ac2 fix: fixed lightMode style for histogram panel (#5236) 2024-06-18 13:38:30 +05:30
SagarRajput-7
1695b4f06d fix: convert timestamp in new trace explorer to user browser timezone and readable format (#5235)
* fix: convert timestamp in new trace explorer to user browser timezone and readable format

* fix: code refactor
2024-06-18 13:24:54 +05:30
SagarRajput-7
a65d5095a0 feat: added checkbox selection in dashboard variables (#5191)
* feat: added checkbox selection in dashboard variables

* feat: added checkbox selection - handling with only and all

* feat: added checkbox selection - style changes

* fix: fixed deselecting all options

* feat: fixed all showing up in single select

* feat: improve styles

* feat: fixed single select getting all values and array issues

* feat: updated test case

* feat: added max tag shown logic with count length and info on hover for overflowed content
2024-06-18 13:02:15 +05:30
Vikrant Gupta
0fade428ef fix: table row data doesn't align with the response (#5248)
* fix: wrong values getting associated with the table rows

* fix: table columns rendering

* fix: remove console logs
2024-06-18 12:25:10 +05:30
Vikrant Gupta
3b4b9e43b3 fix: trace explorer not highlighting in sidenav (#5263) 2024-06-18 11:58:35 +05:30
Srikanth Chekuri
c104b758ba chore: adjust the step interval for builder queries (#5253) 2024-06-17 22:59:28 +05:30
Vikrant Gupta
2a4e97f8da fix: table sorting when units are present (#5249) 2024-06-17 15:51:04 +05:30
Srikanth Chekuri
f1b5da9916 chore: fix elapsed time formatting (#5238) 2024-06-17 09:00:55 +05:30
Prashant Shahi
b57a24a177 Merge pull request #5151 from SigNoz/release/v0.47.x
Release/v0.47.x
2024-06-05 19:51:47 +05:30
Prashant Shahi
a6e005e3a2 Merge branch 'develop' into release/v0.47.x 2024-06-05 19:43:32 +05:30
Prashant Shahi
4d375e7cc3 chore(signoz): 📌 pin versions: SigNoz 0.47.0, SigNoz OtelCollector 0.88.26
Signed-off-by: Prashant Shahi <prashant@signoz.io>
2024-06-05 19:12:50 +05:30
90 changed files with 1716 additions and 1326 deletions

View File

@@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
-image: signoz/query-service:0.46.0
+image: signoz/query-service:0.48.0
command:
[
"-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
<<: *db-depend
frontend:
-image: signoz/frontend:0.46.0
+image: signoz/frontend:0.48.0
deploy:
restart_policy:
condition: on-failure
@@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
-image: signoz/signoz-otel-collector:0.88.24
+image: signoz/signoz-otel-collector:0.102.0
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
-image: signoz/signoz-schema-migrator:0.88.24
+image: signoz/signoz-schema-migrator:0.102.0
deploy:
restart_policy:
condition: on-failure

View File

@@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
-image: signoz/signoz-otel-collector:0.88.24
+image: signoz/signoz-otel-collector:0.102.0
command:
[
"--config=/etc/otel-collector-config.yaml",

View File

@@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
-image: signoz/query-service:${DOCKER_TAG:-0.46.0}
+image: signoz/query-service:${DOCKER_TAG:-0.48.0}
container_name: signoz-query-service
command:
[
@@ -204,7 +204,7 @@ services:
<<: *db-depend
frontend:
-image: signoz/frontend:${DOCKER_TAG:-0.46.0}
+image: signoz/frontend:${DOCKER_TAG:-0.48.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -216,7 +216,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -230,7 +230,7 @@ services:
otel-collector:
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.0}
container_name: signoz-otel-collector
command:
[

View File

@@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
-image: signoz/query-service:${DOCKER_TAG:-0.46.0}
+image: signoz/query-service:${DOCKER_TAG:-0.48.0}
container_name: signoz-query-service
command:
[
@@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
-image: signoz/frontend:${DOCKER_TAG:-0.46.0}
+image: signoz/frontend:${DOCKER_TAG:-0.48.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
-image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
+image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
otel-collector:
-image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
+image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.0}
container_name: signoz-otel-collector
command:
[

View File

@@ -35,14 +35,14 @@ func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
req := basemodel.LoginRequest{}
err := parseRequest(r, &req)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
ctx := context.Background()
if req.Email != "" && ah.CheckFeature(model.SSO) {
var apierr *basemodel.ApiError
var apierr basemodel.BaseApiError
_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
if apierr != nil && !apierr.IsNil() {
RespondError(w, apierr, nil)
@@ -74,7 +74,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
requestBody, err := io.ReadAll(r.Body)
if err != nil {
zap.L().Error("received no input in api", zap.Error(err))
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -82,7 +82,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
if err != nil {
zap.L().Error("received invalid user registration request", zap.Error(err))
RespondError(w, basemodel.BadRequest(fmt.Errorf("failed to register user")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
return
}
@@ -90,13 +90,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
invite, err := baseauth.ValidateInvite(ctx, req)
if err != nil {
zap.L().Error("failed to validate invite token", zap.Error(err))
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
if invite == nil {
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
RespondError(w, basemodel.BadRequest(basemodel.ErrSignupFailed{}), nil)
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
return
}
@@ -104,7 +104,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
RespondError(w, basemodel.InternalError(basemodel.ErrSignupFailed{}), nil)
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
}
precheckResp := &basemodel.PrecheckResponse{
@@ -120,7 +120,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
return
}
var precheckError *basemodel.ApiError
var precheckError basemodel.BaseApiError
precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
if precheckError != nil {
@@ -130,7 +130,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
} else {
// no-sso, validate password
if err := baseauth.ValidatePassword(req.Password); err != nil {
RespondError(w, basemodel.InternalError(fmt.Errorf("password is not in a valid format")), nil)
RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
return
}
@@ -155,7 +155,7 @@ func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
inviteObject, err := baseauth.GetInvite(context.Background(), token)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/google/uuid"
"github.com/gorilla/mux"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) {
@@ -28,12 +27,12 @@ func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) {
req := model.OrgDomain{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
if err := req.ValidNew(); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -51,18 +50,18 @@ func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) {
domainIdStr := mux.Vars(r)["id"]
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
req := model.OrgDomain{Id: domainId}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
req.Id = domainId
if err := req.Valid(nil); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
}
if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil {
@@ -78,7 +77,7 @@ func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) {
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, basemodel.BadRequest(fmt.Errorf("invalid domain id")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil)
return
}

View File

@@ -9,7 +9,6 @@ import (
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -72,12 +71,12 @@ func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) {
var l model.License
if err := json.NewDecoder(r.Body).Decode(&l); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
if l.Key == "" {
RespondError(w, basemodel.BadRequest(fmt.Errorf("license key is required")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
return
}
license, apiError := ah.LM().Activate(r.Context(), l.Key)
@@ -101,20 +100,20 @@ func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
hClient := &http.Client{}
req, err := http.NewRequest("POST", constants.LicenseSignozIo+"/checkout", r.Body)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
licenseResp, err := hClient.Do(req)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
// decode response body
var resp checkoutResponse
if err := json.NewDecoder(licenseResp.Body).Decode(&resp); err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
@@ -125,7 +124,7 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
licenseKey := r.URL.Query().Get("licenseKey")
if licenseKey == "" {
RespondError(w, basemodel.BadRequest(fmt.Errorf("license key is required")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
return
}
@@ -134,20 +133,20 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
hClient := &http.Client{}
req, err := http.NewRequest("GET", billingURL, nil)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
billingResp, err := hClient.Do(req)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
// decode response body
var billingResponse billingDetails
if err := json.NewDecoder(billingResp.Body).Decode(&billingResponse); err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
@@ -252,20 +251,20 @@ func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
hClient := &http.Client{}
req, err := http.NewRequest("POST", constants.LicenseSignozIo+"/portal", r.Body)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
licenseResp, err := hClient.Do(req)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
// decode response body
var resp checkoutResponse
if err := json.NewDecoder(licenseResp.Body).Decode(&resp); err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}

View File

@@ -31,13 +31,13 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
req := model.CreatePATRequestBody{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
@@ -49,7 +49,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
}
err = validatePATRequest(pat)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -66,7 +66,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
}
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
var apierr *basemodel.ApiError
var apierr basemodel.BaseApiError
if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil {
RespondError(w, apierr, nil)
return
@@ -93,14 +93,14 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
req := model.PAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
@@ -108,7 +108,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
err = validatePATRequest(req)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -116,7 +116,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
req.UpdatedAt = time.Now().Unix()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr *basemodel.ApiError
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil {
RespondError(w, apierr, nil)
return
@@ -129,8 +129,8 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
@@ -149,8 +149,8 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return

View File

@@ -7,6 +7,6 @@ import (
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
func RespondError(w http.ResponseWriter, apiErr *basemodel.ApiError, data interface{}) {
baseapp.RespondError(w, apiErr)
func RespondError(w http.ResponseWriter, apiErr basemodel.BaseApiError, data interface{}) {
baseapp.RespondError(w, apiErr, data)
}

View File

@@ -4,6 +4,7 @@ import (
"net/http"
"go.signoz.io/signoz/ee/query-service/app/db"
"go.signoz.io/signoz/ee/query-service/model"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
@@ -18,7 +19,7 @@ func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
}
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, "Error reading params")
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}

View File

@@ -21,7 +21,7 @@ import (
// GetMetricResultEE runs the query and returns list of time series
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {
defer utils.Elapsed("GetMetricResult")()
defer utils.Elapsed("GetMetricResult", nil)()
zap.L().Info("Executing metric result query: ", zap.String("query", query))
var hash string

View File

@@ -21,24 +21,24 @@ type ModelDao interface {
DB() *sqlx.DB
// auth methods
CanUsePassword(ctx context.Context, email string) (bool, *basemodel.ApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr *basemodel.ApiError)
CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError)
GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*model.OrgDomain, error)
// org domain (auth domains) CRUD ops
ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, *basemodel.ApiError)
GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, *basemodel.ApiError)
CreateDomain(ctx context.Context, d *model.OrgDomain) *basemodel.ApiError
UpdateDomain(ctx context.Context, domain *model.OrgDomain) *basemodel.ApiError
DeleteDomain(ctx context.Context, id uuid.UUID) *basemodel.ApiError
GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, *basemodel.ApiError)
ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError)
GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError)
CreateDomain(ctx context.Context, d *model.OrgDomain) basemodel.BaseApiError
UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *basemodel.ApiError)
UpdatePAT(ctx context.Context, p model.PAT, id string) *basemodel.ApiError
GetPAT(ctx context.Context, pat string) (*model.PAT, *basemodel.ApiError)
UpdatePATLastUsed(ctx context.Context, pat string, lastUsed int64) *basemodel.ApiError
GetPATByID(ctx context.Context, id string) (*model.PAT, *basemodel.ApiError)
GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, *basemodel.ApiError)
ListPATs(ctx context.Context) ([]model.PAT, *basemodel.ApiError)
RevokePAT(ctx context.Context, id string, userID string) *basemodel.ApiError
CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, p model.PAT, id string) basemodel.BaseApiError
GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
UpdatePATLastUsed(ctx context.Context, pat string, lastUsed int64) basemodel.BaseApiError
GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError)
ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError
}
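
The interface above now returns `basemodel.BaseApiError` instead of the concrete `*basemodel.ApiError` pointer, and call sites elsewhere in this compare guard with checks like `if apierr != nil && !apierr.IsNil()`. As a hedged, standalone illustration (not code from this repository; the type names are stand-ins), the Go sketch below shows the typed-nil behaviour that makes such an IsNil guard necessary when a concrete error pointer flows through an interface:

package main

import "fmt"

// Illustrative stand-ins for the error types seen in this compare.
type BaseApiError interface {
	Error() string
	IsNil() bool
}

type ApiError struct{ Err error }

func (a *ApiError) Error() string { return a.Err.Error() }
func (a *ApiError) IsNil() bool   { return a == nil || a.Err == nil }

// fetch returns a concrete *ApiError; a nil pointer stored in a
// BaseApiError interface value is no longer == nil.
func fetch() *ApiError { return nil }

func main() {
	var apierr BaseApiError = fetch()
	fmt.Println(apierr == nil)                    // false: the interface wraps a nil *ApiError
	fmt.Println(apierr != nil && !apierr.IsNil()) // false: the IsNil guard handles it
}

Declaring the interface type directly in the DAO signatures (and in handler-side declarations such as `var apierr basemodel.BaseApiError`) removes that implicit pointer-to-interface conversion at every call site.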

View File

@@ -17,19 +17,19 @@ import (
"go.uber.org/zap"
)
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*basemodel.User, *basemodel.ApiError) {
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*basemodel.User, basemodel.BaseApiError) {
// get auth domain from email domain
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, basemodel.InternalError(fmt.Errorf("failed to get domain from email"))
return nil, model.InternalErrorStr("failed to get domain from email")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, basemodel.InternalError(fmt.Errorf("failed to generate password hash"))
return nil, model.InternalErrorStr("failed to generate password hash")
}
group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
@@ -61,12 +61,12 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
// PrepareSsoRedirect prepares redirect page link after SSO response
// is successfully parsed (i.e. valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr *basemodel.ApiError) {
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError) {
userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", basemodel.BadRequest(fmt.Errorf("invalid user email received from the auth provider"))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}
user := &basemodel.User{}
@@ -85,7 +85,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
tokenStore, err := baseauth.GenerateJWTForUser(user)
if err != nil {
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", basemodel.InternalError(fmt.Errorf("failed to generate token for the user"))
return "", model.InternalErrorStr("failed to generate token for the user")
}
return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
@@ -95,7 +95,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
tokenStore.RefreshJwt), nil
}
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, *basemodel.ApiError) {
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
return false, apierr
@@ -110,7 +110,7 @@ func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, *bas
}
if userPayload.Role != baseconst.AdminGroup {
return false, basemodel.BadRequest(fmt.Errorf("auth method not supported"))
return false, model.BadRequest(fmt.Errorf("auth method not supported"))
}
}
@@ -120,7 +120,7 @@ func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, *bas
// PrecheckLogin is called when the login or signup page is loaded
// to check sso login is to be prompted
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, *basemodel.ApiError) {
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {
// assume user is valid unless proven otherwise
resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}
@@ -144,7 +144,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
ssoAvailable = false
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, &basemodel.ApiError{Err: err, Typ: basemodel.ErrorBadData}
return resp, model.BadRequestStr(err.Error())
}
}
@@ -177,7 +177,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, basemodel.InternalError(fmt.Errorf("failed to generate login request"))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
// build Idp URL that will authenticat the user
@@ -186,7 +186,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
if err != nil {
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, basemodel.InternalError(err)
return resp, model.InternalError(err)
}
// set SSO to true, as the url is generated correctly

View File

@@ -76,47 +76,47 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
}
// GetDomainByName returns org domain for a given domain name
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*model.OrgDomain, basemodel.BaseApiError) {
stored := StoredDomain{}
err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE name=$1 LIMIT 1`, name)
if err != nil {
if err == sql.ErrNoRows {
return nil, basemodel.BadRequest(fmt.Errorf("invalid domain name"))
return nil, model.BadRequest(fmt.Errorf("invalid domain name"))
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
return domain, nil
}
// GetDomain returns org domain for a given domain id
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError) {
stored := StoredDomain{}
err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE id=$1 LIMIT 1`, id)
if err != nil {
if err == sql.ErrNoRows {
return nil, basemodel.BadRequest(fmt.Errorf("invalid domain id"))
return nil, model.BadRequest(fmt.Errorf("invalid domain id"))
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
return domain, nil
}
// ListDomains gets the list of auth domains by org id
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError) {
domains := []model.OrgDomain{}
stored := []StoredDomain{}
@@ -126,7 +126,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
if err == sql.ErrNoRows {
return []model.OrgDomain{}, nil
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
for _, s := range stored {
@@ -141,20 +141,20 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
}
// CreateDomain creates a new auth domain
func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) *basemodel.ApiError {
func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
if domain.Id == uuid.Nil {
domain.Id = uuid.New()
}
if domain.OrgId == "" || domain.Name == "" {
return basemodel.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgId, Name "))
return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgId, Name "))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain creation failed"))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
_, err = m.DB().ExecContext(ctx,
@@ -168,24 +168,24 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) *b
if err != nil {
zap.L().Error("failed to insert domain in db", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain creation failed"))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
return nil
}
// UpdateDomain updates stored config params for a domain
func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) *basemodel.ApiError {
func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
if domain.Id == uuid.Nil {
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return basemodel.InternalError(fmt.Errorf("domain update failed"))
return model.InternalError(fmt.Errorf("domain update failed"))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain update failed"))
return model.InternalError(fmt.Errorf("domain update failed"))
}
_, err = m.DB().ExecContext(ctx,
@@ -196,18 +196,18 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) *b
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain update failed"))
return model.InternalError(fmt.Errorf("domain update failed"))
}
return nil
}
// DeleteDomain deletes an org domain
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) *basemodel.ApiError {
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
if id == uuid.Nil {
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return basemodel.InternalError(fmt.Errorf("domain delete failed"))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
_, err := m.DB().ExecContext(ctx,
@@ -216,21 +216,21 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) *basemodel.Ap
if err != nil {
zap.L().Error("domain delete failed", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain delete failed"))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
return nil
}
func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError) {
if email == "" {
return nil, basemodel.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
}
components := strings.Split(email, "@")
if len(components) < 2 {
return nil, basemodel.BadRequest(fmt.Errorf("invalid email address"))
return nil, model.BadRequest(fmt.Errorf("invalid email address"))
}
parsedDomain := components[1]
@@ -242,12 +242,12 @@ func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.O
if err == sql.ErrNoRows {
return nil, nil
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
return domain, nil
}

View File

@@ -11,7 +11,7 @@ import (
"go.uber.org/zap"
)
func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *basemodel.ApiError) {
func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError) {
result, err := m.DB().ExecContext(ctx,
"INSERT INTO personal_access_tokens (user_id, token, role, name, created_at, expires_at, updated_at, updated_by_user_id, last_used, revoked) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)",
p.UserID,
@@ -27,12 +27,12 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *base
)
if err != nil {
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
return model.PAT{}, basemodel.InternalError(fmt.Errorf("PAT insertion failed"))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
id, err := result.LastInsertId()
if err != nil {
zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err))
return model.PAT{}, basemodel.InternalError(fmt.Errorf("PAT insertion failed"))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
p.Id = strconv.Itoa(int(id))
createdByUser, _ := m.GetUser(ctx, p.UserID)
@@ -53,7 +53,7 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *base
return p, nil
}
func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) *basemodel.ApiError {
func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET role=$1, name=$2, updated_at=$3, updated_by_user_id=$4 WHERE id=$5 and revoked=false;",
p.Role,
@@ -63,29 +63,29 @@ func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) *basem
id)
if err != nil {
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("PAT update failed"))
return model.InternalError(fmt.Errorf("PAT update failed"))
}
return nil
}
func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed int64) *basemodel.ApiError {
func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed int64) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET last_used=$1 WHERE token=$2 and revoked=false;",
lastUsed,
token)
if err != nil {
zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("PAT last used update failed"))
return model.InternalError(fmt.Errorf("PAT last used update failed"))
}
return nil
}
func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, *basemodel.ApiError) {
func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil {
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch PATs"))
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}
for i := range pats {
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
@@ -123,28 +123,28 @@ func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, *basemodel.ApiErr
return pats, nil
}
func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) *basemodel.ApiError {
func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3",
userID, updatedAt, id)
if err != nil {
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("PAT revoke failed"))
return model.InternalError(fmt.Errorf("PAT revoke failed"))
}
return nil
}
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, *basemodel.ApiError) {
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE token=? and revoked=false;`, token); err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch PAT"))
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &basemodel.ApiError{
Typ: basemodel.ErrorInternal,
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token),
}
}
@@ -152,16 +152,16 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, *basem
return &pats[0], nil
}
func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, *basemodel.ApiError) {
func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE id=? and revoked=false;`, id); err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch PAT"))
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &basemodel.ApiError{
Typ: basemodel.ErrorInternal,
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token"),
}
}
@@ -170,7 +170,7 @@ func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, *base
}
// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, *basemodel.ApiError) {
func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError) {
users := []basemodel.UserPayload{}
query := `SELECT
@@ -186,12 +186,12 @@ func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.U
WHERE u.id = p.user_id and p.token=? and p.expires_at >= strftime('%s', 'now');`
if err := m.DB().Select(&users, query, token); err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
}
if len(users) != 1 {
return nil, &basemodel.ApiError{
Typ: basemodel.ErrorInternal,
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
}
}

View File

@@ -13,7 +13,6 @@ import (
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
var C *Client
@@ -38,7 +37,7 @@ func init() {
}
// ActivateLicense sends key to license.signoz.io and gets activation data
func ActivateLicense(key, siteId string) (*ActivationResponse, *basemodel.ApiError) {
func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) {
licenseReq := map[string]string{
"key": key,
"siteId": siteId,
@@ -49,13 +48,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *basemodel.ApiErr
if err != nil {
zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
return nil, basemodel.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
}
httpBody, err := io.ReadAll(httpResponse.Body)
if err != nil {
zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
return nil, basemodel.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
}
defer httpResponse.Body.Close()
@@ -65,22 +64,22 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *basemodel.ApiErr
err = json.Unmarshal(httpBody, &result)
if err != nil {
zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
return nil, basemodel.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
}
switch httpResponse.StatusCode {
case 200, 201:
return result.Data, nil
case 400, 401:
return nil, basemodel.BadRequest(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
return nil, model.BadRequest(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
default:
return nil, basemodel.InternalError(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
return nil, model.InternalError(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
}
}
// ValidateLicense validates the license key
func ValidateLicense(activationId string) (*ActivationResponse, *basemodel.ApiError) {
func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError) {
validReq := map[string]string{
"activationId": activationId,
}
@@ -89,12 +88,12 @@ func ValidateLicense(activationId string) (*ActivationResponse, *basemodel.ApiEr
response, err := http.Post(C.Prefix+"/licenses/validate", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
return nil, basemodel.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
return nil, model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}
body, err := io.ReadAll(response.Body)
if err != nil {
return nil, basemodel.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io"))
return nil, model.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io"))
}
defer response.Body.Close()
@@ -104,14 +103,14 @@ func ValidateLicense(activationId string) (*ActivationResponse, *basemodel.ApiEr
a := ActivationResult{}
err = json.Unmarshal(body, &a)
if err != nil {
return nil, basemodel.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
}
return a.Data, nil
case 400, 401:
return nil, basemodel.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
"bad request error received from license.signoz.io"))
default:
return nil, basemodel.InternalError(errors.Wrap(fmt.Errorf(string(body)),
return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
"internal error received from license.signoz.io"))
}
@@ -128,21 +127,21 @@ func NewPostRequestWithCtx(ctx context.Context, url string, contentType string,
}
// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage model.UsagePayload) *basemodel.ApiError {
func SendUsage(ctx context.Context, usage model.UsagePayload) *model.ApiError {
reqString, _ := json.Marshal(usage)
req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
return basemodel.BadRequest(errors.Wrap(err, "unable to create http request"))
return model.BadRequest(errors.Wrap(err, "unable to create http request"))
}
res, err := http.DefaultClient.Do(req)
if err != nil {
return basemodel.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
return model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}
body, err := io.ReadAll(res.Body)
if err != nil {
return basemodel.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
return model.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
}
defer res.Body.Close()
@@ -151,10 +150,10 @@ func SendUsage(ctx context.Context, usage model.UsagePayload) *basemodel.ApiErro
case 200, 201:
return nil
case 400, 401:
return basemodel.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
return model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
"bad request error received from license.signoz.io"))
default:
return basemodel.InternalError(errors.Wrap(fmt.Errorf(string(body)),
return model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
"internal error received from license.signoz.io"))
}
}

View File

@@ -137,11 +137,11 @@ func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error {
return nil
}
func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *basemodel.ApiError) {
func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *model.ApiError) {
licenses, err := lm.repo.GetLicenses(ctx)
if err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
for _, l := range licenses {
@@ -212,8 +212,8 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
if apiError != nil {
zap.L().Error("failed to validate license", zap.Any("apiError", apiError))
return apiError
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError.Err
}
if response.PlanDetails == lm.activeLicense.PlanDetails {
@@ -255,7 +255,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
}
// Activate activates a license key with signoz server
func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *basemodel.ApiError) {
func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) {
defer func() {
if errResponse != nil {
userEmail, err := auth.GetEmailFromJwt(ctx)
@@ -268,7 +268,7 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
response, apiError := validate.ActivateLicense(key, "")
if apiError != nil {
zap.L().Error("failed to activate license", zap.Any("apiError", apiError))
zap.L().Error("failed to activate license", zap.Error(apiError.Err))
return nil, apiError
}
@@ -283,14 +283,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
// store the license before activating it
err = lm.repo.InsertLicense(ctx, l)
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
// license is valid, activate it

View File

@@ -1,5 +1,107 @@
package model
import (
"fmt"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
type ApiError struct {
Typ basemodel.ErrorType
Err error
}
func (a *ApiError) Type() basemodel.ErrorType {
return a.Typ
}
func (a *ApiError) ToError() error {
if a != nil {
return a.Err
}
return a.Err
}
func (a *ApiError) Error() string {
return a.Err.Error()
}
func (a *ApiError) IsNil() bool {
return a == nil || a.Err == nil
}
// NewApiError returns a ApiError object of given type
func NewApiError(typ basemodel.ErrorType, err error) *ApiError {
return &ApiError{
Typ: typ,
Err: err,
}
}
// BadRequest returns a ApiError object of bad request
func BadRequest(err error) *ApiError {
return &ApiError{
Typ: basemodel.ErrorBadData,
Err: err,
}
}
// BadRequestStr returns a ApiError object of bad request for string input
func BadRequestStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorBadData,
Err: fmt.Errorf(s),
}
}
// InternalError returns a ApiError object of internal type
func InternalError(err error) *ApiError {
return &ApiError{
Typ: basemodel.ErrorInternal,
Err: err,
}
}
// InternalErrorStr returns a ApiError object of internal type for string input
func InternalErrorStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorInternal,
Err: fmt.Errorf(s),
}
}
var (
ErrorNone basemodel.ErrorType = ""
ErrorTimeout basemodel.ErrorType = "timeout"
ErrorCanceled basemodel.ErrorType = "canceled"
ErrorExec basemodel.ErrorType = "execution"
ErrorBadData basemodel.ErrorType = "bad_data"
ErrorInternal basemodel.ErrorType = "internal"
ErrorUnavailable basemodel.ErrorType = "unavailable"
ErrorNotFound basemodel.ErrorType = "not_found"
ErrorNotImplemented basemodel.ErrorType = "not_implemented"
ErrorUnauthorized basemodel.ErrorType = "unauthorized"
ErrorForbidden basemodel.ErrorType = "forbidden"
ErrorConflict basemodel.ErrorType = "conflict"
ErrorStreamingNotSupported basemodel.ErrorType = "streaming is not supported"
)
func init() {
ErrorNone = basemodel.ErrorNone
ErrorTimeout = basemodel.ErrorTimeout
ErrorCanceled = basemodel.ErrorCanceled
ErrorExec = basemodel.ErrorExec
ErrorBadData = basemodel.ErrorBadData
ErrorInternal = basemodel.ErrorInternal
ErrorUnavailable = basemodel.ErrorUnavailable
ErrorNotFound = basemodel.ErrorNotFound
ErrorNotImplemented = basemodel.ErrorNotImplemented
ErrorUnauthorized = basemodel.ErrorUnauthorized
ErrorForbidden = basemodel.ErrorForbidden
ErrorConflict = basemodel.ErrorConflict
ErrorStreamingNotSupported = basemodel.ErrorStreamingNotSupported
}
type ErrUnsupportedAuth struct{}
func (errUnsupportedAuth ErrUnsupportedAuth) Error() string {

View File

@@ -190,7 +190,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Any("apiErr", apiErr))
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break

View File

@@ -3,6 +3,7 @@ import './DropDown.styles.scss';
import { EllipsisOutlined } from '@ant-design/icons';
import { Button, Dropdown, MenuProps } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useState } from 'react';
function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
const isDarkMode = useIsDarkMode();
@@ -14,12 +15,24 @@ function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
}),
);
const [isDdOpen, setDdOpen] = useState<boolean>(false);
return (
<Dropdown menu={{ items }}>
<Dropdown
menu={{
items,
onMouseEnter: (): void => setDdOpen(true),
onMouseLeave: (): void => setDdOpen(false),
}}
open={isDdOpen}
>
<Button
type="link"
className={!isDarkMode ? 'dropdown-button--dark' : 'dropdown-button'}
onClick={(e): void => e.preventDefault()}
onClick={(e): void => {
e.preventDefault();
setDdOpen(true);
}}
>
<EllipsisOutlined className="dropdown-icon" />
</Button>

View File

@@ -1,6 +1,7 @@
/* eslint-disable sonarjs/cognitive-complexity */
import './Uplot.styles.scss';
import * as Sentry from '@sentry/react';
import { Typography } from 'antd';
import { ToggleGraphProps } from 'components/Graph/types';
import { LineChart } from 'lucide-react';
@@ -13,7 +14,6 @@ import {
useImperativeHandle,
useRef,
} from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import UPlot from 'uplot';
import { dataMatch, optionsUpdateState } from './utils';
@@ -139,7 +139,7 @@ const Uplot = forwardRef<ToggleGraphProps | undefined, UplotProps>(
}
return (
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="uplot-graph-container" ref={targetRef}>
{data && data[0] && data[0]?.length === 0 ? (
<div className="not-found">
@@ -147,7 +147,7 @@ const Uplot = forwardRef<ToggleGraphProps | undefined, UplotProps>(
</div>
) : null}
</div>
</ErrorBoundary>
</Sentry.ErrorBoundary>
);
},
);

View File

@@ -40,4 +40,5 @@ export const getComponentForPanelType = (
export const AVAILABLE_EXPORT_PANEL_TYPES = [
PANEL_TYPES.TIME_SERIES,
PANEL_TYPES.TABLE,
PANEL_TYPES.LIST,
];

View File

@@ -3,6 +3,7 @@
/* eslint-disable jsx-a11y/anchor-is-valid */
import './AppLayout.styles.scss';
import * as Sentry from '@sentry/react';
import { Flex } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
import getDynamicConfigs from 'api/dynamicConfigs/getDynamicConfigs';
@@ -27,7 +28,6 @@ import {
useRef,
useState,
} from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import { Helmet } from 'react-helmet-async';
import { useTranslation } from 'react-i18next';
import { useQueries } from 'react-query';
@@ -342,7 +342,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
/>
)}
<div className={cx('app-content', collapsed ? 'collapsed' : '')}>
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<LayoutContent>
<ChildrenContainer
style={{
@@ -360,7 +360,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
{children}
</ChildrenContainer>
</LayoutContent>
</ErrorBoundary>
</Sentry.ErrorBoundary>
</div>
</Flex>
</Layout>

View File

@@ -77,7 +77,8 @@ function FormAlertRules({
const urlQuery = useUrlQuery();
const panelType = urlQuery.get(QueryParams.panelTypes) as PANEL_TYPES | null;
// In case of alert the panel types should always be "Graph" only
const panelType = PANEL_TYPES.TIME_SERIES;
const {
currentQuery,

View File

@@ -108,6 +108,7 @@ function GridCardGraph({
query: updatedQuery,
globalSelectedInterval,
variables: getDashboardVariables(variables),
fillGaps: widget.fillSpans,
};
}
updatedQuery.builder.queryData[0].pageSize = 10;
@@ -122,6 +123,7 @@ function GridCardGraph({
limit: updatedQuery.builder.queryData[0].limit || 0,
},
},
fillGaps: widget.fillSpans,
};
});
@@ -152,6 +154,7 @@ function GridCardGraph({
widget?.query,
widget?.panelTypes,
widget.timePreferance,
widget.fillSpans,
requestData,
],
retry(failureCount, error): boolean {

View File

@@ -62,10 +62,11 @@ function GridTableComponent({
mutateDataSource = mutateDataSource.map(
(val): RowData => {
const newValue = val;
const newValue = { ...val };
Object.keys(val).forEach((k) => {
if (columnUnits[k]) {
newValue[k] = getYAxisFormattedValue(String(val[k]), columnUnits[k]);
newValue[`${k}_without_unit`] = val[k];
}
});
return newValue;
@@ -81,7 +82,6 @@ function GridTableComponent({
applyColumnUnits,
originalDataSource,
]);
useEffect(() => {
if (tableProcessedDataRef) {
// eslint-disable-next-line no-param-reassign
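Note: the GridTable change above clones each row before formatting and keeps the raw number under a `<key>_without_unit` field, so sorting can still use the unformatted value (see the sorter change in createTableColumnsFromQuery further down). A reduced sketch of that pattern; `formatWithUnit` is an illustrative stand-in for getYAxisFormattedValue.

type RowData = Record<string, string | number>;

// Illustrative formatter standing in for getYAxisFormattedValue.
const formatWithUnit = (value: string, unit: string): string => `${value} ${unit}`;

function applyColumnUnits(
	rows: RowData[],
	columnUnits: Record<string, string>,
): RowData[] {
	return rows.map((row) => {
		const next = { ...row }; // copy, never mutate the original row object
		Object.keys(row).forEach((key) => {
			if (columnUnits[key]) {
				next[key] = formatWithUnit(String(row[key]), columnUnits[key]);
				next[`${key}_without_unit`] = row[key]; // raw value kept for sorting
			}
		});
		return next;
	});
}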

View File

@@ -55,6 +55,9 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
role,
);
const [editLoader, setEditLoader] = useState<boolean>(false);
const [cloneLoader, setCloneLoader] = useState<boolean>(false);
const params = useUrlQuery();
const orderColumnParam = params.get('columnKey');
const orderQueryParam = params.get('order');
@@ -113,6 +116,7 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
}, [featureResponse, handleError]);
const onEditHandler = (record: GettableAlert) => (): void => {
setEditLoader(true);
featureResponse
.refetch()
.then(() => {
@@ -129,9 +133,11 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
params.set(QueryParams.ruleId, record.id.toString());
setEditLoader(false);
history.push(`${ROUTES.EDIT_ALERTS}?${params.toString()}`);
})
.catch(handleError);
.catch(handleError)
.finally(() => setEditLoader(false));
};
const onCloneHandler = (
@@ -143,33 +149,41 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
};
const apiReq = { data: copyAlert };
const response = await saveAlertApi(apiReq);
try {
setCloneLoader(true);
const response = await saveAlertApi(apiReq);
if (response.statusCode === 200) {
notificationsApi.success({
message: 'Success',
description: 'Alert cloned successfully',
});
if (response.statusCode === 200) {
notificationsApi.success({
message: 'Success',
description: 'Alert cloned successfully',
});
const { data: refetchData, status } = await refetch();
if (status === 'success' && refetchData.payload) {
setData(refetchData.payload || []);
setTimeout(() => {
const clonedAlert = refetchData.payload[refetchData.payload.length - 1];
params.set(QueryParams.ruleId, String(clonedAlert.id));
history.push(`${ROUTES.EDIT_ALERTS}?${params.toString()}`);
}, 2000);
}
if (status === 'error') {
const { data: refetchData, status } = await refetch();
if (status === 'success' && refetchData.payload) {
setData(refetchData.payload || []);
setTimeout(() => {
const clonedAlert = refetchData.payload[refetchData.payload.length - 1];
params.set(QueryParams.ruleId, String(clonedAlert.id));
history.push(`${ROUTES.EDIT_ALERTS}?${params.toString()}`);
}, 2000);
}
if (status === 'error') {
notificationsApi.error({
message: t('something_went_wrong'),
});
}
} else {
notificationsApi.error({
message: t('something_went_wrong'),
message: 'Error',
description: response.error || t('something_went_wrong'),
});
}
} else {
notificationsApi.error({
message: 'Error',
description: response.error || t('something_went_wrong'),
});
} catch (error) {
handleError();
console.error(error);
} finally {
setCloneLoader(false);
}
};
@@ -314,10 +328,20 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
setData={setData}
id={id}
/>,
<ColumnButton key="2" onClick={onEditHandler(record)} type="link">
<ColumnButton
key="2"
onClick={onEditHandler(record)}
type="link"
loading={editLoader}
>
Edit
</ColumnButton>,
<ColumnButton key="3" onClick={onCloneHandler(record)} type="link">
<ColumnButton
key="3"
onClick={onCloneHandler(record)}
type="link"
loading={cloneLoader}
>
Clone
</ColumnButton>,
<DeleteAlert
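Note: the essential shape of the new clone flow above — a loader flag that drives the button's `loading` prop plus a try/finally that always clears it — reduced to a sketch with stand-in API types (the real call is the alerts save API):

import { useState } from 'react';

// Minimal stand-ins for the real alert API shape and call.
type SaveResponse = { statusCode: number; error?: string };
const saveAlertApi = async (req: { data: unknown }): Promise<SaveResponse> => ({
	statusCode: 200,
});

function useCloneAlert(): {
	cloneLoader: boolean;
	clone: (copyAlert: unknown) => Promise<void>;
} {
	const [cloneLoader, setCloneLoader] = useState<boolean>(false);

	const clone = async (copyAlert: unknown): Promise<void> => {
		try {
			setCloneLoader(true); // drives the Clone button's `loading` prop
			const response = await saveAlertApi({ data: copyAlert });
			if (response.statusCode !== 200) {
				// surface the failure however the caller prefers (notification, etc.)
				throw new Error(response.error || 'something went wrong');
			}
		} finally {
			setCloneLoader(false); // always reset, whether the clone succeeded or not
		}
	};

	return { cloneLoader, clone };
}

export default useCloneAlert;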

View File

@@ -27,5 +27,8 @@ export const ColumnButton = styled(ButtonComponent)`
padding-left: 0;
padding-right: 0;
margin-right: 1.5em;
width: 100%;
display: flex;
align-items: center;
}
`;

View File

@@ -699,7 +699,16 @@ function DashboardsList(): JSX.Element {
New Dashboard
</Button>
</Dropdown>
<Button type="text" className="learn-more">
<Button
type="text"
className="learn-more"
onClick={(): void => {
window.open(
'https://signoz.io/docs/userguide/manage-dashboards?utm_source=product&utm_medium=dashboard-list-empty-state',
'_blank',
);
}}
>
Learn more
</Button>
<ArrowUpRight size={16} className="learn-more-arrow" />

View File

@@ -37,7 +37,7 @@ import { useNotifications } from 'hooks/useNotifications';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { FlatLogData } from 'lib/logs/flatLogData';
import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData';
import { defaultTo, isEmpty, omit } from 'lodash-es';
import { cloneDeep, defaultTo, isEmpty, omit, set } from 'lodash-es';
import { Sliders } from 'lucide-react';
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
@@ -117,6 +117,12 @@ function LogsExplorerViews({
return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
}, [stagedQuery]);
const { options, config } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});
const orderByTimestamp: OrderByPayload | null = useMemo(() => {
const timestampOrderBy = listQuery?.orderBy.find(
(item) => item.columnName === 'timestamp',
@@ -174,10 +180,10 @@ function LogsExplorerViews({
() =>
updateAllQueriesOperators(
currentQuery || initialQueriesMap.logs,
PANEL_TYPES.TIME_SERIES,
selectedPanelType,
DataSource.LOGS,
),
[currentQuery, updateAllQueriesOperators],
[currentQuery, selectedPanelType, updateAllQueriesOperators],
);
const handleModeChange = (panelType: PANEL_TYPES): void => {
@@ -309,6 +315,14 @@ function LogsExplorerViews({
isLoading: isUpdateDashboardLoading,
} = useUpdateDashboard();
const getUpdatedQueryForExport = useCallback((): Query => {
const updatedQuery = cloneDeep(currentQuery);
set(updatedQuery, 'builder.queryData[0].pageSize', 10);
return updatedQuery;
}, [currentQuery]);
const handleExport = useCallback(
(dashboard: Dashboard | null): void => {
if (!dashboard || !panelType) return;
@@ -319,11 +333,17 @@ function LogsExplorerViews({
const widgetId = v4();
const query =
panelType === PANEL_TYPES.LIST
? getUpdatedQueryForExport()
: exportDefaultQuery;
const updatedDashboard = addEmptyWidgetInDashboardJSONWithQuery(
dashboard,
exportDefaultQuery,
query,
widgetId,
panelTypeParam,
options.selectColumns,
);
updateDashboard(updatedDashboard, {
@@ -353,7 +373,7 @@ function LogsExplorerViews({
}
const dashboardEditView = generateExportToDashboardLink({
query: exportDefaultQuery,
query,
panelType: panelTypeParam,
dashboardId: data.payload?.uuid || '',
widgetId,
@@ -365,7 +385,9 @@ function LogsExplorerViews({
});
},
[
getUpdatedQueryForExport,
exportDefaultQuery,
options.selectColumns,
history,
notifications,
panelType,
@@ -460,12 +482,6 @@ function LogsExplorerViews({
selectedView,
]);
const { options, config } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.METRICS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});
const chartData = useMemo(() => {
if (!stagedQuery) return [];
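Note: for the list-panel export, the staged query is cloned and its page size pinned before being written into the dashboard widget. A condensed version of the helper introduced above, assuming `currentQuery` comes from the query-builder context:

import { cloneDeep, set } from 'lodash-es';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

export const getUpdatedQueryForExport = (currentQuery: Query): Query => {
	const updatedQuery = cloneDeep(currentQuery); // never mutate the staged query
	// Cap the exported list query to 10 rows per page.
	set(updatedQuery, 'builder.queryData[0].pageSize', 10);
	return updatedQuery;
};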

View File

@@ -5,7 +5,7 @@
height: 100%;
.resize-table {
height: calc(100% - 40px);
height: calc(100% - 70px);
overflow: scroll;
overflow-x: hidden;

View File

@@ -40,12 +40,46 @@
}
.variable-select {
.ant-select-dropdown {
max-width: 300px;
.ant-select-item {
display: flex;
align-items: center;
}
.all-label {
display: flex;
gap: 16px;
}
.dropdown-checkbox-label {
display: grid;
grid-template-columns: 24px 1fr;
}
.dropdown-value {
display: flex;
justify-content: space-between;
align-items: center;
.option-text {
max-width: 180px;
padding: 0 8px;
}
.toggle-tag-label {
padding-left: 8px;
right: 40px;
font-weight: normal;
position: absolute;
}
}
}
}
.dropdown-styles {
min-width: 300px;
max-width: 350px;
}
.lightMode {
.variable-item {
.variable-name {

View File

@@ -138,6 +138,7 @@ function DashboardVariableSelection(): JSX.Element | null {
}}
onValueUpdate={onValueUpdate}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
/>
))}
</Row>

View File

@@ -54,6 +54,7 @@ describe('VariableItem', () => {
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
/>
</MockQueryClientProvider>,
);
@@ -69,6 +70,7 @@ describe('VariableItem', () => {
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
/>
</MockQueryClientProvider>,
);
@@ -83,6 +85,7 @@ describe('VariableItem', () => {
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
/>
</MockQueryClientProvider>,
);
@@ -111,6 +114,7 @@ describe('VariableItem', () => {
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
/>
</MockQueryClientProvider>,
);
@@ -123,6 +127,8 @@ describe('VariableItem', () => {
const customVariableData = {
...mockCustomVariableData,
allSelected: true,
showALLOption: true,
multiSelect: true,
};
render(
@@ -132,6 +138,7 @@ describe('VariableItem', () => {
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
/>
</MockQueryClientProvider>,
);
@@ -147,6 +154,7 @@ describe('VariableItem', () => {
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
/>
</MockQueryClientProvider>,
);

View File

@@ -1,15 +1,29 @@
/* eslint-disable jsx-a11y/click-events-have-key-events */
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable react/jsx-props-no-spreading */
/* eslint-disable no-nested-ternary */
import './DashboardVariableSelection.styles.scss';
import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Input, Popover, Select, Typography } from 'antd';
import {
Checkbox,
Input,
Popover,
Select,
Tag,
Tooltip,
Typography,
} from 'antd';
import { CheckboxChangeEvent } from 'antd/es/checkbox';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { debounce, isArray, isString } from 'lodash-es';
import map from 'lodash-es/map';
import { memo, useEffect, useMemo, useState } from 'react';
import { ChangeEvent, memo, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
@@ -23,6 +37,11 @@ const ALL_SELECT_VALUE = '__ALL__';
const variableRegexPattern = /\{\{\s*?\.([^\s}]+)\s*?\}\}/g;
enum ToggleTagValue {
Only = 'Only',
All = 'All',
}
interface VariableItemProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
@@ -33,12 +52,17 @@ interface VariableItemProps {
allSelected: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
}
const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
): string | string[] => {
if (Array.isArray(selectedValue)) {
if (!variableData.multiSelect && selectedValue.length === 1) {
return selectedValue[0]?.toString() || '';
}
return selectedValue.map((item) => item.toString());
}
return selectedValue?.toString() || '';
@@ -50,6 +74,7 @@ function VariableItem({
existingVariables,
onValueUpdate,
variablesToGetUpdated,
setVariablesToGetUpdated,
}: VariableItemProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
@@ -148,6 +173,10 @@ function VariableItem({
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
@@ -193,7 +222,7 @@ function VariableItem({
});
const handleChange = (value: string | string[]): void => {
if (variableData.name)
if (variableData.name) {
if (
value === ALL_SELECT_VALUE ||
(Array.isArray(value) && value.includes(ALL_SELECT_VALUE)) ||
@@ -203,25 +232,29 @@ function VariableItem({
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
};
// do not debounce the above function as we do not need debounce in select variables
const debouncedHandleChange = debounce(handleChange, 500);
const { selectedValue } = variableData;
const selectedValueStringified = useMemo(() => getSelectValue(selectedValue), [
selectedValue,
]);
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const selectValue = variableData.allSelected
? 'ALL'
: selectedValueStringified;
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const mode =
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
: selectedValueStringified;
const mode: 'multiple' | undefined =
variableData.multiSelect && !variableData.allSelected
? 'multiple'
: undefined;
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
useEffect(() => {
// Fetch options for CUSTOM Type
@@ -231,6 +264,117 @@ function VariableItem({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableData.type, variableData.customValue]);
const checkAll = (e: MouseEvent): void => {
e.stopPropagation();
e.preventDefault();
const isChecked =
variableData.allSelected || selectValue.includes(ALL_SELECT_VALUE);
if (isChecked) {
handleChange([]);
} else {
handleChange(ALL_SELECT_VALUE);
}
};
const handleOptionSelect = (
e: CheckboxChangeEvent,
option: string | number | boolean,
): void => {
const newSelectedValue = Array.isArray(selectedValue)
? ((selectedValue.filter(
(val) => val.toString() !== option.toString(),
) as unknown) as string[])
: [];
if (
!e.target.checked &&
Array.isArray(selectedValueStringified) &&
selectedValueStringified.includes(option.toString())
) {
if (newSelectedValue.length === 0) {
handleChange(ALL_SELECT_VALUE);
return;
}
if (newSelectedValue.length === 1) {
handleChange(newSelectedValue[0].toString());
return;
}
handleChange(newSelectedValue);
} else if (!e.target.checked && selectedValue === option.toString()) {
handleChange(ALL_SELECT_VALUE);
} else if (newSelectedValue.length === optionsData.length - 1) {
handleChange(ALL_SELECT_VALUE);
}
};
const [optionState, setOptionState] = useState({
tag: '',
visible: false,
});
function currentToggleTagValue({
option,
}: {
option: string;
}): ToggleTagValue {
if (
option.toString() === selectValue ||
(Array.isArray(selectValue) &&
selectValue?.includes(option.toString()) &&
selectValue.length === 1)
) {
return ToggleTagValue.All;
}
return ToggleTagValue.Only;
}
function handleToggle(e: ChangeEvent, option: string): void {
e.stopPropagation();
const mode = currentToggleTagValue({ option: option as string });
const isChecked =
variableData.allSelected ||
option.toString() === selectValue ||
(Array.isArray(selectValue) && selectValue?.includes(option.toString()));
if (isChecked) {
if (mode === ToggleTagValue.Only) {
handleChange(option.toString());
} else if (!variableData.multiSelect) {
handleChange(option.toString());
} else {
handleChange(ALL_SELECT_VALUE);
}
} else {
handleChange(option.toString());
}
}
function retProps(
option: string,
): {
onMouseOver: () => void;
onMouseOut: () => void;
} {
return {
onMouseOver: (): void =>
setOptionState({
tag: option.toString(),
visible: true,
}),
onMouseOut: (): void =>
setOptionState({
tag: option.toString(),
visible: false,
}),
};
}
const ensureValidOption = (option: string): boolean =>
!(
currentToggleTagValue({ option }) === ToggleTagValue.All && !enableSelectAll
);
return (
<div className="variable-item">
<Typography.Text className="variable-name" ellipsis>
@@ -264,19 +408,35 @@ function VariableItem({
onChange={handleChange}
bordered={false}
placeholder="Select value"
placement="bottomRight"
placement="bottomLeft"
mode={mode}
dropdownMatchSelectWidth={false}
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
maxTagCount={4}
getPopupContainer={popupContainer}
// eslint-disable-next-line react/no-unstable-nested-components
tagRender={(props): JSX.Element => (
<Tag closable onClose={props.onClose}>
{props.value}
</Tag>
)}
// eslint-disable-next-line react/no-unstable-nested-components
maxTagPlaceholder={(omittedValues): JSX.Element => (
<Tooltip title={omittedValues.map(({ value }) => value).join(', ')}>
<span>+ {omittedValues.length} </span>
</Tooltip>
)}
>
{enableSelectAll && (
<Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
ALL
<div className="all-label" onClick={(e): void => checkAll(e as any)}>
<Checkbox checked={variableData.allSelected} />
ALL
</div>
</Select.Option>
)}
{map(optionsData, (option) => (
@@ -285,7 +445,45 @@ function VariableItem({
key={option.toString()}
value={option}
>
{option.toString()}
<div
className={variableData.multiSelect ? 'dropdown-checkbox-label' : ''}
>
{variableData.multiSelect && (
<Checkbox
onChange={(e): void => {
e.stopPropagation();
e.preventDefault();
handleOptionSelect(e, option);
}}
checked={
variableData.allSelected ||
option.toString() === selectValue ||
(Array.isArray(selectValue) &&
selectValue?.includes(option.toString()))
}
/>
)}
<div
className="dropdown-value"
{...retProps(option as string)}
onClick={(e): void => handleToggle(e as any, option as string)}
>
<Tooltip title={option.toString()} placement="bottomRight">
<Typography.Text ellipsis className="option-text">
{option.toString()}
</Typography.Text>
</Tooltip>
{variableData.multiSelect &&
optionState.tag === option.toString() &&
optionState.visible &&
ensureValidOption(option as string) && (
<Typography.Text className="toggle-tag-label">
{currentToggleTagValue({ option: option as string })}
</Typography.Text>
)}
</div>
</div>
</Select.Option>
))}
</Select>
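Note: the checkbox selection above hinges on an ALL sentinel value — selecting it, or selecting every option, flips the variable to allSelected, while anything else propagates the concrete value(s). A reduced sketch of that branch with an illustrative callback signature (the real onValueUpdate also takes the variable name and id):

const ALL_SELECT_VALUE = '__ALL__';

type OnValueUpdate = (value: string | string[], allSelected: boolean) => void;

function handleVariableChange(
	value: string | string[],
	optionsCount: number,
	onValueUpdate: OnValueUpdate,
): void {
	const pickedAll =
		value === ALL_SELECT_VALUE ||
		(Array.isArray(value) && value.includes(ALL_SELECT_VALUE)) ||
		(Array.isArray(value) && value.length === optionsCount);

	if (pickedAll) {
		// Store the sentinel state, not the expanded list of options.
		onValueUpdate(ALL_SELECT_VALUE, true);
	} else {
		onValueUpdate(value, false);
	}
}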

View File

@@ -42,4 +42,5 @@ export const VariableValue = styled(Typography)`
export const SelectItemStyle = {
minWidth: 120,
fontSize: '0.8rem',
width: '100%',
};

View File

@@ -72,10 +72,16 @@ function LeftContainer({
globalSelectedInterval,
graphType: getGraphType(selectedGraph || selectedWidget.panelTypes),
query: stagedQuery,
fillGaps: selectedWidget.fillSpans || false,
}));
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [stagedQuery, selectedTime, globalSelectedInterval]);
}, [
stagedQuery,
selectedTime,
selectedWidget.fillSpans,
globalSelectedInterval,
]);
const queryResponse = useGetQueryRange(
requestData,

View File

@@ -429,6 +429,21 @@
}
}
.bucket-config {
.label {
color: var(--bg-ink-400);
}
.bucket-input {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-300);
.ant-input {
background: var(--bg-vanilla-300);
}
}
}
.panel-time-text {
color: var(--bg-ink-400);
}

View File

@@ -189,50 +189,6 @@ export const panelTypeDataSourceFormValuesMap: Record<
},
},
},
[PANEL_TYPES.HISTOGRAM]: {
[DataSource.LOGS]: {
builder: {
queryData: [
'filters',
'aggregateOperator',
'aggregateAttribute',
'groupBy',
'limit',
'having',
'orderBy',
'functions',
],
},
},
[DataSource.METRICS]: {
builder: {
queryData: [
'filters',
'aggregateOperator',
'aggregateAttribute',
'groupBy',
'limit',
'having',
'orderBy',
'functions',
'spaceAggregation',
],
},
},
[DataSource.TRACES]: {
builder: {
queryData: [
'filters',
'aggregateOperator',
'aggregateAttribute',
'groupBy',
'limit',
'having',
'orderBy',
],
},
},
},
[PANEL_TYPES.TABLE]: {
[DataSource.LOGS]: {
builder: {

View File

@@ -1,7 +1,7 @@
import * as Sentry from '@sentry/react';
import { FeatureKeys } from 'constants/features';
import useFeatureFlag from 'hooks/useFeatureFlag';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { ErrorBoundary } from 'react-error-boundary';
import ServiceMetrics from './ServiceMetrics';
import ServiceTraces from './ServiceTraces';
@@ -12,11 +12,11 @@ function Services(): JSX.Element {
?.active;
return (
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<Container style={{ marginTop: 0 }}>
{isSpanMetricEnabled ? <ServiceMetrics /> : <ServiceTraces />}
</Container>
</ErrorBoundary>
</Sentry.ErrorBoundary>
);
}

View File

@@ -125,8 +125,8 @@ const menuItems: SidebarItem[] = [
/** Mapping of some newly added routes and their corresponding active sidebar menu key */
export const NEW_ROUTES_MENU_ITEM_KEY_MAP: Record<string, string> = {
[ROUTES.TRACES_EXPLORER]: ROUTES.TRACE,
[ROUTES.TRACE_EXPLORER]: ROUTES.TRACE,
[ROUTES.TRACE]: ROUTES.TRACES_EXPLORER,
[ROUTES.TRACE_EXPLORER]: ROUTES.TRACES_EXPLORER,
[ROUTES.LOGS_BASE]: ROUTES.LOGS_EXPLORER,
};

View File

@@ -1,7 +1,17 @@
.span-details-sider {
padding-top: 16px;
::-webkit-scrollbar {
width: 0.2em;
}
::-webkit-scrollbar-track {
box-shadow: inset 0 0 6px rgba(18, 18, 18, 0.3);
}
&.dark {
.ant-layout-sider-trigger {
background-color: black !important;
background-color: #0b0c0e !important;
}
}

View File

@@ -246,13 +246,14 @@ function TraceDetail({ response }: TraceDetailProps): JSX.Element {
<Sider
className={cx('span-details-sider', isDarkMode ? 'dark' : 'light')}
style={{ background: isDarkMode ? '#000' : '#fff' }}
style={{ background: isDarkMode ? '#0b0c0e' : '#fff' }}
theme={isDarkMode ? 'dark' : 'light'}
collapsible
collapsed={collapsed}
reverseArrow
width={300}
collapsedWidth={40}
defaultCollapsed
onCollapse={(value): void => setCollapsed(value)}
>
{!collapsed && (

View File

@@ -3,6 +3,7 @@ import { ColumnsType } from 'antd/es/table';
import ROUTES from 'constants/routes';
import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util';
import { formUrlParams } from 'container/TraceDetail/utils';
import dayjs from 'dayjs';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { ILog } from 'types/api/logs/log';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
@@ -30,6 +31,13 @@ export const getListColumns = (
key: 'date',
title: 'Timestamp',
width: 145,
render: (item): JSX.Element => {
const date =
typeof item === 'string'
? dayjs(item).format('YYYY-MM-DD HH:mm:ss.SSS')
: dayjs(item / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS');
return <Typography.Text>{date}</Typography.Text>;
},
},
];
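Note: the timestamp column now renders a browser-local, human-readable time — string values are parsed directly, numeric values are treated as epoch nanoseconds and scaled to milliseconds for dayjs. Extracted into a standalone helper for illustration:

import dayjs from 'dayjs';

// Accepts either an ISO/date string or an epoch value in nanoseconds.
export function formatTraceTimestamp(item: string | number): string {
	return typeof item === 'string'
		? dayjs(item).format('YYYY-MM-DD HH:mm:ss.SSS')
		: dayjs(item / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS'); // ns -> ms
}

// The output is rendered in the browser's local timezone, so a string input and
// its nanosecond-epoch equivalent format to the same instant.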

View File

@@ -3,7 +3,7 @@ import { ColumnsType } from 'antd/es/table';
import ROUTES from 'constants/routes';
import { getMs } from 'container/Trace/Filters/Panel/PanelBody/Duration/util';
import { DEFAULT_PER_PAGE_OPTIONS } from 'hooks/queryPagination';
import { generatePath } from 'react-router-dom';
import { generatePath, Link } from 'react-router-dom';
import { ListItem } from 'types/api/widgets/getQuery';
export const PER_PAGE_OPTIONS: number[] = [10, ...DEFAULT_PER_PAGE_OPTIONS];
@@ -38,14 +38,14 @@ export const columns: ColumnsType<ListItem['data']> = [
dataIndex: 'traceID',
key: 'traceID',
render: (traceID: string): JSX.Element => (
<Typography.Link
href={generatePath(ROUTES.TRACE_DETAIL, {
<Link
to={generatePath(ROUTES.TRACE_DETAIL, {
id: traceID,
})}
data-testid="trace-id"
>
{traceID}
</Typography.Link>
</Link>
),
},
];

View File

@@ -5,7 +5,7 @@
height: 100%;
.resize-table {
height: calc(100% - 40px);
height: calc(100% - 70px);
overflow: scroll;
overflow-x: hidden;

View File

@@ -1,43 +1,61 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { Dashboard } from 'types/api/dashboard/getAll';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
const baseLogsSelectedColumns = {
dataType: 'string',
type: '',
name: 'timestamp',
};
export const addEmptyWidgetInDashboardJSONWithQuery = (
dashboard: Dashboard,
query: Query,
widgetId: string,
panelTypes?: PANEL_TYPES,
): Dashboard => ({
...dashboard,
data: {
...dashboard.data,
layout: [
{
i: widgetId,
w: 6,
x: 0,
h: 6,
y: 0,
},
...(dashboard?.data?.layout || []),
],
widgets: [
...(dashboard?.data?.widgets || []),
{
id: widgetId,
query,
description: '',
isStacked: false,
nullZeroValues: '',
opacity: '',
title: '',
timePreferance: 'GLOBAL_TIME',
panelTypes: panelTypes || PANEL_TYPES.TIME_SERIES,
softMax: null,
softMin: null,
selectedLogFields: [],
selectedTracesFields: [],
},
],
},
});
panelType?: PANEL_TYPES,
selectedColumns?: BaseAutocompleteData[] | null,
): Dashboard => {
const logsSelectedColumns = [
baseLogsSelectedColumns,
...convertKeysToColumnFields(selectedColumns || []),
];
return {
...dashboard,
data: {
...dashboard.data,
layout: [
{
i: widgetId,
w: 6,
x: 0,
h: 6,
y: 0,
},
...(dashboard?.data?.layout || []),
],
widgets: [
...(dashboard?.data?.widgets || []),
{
id: widgetId,
query,
description: '',
isStacked: false,
nullZeroValues: '',
opacity: '',
title: '',
timePreferance: 'GLOBAL_TIME',
panelTypes: panelType || PANEL_TYPES.TIME_SERIES,
softMax: null,
softMin: null,
selectedLogFields:
panelType === PANEL_TYPES.LIST ? logsSelectedColumns : [],
selectedTracesFields:
panelType === PANEL_TYPES.LIST ? selectedColumns || [] : [],
},
],
},
};
};
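Note: an illustrative call site for the extended helper when exporting a LIST panel, assuming a dashboard object, a builder query, and the columns picked in the options menu:

import { PANEL_TYPES } from 'constants/queryBuilder';
import { addEmptyWidgetInDashboardJSONWithQuery } from 'hooks/dashboard/utils';
import { Dashboard } from 'types/api/dashboard/getAll';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { v4 } from 'uuid';

function exportListWidget(
	dashboard: Dashboard,
	query: Query,
	selectedColumns: BaseAutocompleteData[],
): Dashboard {
	const widgetId = v4();
	// For LIST panels the chosen columns become the widget's
	// selectedLogFields / selectedTracesFields; other panel types ignore them.
	return addEmptyWidgetInDashboardJSONWithQuery(
		dashboard,
		query,
		widgetId,
		PANEL_TYPES.LIST,
		selectedColumns,
	);
}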

View File

@@ -7,7 +7,6 @@ import { AxiosError } from 'axios';
import { ThemeProvider } from 'hooks/useDarkMode';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { createRoot } from 'react-dom/client';
import { ErrorBoundary } from 'react-error-boundary';
import { HelmetProvider } from 'react-helmet-async';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
@@ -58,7 +57,7 @@ if (container) {
const root = createRoot(container);
root.render(
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<HelmetProvider>
<ThemeProvider>
<QueryClientProvider client={queryClient}>
@@ -68,6 +67,6 @@ if (container) {
</QueryClientProvider>
</ThemeProvider>
</HelmetProvider>
</ErrorBoundary>,
</Sentry.ErrorBoundary>,
);
}

View File

@@ -75,6 +75,7 @@ export interface GetQueryResultsProps {
globalSelectedInterval: Time | TimeV2 | CustomTimeType;
variables?: Record<string, unknown>;
params?: Record<string, unknown>;
fillGaps?: boolean;
tableParams?: {
pagination?: Pagination;
selectColumns?: any;

View File

@@ -20,6 +20,7 @@ export const prepareQueryRangePayload = ({
tableParams,
variables = {},
params = {},
fillGaps = false,
}: GetQueryResultsProps): PrepareQueryRangePayload => {
let legendMap: Record<string, string> = {};
const { allowSelectedIntervalForStepGen, ...restParams } = params;
@@ -27,6 +28,7 @@ export const prepareQueryRangePayload = ({
const compositeQuery: QueryRangePayload['compositeQuery'] = {
queryType: query.queryType,
panelType: graphType,
fillGaps,
};
switch (query.queryType) {
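Note: end to end, the flag travels from the widget (`widget.fillSpans`) through GetQueryResultsProps into the compositeQuery of the query-range payload. A trimmed sketch of that assembly using minimal illustrative shapes (the real ones are GetQueryResultsProps and QueryRangePayload):

import { PANEL_TYPES } from 'constants/queryBuilder';

// Minimal illustrative shapes for the sketch only.
interface QueryRangeProps {
	graphType: PANEL_TYPES;
	queryType: string;
	fillGaps?: boolean;
}

interface CompositeQuery {
	queryType: string;
	panelType: PANEL_TYPES;
	fillGaps?: boolean;
}

export function buildCompositeQuery({
	graphType,
	queryType,
	fillGaps = false, // defaults to false, as in prepareQueryRangePayload above
}: QueryRangeProps): CompositeQuery {
	return {
		queryType,
		panelType: graphType,
		fillGaps,
	};
}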

View File

@@ -365,6 +365,7 @@ const fillRestAggregationData = (
queryTableData: QueryDataV3[],
seria: SeriesItem,
equalQueriesByLabels: string[],
isEqualQuery: boolean,
): void => {
const nextQueryData =
queryTableData.find((q) => q.queryName === column.field) || null;
@@ -374,13 +375,13 @@ const fillRestAggregationData = (
nextQueryData,
);
const isEqual = isEqualQueriesByLabel(equalQueriesByLabels, column.field);
if (targetSeria) {
const isEqual = isEqualQueriesByLabel(equalQueriesByLabels, column.field);
if (!isEqual) {
// This line is crucial. It ensures that no additional rows are added to the table for similar labels across all formulas; the check is applied in signoz/frontend/src/lib/query/createTableColumnsFromQuery.ts, line 370
equalQueriesByLabels.push(column.field);
}
} else {
} else if (!isEqualQuery) {
column.data.push('N/A');
}
};
@@ -435,6 +436,7 @@ const fillDataFromSeries = (
queryTableData,
seria,
equalQueriesByLabels,
isEqualQuery,
);
return;
@@ -537,8 +539,12 @@ const generateTableColumns = (
width: QUERY_TABLE_CONFIG.width,
render: renderColumnCell && renderColumnCell[item.dataIndex],
sorter: (a: RowData, b: RowData): number => {
const valueA = Number(a[item.dataIndex]);
const valueB = Number(b[item.dataIndex]);
const valueA = Number(
a[`${item.dataIndex}_without_unit`] ?? a[item.dataIndex],
);
const valueB = Number(
b[`${item.dataIndex}_without_unit`] ?? b[item.dataIndex],
);
if (!isNaN(valueA) && !isNaN(valueB)) {
return valueA - valueB;
@@ -566,6 +572,29 @@ export const createTableColumnsFromQuery: CreateTableDataFromQuery = ({
a.queryName < b.queryName ? -1 : 1,
);
// the reason we need this is because the filling of values in rows doesn't account for mismatched entries
// in the response. Example: Series A -> [label1, label2] and Series B -> [label2, label1] isn't accounted for
sortedQueryTableData.forEach((q) => {
q.series?.forEach((s) => {
s.labelsArray?.sort((a, b) =>
Object.keys(a)[0] < Object.keys(b)[0] ? -1 : 1,
);
});
q.series?.sort((a, b) => {
let labelA = '';
let labelB = '';
a.labelsArray.forEach((lab) => {
labelA += Object.values(lab)[0];
});
b.labelsArray.forEach((lab) => {
labelB += Object.values(lab)[0];
});
return labelA < labelB ? -1 : 1;
});
});
const dynamicColumns = getDynamicColumns(sortedQueryTableData, query);
const { filledDynamicColumns, rowsLength } = fillColumnsData(
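Note: the sorter change above prefers the raw `<key>_without_unit` value captured in GridTableComponent and only falls back to the displayed (formatted) value. Roughly, as a standalone comparator — the non-numeric branch here is illustrative, since the original fallback is not shown in the hunk:

type RowData = Record<string, string | number>;

// Numeric-first comparator for a table column `dataIndex`.
export function compareRows(a: RowData, b: RowData, dataIndex: string): number {
	const valueA = Number(a[`${dataIndex}_without_unit`] ?? a[dataIndex]);
	const valueB = Number(b[`${dataIndex}_without_unit`] ?? b[dataIndex]);

	if (!Number.isNaN(valueA) && !Number.isNaN(valueB)) {
		return valueA - valueB; // sort on the unformatted numbers
	}
	// Illustrative fallback when either side is not numeric.
	return String(a[dataIndex]).localeCompare(String(b[dataIndex]));
}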

View File

@@ -68,7 +68,8 @@ function getStackedSeries(apiResponse: QueryData[]): QueryData[] {
const { values } = series[i];
for (let j = 0; j < values.length; j++) {
values[j][1] = String(
parseFloat(values[j]?.[1]) + parseFloat(series[i + 1].values[j]?.[1]),
parseFloat(values[j]?.[1] || '0') +
parseFloat(series[i + 1].values[j]?.[1] || '0'),
);
}
@@ -88,7 +89,8 @@ function getStackedSeriesQueryFormat(apiResponse: QueryData[]): QueryData[] {
const { values } = series[i];
for (let j = 0; j < values.length; j++) {
values[j].value = String(
parseFloat(values[j].value) + parseFloat(series[i + 1].values[j].value),
parseFloat(values[j]?.value || '0') +
parseFloat(series[i + 1].values[j]?.value || '0'),
);
}
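Note: the stacked-bar fix guards against missing points by treating absent values as '0' before adding, so a gap in one series no longer turns the whole stack into NaN. In isolation:

// Each point is [timestamp, value-as-string]; the value may be missing for gaps.
type Point = [number, string | undefined];

export function stackOnto(current: Point[], below: Point[]): void {
	for (let j = 0; j < current.length; j += 1) {
		// Missing values on either series default to 0 instead of producing NaN.
		current[j][1] = String(
			parseFloat(current[j]?.[1] || '0') + parseFloat(below[j]?.[1] || '0'),
		);
	}
}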

View File

@@ -17,11 +17,7 @@ function getXAxisTimestamps(seriesList: QueryData[]): number[] {
return timestampsArr.sort((a, b) => a - b);
}
function fillMissingXAxisTimestamps(
timestampArr: number[],
data: any[],
fillSpans: boolean,
): any {
function fillMissingXAxisTimestamps(timestampArr: number[], data: any[]): any {
// Generate a set of all timestamps in the range
const allTimestampsSet = new Set(timestampArr);
const processedData = JSON.parse(JSON.stringify(data));
@@ -35,14 +31,14 @@ function fillMissingXAxisTimestamps(
);
missingTimestamps.forEach((timestamp) => {
const value = fillSpans ? 0 : null;
const value = null;
entry.values.push([timestamp, value]);
});
entry.values.forEach((v) => {
if (Number.isNaN(v[1])) {
const replaceValue = fillSpans ? 0 : null;
const replaceValue = null;
// eslint-disable-next-line no-param-reassign
v[1] = replaceValue;
} else if (v[1] !== null) {
@@ -85,11 +81,7 @@ export const getUPlotChartData = (
): any[] => {
const seriesList = apiResponse?.data?.result || [];
const timestampArr = getXAxisTimestamps(seriesList);
const yAxisValuesArr = fillMissingXAxisTimestamps(
timestampArr,
seriesList,
fillSpans || false,
);
const yAxisValuesArr = fillMissingXAxisTimestamps(timestampArr, seriesList);
return [
timestampArr,
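Note: with the fillSpans switch removed from the chart-data layer, missing x-axis points are always filled with null (a gap in the line), presumably because zero-filling is now requested from the backend via the fillGaps flag instead. A reduced version of the fill step:

type SeriesEntry = { values: Array<[number, number | null]> };

export function fillMissingTimestamps(
	allTimestamps: number[],
	series: SeriesEntry[],
): SeriesEntry[] {
	const filled: SeriesEntry[] = JSON.parse(JSON.stringify(series));

	filled.forEach((entry) => {
		const present = new Set(entry.values.map(([ts]) => ts));
		allTimestamps
			.filter((ts) => !present.has(ts))
			// null (not 0) so uPlot renders a gap rather than a false data point
			.forEach((ts) => entry.values.push([ts, null]));
		entry.values.sort((a, b) => a[0] - b[0]);
	});

	return filled;
}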

View File

@@ -1,5 +1,6 @@
import './LogsExplorer.styles.scss';
import * as Sentry from '@sentry/react';
import ExplorerCard from 'components/ExplorerCard/ExplorerCard';
import LogExplorerQuerySection from 'container/LogExplorerQuerySection';
import LogsExplorerViews from 'container/LogsExplorerViews';
@@ -9,7 +10,6 @@ import Toolbar from 'container/Toolbar/Toolbar';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useEffect, useMemo, useState } from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import { DataSource } from 'types/common/queryBuilder';
import { WrapperStyled } from './styles';
@@ -70,7 +70,7 @@ function LogsExplorer(): JSX.Element {
);
return (
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<Toolbar
showAutoRefresh={false}
leftActions={
@@ -101,7 +101,7 @@ function LogsExplorer(): JSX.Element {
</div>
</div>
</WrapperStyled>
</ErrorBoundary>
</Sentry.ErrorBoundary>
);
}

View File

@@ -1,5 +1,6 @@
import './Pipelines.styles.scss';
import * as Sentry from '@sentry/react';
import type { TabsProps } from 'antd';
import { Tabs } from 'antd';
import getPipeline from 'api/pipeline/get';
@@ -9,7 +10,6 @@ import PipelinePage from 'container/PipelinePage/Layouts/Pipeline';
import { useNotifications } from 'hooks/useNotifications';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useEffect, useMemo } from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import { useTranslation } from 'react-i18next';
import { useQuery } from 'react-query';
import { SuccessResponse } from 'types/api';
@@ -82,13 +82,13 @@ function Pipelines(): JSX.Element {
}
return (
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<Tabs
className="pipeline-tabs"
defaultActiveKey="pipelines"
items={tabItems}
/>
</ErrorBoundary>
</Sentry.ErrorBoundary>
);
}

View File

@@ -1,3 +1,4 @@
import * as Sentry from '@sentry/react';
import { Card } from 'antd';
import { NotificationInstance } from 'antd/es/notification/interface';
import ROUTES from 'constants/routes';
@@ -11,7 +12,6 @@ import getStep from 'lib/getStep';
import history from 'lib/history';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { MouseEventHandler, useCallback, useEffect, useState } from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import { connect, useDispatch, useSelector } from 'react-redux';
import { bindActionCreators, Dispatch } from 'redux';
import { ThunkDispatch } from 'redux-thunk';
@@ -146,7 +146,7 @@ function Trace({
);
return (
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<Search />
<Container>
<div>
@@ -169,7 +169,7 @@ function Trace({
</Card>
</RightContainer>
</Container>
</ErrorBoundary>
</Sentry.ErrorBoundary>
);
}

View File

@@ -1,15 +1,19 @@
import './TracesExplorer.styles.scss';
import { FilterOutlined } from '@ant-design/icons';
import * as Sentry from '@sentry/react';
import { Button, Card, Tabs, Tooltip } from 'antd';
import axios from 'axios';
import ExplorerCard from 'components/ExplorerCard/ExplorerCard';
import { LOCALSTORAGE } from 'constants/localStorage';
import { AVAILABLE_EXPORT_PANEL_TYPES } from 'constants/panelTypes';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
import ExportPanel from 'container/ExportPanel';
import { useOptionsMenu } from 'container/OptionsMenu';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2';
import { defaultSelectedColumns } from 'container/TracesExplorer/ListView/configs';
import QuerySection from 'container/TracesExplorer/QuerySection';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { addEmptyWidgetInDashboardJSONWithQuery } from 'hooks/dashboard/utils';
@@ -19,10 +23,11 @@ import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import { cloneDeep, set } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import { Dashboard } from 'types/api/dashboard/getAll';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink';
import { v4 } from 'uuid';
@@ -42,6 +47,15 @@ function TracesExplorer(): JSX.Element {
stagedQuery,
} = useQueryBuilder();
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.TRACES_LIST_OPTIONS,
dataSource: DataSource.TRACES,
aggregateOperator: 'noop',
initialOptions: {
selectColumns: defaultSelectedColumns,
},
});
const currentPanelType = useGetPanelTypesQueryParam();
const { handleExplorerTabChange } = useHandleExplorerTabChange();
@@ -101,6 +115,18 @@ function TracesExplorer(): JSX.Element {
const { mutate: updateDashboard, isLoading } = useUpdateDashboard();
const getUpdatedQueryForExport = (): Query => {
const updatedQuery = cloneDeep(currentQuery);
set(
updatedQuery,
'builder.queryData[0].selectColumns',
options.selectColumns,
);
return updatedQuery;
};
const handleExport = useCallback(
(dashboard: Dashboard | null): void => {
if (!dashboard || !panelType) return;
@@ -111,11 +137,17 @@ function TracesExplorer(): JSX.Element {
const widgetId = v4();
const query =
panelType === PANEL_TYPES.LIST
? getUpdatedQueryForExport()
: exportDefaultQuery;
const updatedDashboard = addEmptyWidgetInDashboardJSONWithQuery(
dashboard,
exportDefaultQuery,
query,
widgetId,
panelTypeParam,
options.selectColumns,
);
updateDashboard(updatedDashboard, {
@@ -144,7 +176,7 @@ function TracesExplorer(): JSX.Element {
return;
}
const dashboardEditView = generateExportToDashboardLink({
query: exportDefaultQuery,
query,
panelType: panelTypeParam,
dashboardId: data.payload?.uuid || '',
widgetId,
@@ -161,6 +193,7 @@ function TracesExplorer(): JSX.Element {
},
});
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[exportDefaultQuery, notifications, panelType, updateDashboard],
);
@@ -185,7 +218,7 @@ function TracesExplorer(): JSX.Element {
const [isOpen, setOpen] = useState<boolean>(true);
return (
<ErrorBoundary FallbackComponent={ErrorBoundaryFallback}>
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="trace-explorer-page">
<Card className="filter" hidden={!isOpen}>
<Filter setOpen={setOpen} />
@@ -236,7 +269,7 @@ function TracesExplorer(): JSX.Element {
/>
</Card>
</div>
</ErrorBoundary>
</Sentry.ErrorBoundary>
);
}

View File

@@ -18,6 +18,7 @@ export type QueryRangePayload = {
promQueries?: Record<string, IPromQLQuery>;
queryType: EQueryType;
panelType: PANEL_TYPES;
fillGaps?: boolean;
};
end: number;
start: number;

View File

@@ -74,6 +74,7 @@ export type IBuilderQuery = {
legend: string;
pageSize?: number;
offset?: number;
selectColumns?: BaseAutocompleteData[];
};
export interface IClickHouseQuery {

go.mod
View File

@@ -6,13 +6,13 @@ require (
github.com/ClickHouse/clickhouse-go/v2 v2.20.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.88.24
github.com/SigNoz/signoz-otel-collector v0.102.0
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
github.com/antonmedv/expr v1.15.3
github.com/auth0/go-jwt-middleware v1.0.1
github.com/cespare/xxhash v1.1.0
github.com/coreos/go-oidc/v3 v3.4.0
github.com/coreos/go-oidc/v3 v3.10.0
github.com/dustin/go-humanize v1.0.1
github.com/go-co-op/gocron v1.30.1
github.com/go-kit/log v0.2.1
@@ -21,7 +21,7 @@ require (
github.com/golang-jwt/jwt v3.2.2+incompatible
github.com/google/uuid v1.6.0
github.com/gorilla/handlers v1.5.1
github.com/gorilla/mux v1.8.0
github.com/gorilla/mux v1.8.1
github.com/gosimple/slug v1.10.0
github.com/jmoiron/sqlx v1.3.4
github.com/json-iterator/go v1.1.12
@@ -29,18 +29,18 @@ require (
github.com/mailru/easyjson v0.7.7
github.com/mattn/go-sqlite3 v2.0.3+incompatible
github.com/minio/minio-go/v6 v6.0.57
github.com/mitchellh/mapstructure v1.5.1-0.20220423185008-bf980b35cac4
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c
github.com/oklog/oklog v0.3.2
github.com/open-telemetry/opamp-go v0.5.0
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.88.0
github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.88.0
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.102.0
github.com/open-telemetry/opentelemetry-collector-contrib/processor/logstransformprocessor v0.102.0
github.com/opentracing/opentracing-go v1.2.0
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/pkg/errors v0.9.1
github.com/posthog/posthog-go v0.0.0-20220817142604-0b0bbf0f9c0f
github.com/prometheus/common v0.44.0
github.com/prometheus/common v0.54.0
github.com/prometheus/prometheus v2.5.0+incompatible
github.com/rs/cors v1.10.1
github.com/rs/cors v1.11.0
github.com/russellhaering/gosaml2 v0.9.0
github.com/russellhaering/goxmldsig v1.2.0
github.com/samber/lo v1.38.1
@@ -49,93 +49,90 @@ require (
github.com/soheilhy/cmux v0.1.5
github.com/srikanthccv/ClickHouse-go-mock v0.7.0
github.com/stretchr/testify v1.9.0
go.opentelemetry.io/collector/component v0.88.0
go.opentelemetry.io/collector/confmap v0.88.0
go.opentelemetry.io/collector/connector v0.88.0
go.opentelemetry.io/collector/consumer v0.88.0
go.opentelemetry.io/collector/exporter v0.88.0
go.opentelemetry.io/collector/extension v0.88.0
go.opentelemetry.io/collector/otelcol v0.88.0
go.opentelemetry.io/collector/pdata v1.3.0
go.opentelemetry.io/collector/processor v0.88.0
go.opentelemetry.io/collector/receiver v0.88.0
go.opentelemetry.io/collector/service v0.88.0
go.opentelemetry.io/otel v1.24.0
go.opentelemetry.io/otel/sdk v1.23.1
go.opentelemetry.io/collector/component v0.102.1
go.opentelemetry.io/collector/confmap v0.102.1
go.opentelemetry.io/collector/confmap/converter/expandconverter v0.102.0
go.opentelemetry.io/collector/confmap/provider/fileprovider v0.102.0
go.opentelemetry.io/collector/connector v0.102.0
go.opentelemetry.io/collector/consumer v0.102.1
go.opentelemetry.io/collector/exporter v0.102.0
go.opentelemetry.io/collector/extension v0.102.1
go.opentelemetry.io/collector/otelcol v0.102.0
go.opentelemetry.io/collector/pdata v1.9.0
go.opentelemetry.io/collector/processor v0.102.0
go.opentelemetry.io/collector/receiver v0.102.0
go.opentelemetry.io/collector/service v0.102.0
go.opentelemetry.io/otel v1.27.0
go.opentelemetry.io/otel/sdk v1.27.0
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.24.0
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842
golang.org/x/net v0.26.0
golang.org/x/oauth2 v0.16.0
golang.org/x/oauth2 v0.21.0
golang.org/x/text v0.16.0
google.golang.org/grpc v1.62.0
google.golang.org/protobuf v1.33.0
google.golang.org/grpc v1.64.0
google.golang.org/protobuf v1.34.1
gopkg.in/segmentio/analytics-go.v3 v3.1.0
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
k8s.io/apimachinery v0.28.2
k8s.io/apimachinery v0.29.3
)
require (
github.com/emicklei/go-restful/v3 v3.11.0 // indirect
k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect
sigs.k8s.io/yaml v1.4.0 // indirect
)
require (
contrib.go.opencensus.io/exporter/prometheus v0.4.2 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.11.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.6.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/ClickHouse/ch-go v0.61.3 // indirect
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 // indirect
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/aws/aws-sdk-go v1.45.26 // indirect
github.com/aws/aws-sdk-go v1.53.16 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
github.com/cenkalti/backoff/v4 v4.2.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dennwc/varint v1.0.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/edsrzf/mmap-go v1.1.0 // indirect
github.com/felixge/httpsnoop v1.0.3 // indirect
github.com/expr-lang/expr v1.16.9 // indirect
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect
github.com/go-faster/city v1.0.1 // indirect
github.com/go-faster/errors v0.7.1 // indirect
github.com/go-jose/go-jose/v4 v4.0.1 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/gosimple/unidecode v1.0.0 // indirect
github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 // indirect
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 // indirect
github.com/hashicorp/go-version v1.7.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/influxdata/go-syslog/v3 v3.0.1-0.20210608084020-ac565dc76ba6 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/jonboulle/clockwork v0.2.2 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/jpillora/backoff v1.0.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/klauspost/compress v1.17.7 // indirect
github.com/klauspost/compress v1.17.8 // indirect
github.com/klauspost/cpuid v1.2.3 // indirect
github.com/knadh/koanf/v2 v2.0.1 // indirect
github.com/knadh/koanf/v2 v2.1.1 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/leodido/ragel-machinery v0.0.0-20181214104525-299bdde78165 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/leodido/go-syslog/v4 v4.1.0 // indirect
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
github.com/lufia/plan9stats v0.0.0-20220913051719-115f729f3c8c // indirect
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/minio/md5-simd v1.1.0 // indirect
github.com/minio/sha256-simd v0.1.1 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
@@ -145,68 +142,67 @@ require (
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/onsi/gomega v1.19.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.88.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.102.0 // indirect
github.com/paulmach/orb v0.11.1 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/prometheus/client_golang v1.17.0 // indirect
github.com/prometheus/client_model v0.5.0 // indirect
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c // indirect
github.com/prometheus/client_golang v1.19.1 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common/sigv4 v0.1.0 // indirect
github.com/prometheus/procfs v0.11.1 // indirect
github.com/prometheus/statsd_exporter v0.22.7 // indirect
github.com/prometheus/procfs v0.15.0 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/backo-go v1.0.1 // indirect
github.com/shirou/gopsutil/v3 v3.23.12 // indirect
github.com/shirou/gopsutil/v3 v3.24.4 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/smarty/assertions v1.15.0 // indirect
github.com/spf13/cobra v1.7.0 // indirect
github.com/spf13/cobra v1.8.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/valyala/fastjson v1.6.4 // indirect
github.com/vjeantet/grok v1.0.1 // indirect
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
github.com/yusufpapurcu/wmi v1.2.3 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/collector v0.88.0 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.88.0 // indirect
go.opentelemetry.io/collector/featuregate v1.0.0-rcv0017 // indirect
go.opentelemetry.io/collector/semconv v0.88.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0 // indirect
go.opentelemetry.io/contrib/propagators/b3 v1.20.0 // indirect
go.opentelemetry.io/otel/bridge/opencensus v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.19.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.42.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.19.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.opentelemetry.io/proto/otlp v1.0.0 // indirect
go.opentelemetry.io/collector v0.102.1 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.102.1 // indirect
go.opentelemetry.io/collector/confmap/provider/envprovider v0.102.0 // indirect
go.opentelemetry.io/collector/confmap/provider/httpprovider v0.102.0 // indirect
go.opentelemetry.io/collector/confmap/provider/httpsprovider v0.102.0 // indirect
go.opentelemetry.io/collector/confmap/provider/yamlprovider v0.102.0 // indirect
go.opentelemetry.io/collector/featuregate v1.9.0 // indirect
go.opentelemetry.io/collector/semconv v0.102.0 // indirect
go.opentelemetry.io/contrib/config v0.7.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect
go.opentelemetry.io/contrib/propagators/b3 v1.27.0 // indirect
go.opentelemetry.io/otel/bridge/opencensus v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.49.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.27.0 // indirect
go.opentelemetry.io/otel/metric v1.27.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.27.0 // indirect
go.opentelemetry.io/otel/trace v1.27.0 // indirect
go.opentelemetry.io/proto/otlp v1.2.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/goleak v1.3.0 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.21.0 // indirect
golang.org/x/time v0.3.0 // indirect
gonum.org/v1/gonum v0.14.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 // indirect
golang.org/x/time v0.5.0 // indirect
gonum.org/v1/gonum v0.15.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240528184218-531527333157 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/square/go-jose.v2 v2.6.0 // indirect
k8s.io/klog/v2 v2.110.1 // indirect
k8s.io/utils v0.0.0-20230711102312-30195339c3c7 // indirect
k8s.io/client-go v0.29.3 // indirect
k8s.io/klog/v2 v2.120.1 // indirect
k8s.io/utils v0.0.0-20240502163921-fe8a2dddb1d0 // indirect
)
replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.11.0
replace github.com/prometheus/prometheus => github.com/SigNoz/prometheus v1.11.1

go.sum

File diff suppressed because it is too large

View File

@@ -16,7 +16,7 @@ func (aH *APIHandler) setApdexSettings(w http.ResponseWriter, r *http.Request) {
}
if err := dao.DB().SetApdexSettings(context.Background(), req); err != nil {
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal})
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
}
@@ -27,7 +27,7 @@ func (aH *APIHandler) getApdexSettings(w http.ResponseWriter, r *http.Request) {
services := r.URL.Query().Get("services")
apdexSet, err := dao.DB().GetApdexSettings(context.Background(), strings.Split(strings.TrimSpace(services), ","))
if err != nil {
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal})
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
}

View File

@@ -34,7 +34,7 @@ func (am *AuthMiddleware) ViewAccess(f func(http.ResponseWriter, *http.Request))
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
})
}, nil)
return
}
@@ -42,7 +42,7 @@ func (am *AuthMiddleware) ViewAccess(f func(http.ResponseWriter, *http.Request))
RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to viewers/editors/admins"),
})
}, nil)
return
}
ctx := context.WithValue(r.Context(), constants.ContextUserKey, user)
@@ -58,14 +58,14 @@ func (am *AuthMiddleware) EditAccess(f func(http.ResponseWriter, *http.Request))
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
})
}, nil)
return
}
if !(auth.IsEditor(user) || auth.IsAdmin(user)) {
RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to editors/admins"),
})
}, nil)
return
}
ctx := context.WithValue(r.Context(), constants.ContextUserKey, user)
@@ -81,7 +81,7 @@ func (am *AuthMiddleware) SelfAccess(f func(http.ResponseWriter, *http.Request))
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
})
}, nil)
return
}
id := mux.Vars(r)["id"]
@@ -89,7 +89,7 @@ func (am *AuthMiddleware) SelfAccess(f func(http.ResponseWriter, *http.Request))
RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible for self access or to the admins"),
})
}, nil)
return
}
ctx := context.WithValue(r.Context(), constants.ContextUserKey, user)
@@ -105,14 +105,14 @@ func (am *AuthMiddleware) AdminAccess(f func(http.ResponseWriter, *http.Request)
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
})
}, nil)
return
}
if !auth.IsAdmin(user) {
RespondError(w, &model.ApiError{
Typ: model.ErrorForbidden,
Err: errors.New("API is accessible to admins only"),
})
}, nil)
return
}
ctx := context.WithValue(r.Context(), constants.ContextUserKey, user)


@@ -27,10 +27,8 @@ import (
"github.com/pkg/errors"
"github.com/prometheus/common/promlog"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/remote"
"github.com/prometheus/prometheus/util/stats"
@@ -50,11 +48,11 @@ import (
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/dao"
chErrors "go.signoz.io/signoz/pkg/query-service/errors"
am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
chErrors "go.signoz.io/signoz/pkg/query-service/responseerrors"
"go.signoz.io/signoz/pkg/query-service/rules"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils"
@@ -262,14 +260,8 @@ func (r *ClickHouseReader) Start(readerReady chan bool) {
configFile: r.promConfigFile,
}
// fanoutStorage := remoteStorage
fanoutStorage := storage.NewFanout(logger, remoteStorage)
ctxScrape, cancelScrape := context.WithCancel(context.Background())
discoveryManagerScrape := discovery.NewManager(ctxScrape, log.With(logger, "component", "discovery manager scrape"), discovery.Name("scrape"))
scrapeManager := scrape.NewManager(nil, log.With(logger, "component", "scrape manager"), fanoutStorage)
opts := promql.EngineOpts{
Logger: log.With(logger, "component", "query engine"),
Reg: nil,
@@ -286,16 +278,6 @@ func (r *ClickHouseReader) Start(readerReady chan bool) {
reloaders := []func(cfg *config.Config) error{
remoteStorage.ApplyConfig,
// The Scrape managers need to reload before the Discovery manager as
// they need to read the most updated config when receiving the new targets list.
scrapeManager.ApplyConfig,
func(cfg *config.Config) error {
c := make(map[string]discovery.Configs)
for _, v := range cfg.ScrapeConfigs {
c[v.JobName] = v.ServiceDiscoveryConfigs
}
return discoveryManagerScrape.ApplyConfig(c)
},
}
// sync.Once is used to make sure we can close the channel at different execution stages(SIGTERM or when the config is loaded).
@@ -315,55 +297,11 @@ func (r *ClickHouseReader) Start(readerReady chan bool) {
}
var g group.Group
{
// Scrape discovery manager.
g.Add(
func() error {
err := discoveryManagerScrape.Run()
level.Info(logger).Log("msg", "Scrape discovery manager stopped")
return err
},
func(err error) {
level.Info(logger).Log("msg", "Stopping scrape discovery manager...")
cancelScrape()
},
)
}
{
// Scrape manager.
g.Add(
func() error {
// When the scrape manager receives a new targets list
// it needs to read a valid config for each job.
// It depends on the config being in sync with the discovery manager so
// we wait until the config is fully loaded.
<-reloadReady.C
err := scrapeManager.Run(discoveryManagerScrape.SyncCh())
level.Info(logger).Log("msg", "Scrape manager stopped")
return err
},
func(err error) {
// Scrape manager needs to be stopped before closing the local TSDB
// so that it doesn't try to write samples to a closed storage.
level.Info(logger).Log("msg", "Stopping scrape manager...")
scrapeManager.Stop()
},
)
}
{
// Initial configuration loading.
cancel := make(chan struct{})
g.Add(
func() error {
// select {
// case <-dbOpen:
// break
// // In case a shutdown is initiated before the dbOpen is released
// case <-cancel:
// reloadReady.Close()
// return nil
// }
var err error
r.promConfig, err = reloadConfig(cfg.configFile, logger, reloaders...)
if err != nil {
@@ -3081,7 +3019,7 @@ func (r *ClickHouseReader) GetMetricResultEE(ctx context.Context, query string)
// GetMetricResult runs the query and returns list of time series
func (r *ClickHouseReader) GetMetricResult(ctx context.Context, query string) ([]*model.Series, error) {
defer utils.Elapsed("GetMetricResult")()
defer utils.Elapsed("GetMetricResult", nil)()
zap.L().Info("Executing metric result query: ", zap.String("query", query))
@@ -4594,26 +4532,27 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
return seriesList, getPersonalisedError(rows.Err())
}
func logComment(ctx context.Context) string {
// Get the key-value pairs from context for log comment
func logCommentKVs(ctx context.Context) map[string]string {
kv := ctx.Value(common.LogCommentKey)
if kv == nil {
return ""
return nil
}
logCommentKVs, ok := kv.(map[string]string)
if !ok {
return ""
return nil
}
x, _ := json.Marshal(logCommentKVs)
return string(x)
return logCommentKVs
}
// GetTimeSeriesResultV3 runs the query and returns list of time series
func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query string) ([]*v3.Series, error) {
defer utils.Elapsed("GetTimeSeriesResultV3", query, fmt.Sprintf("logComment: %s", logComment(ctx)))()
ctxArgs := map[string]interface{}{"query": query}
for k, v := range logCommentKVs(ctx) {
ctxArgs[k] = v
}
defer utils.Elapsed("GetTimeSeriesResultV3", ctxArgs)()
rows, err := r.db.Query(ctx, query)
@@ -4655,7 +4594,12 @@ func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query stri
// GetListResultV3 runs the query and returns list of rows
func (r *ClickHouseReader) GetListResultV3(ctx context.Context, query string) ([]*v3.Row, error) {
defer utils.Elapsed("GetListResultV3", query, fmt.Sprintf("logComment: %s", logComment(ctx)))()
ctxArgs := map[string]interface{}{"query": query}
for k, v := range logCommentKVs(ctx) {
ctxArgs[k] = v
}
defer utils.Elapsed("GetListResultV3", ctxArgs)()
rows, err := r.db.Query(ctx, query)
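The two query functions above no longer serialize the log-comment key-values into a JSON string for logging. logComment returned a string; logCommentKVs returns the raw map, and both GetTimeSeriesResultV3 and GetListResultV3 merge it into the ctxArgs map handed to utils.Elapsed, so each key becomes its own structured log field. A self-contained sketch of that merge, with a simplified context key standing in for common.LogCommentKey:

package reader

import "context"

type ctxKey string

// logCommentKey stands in for common.LogCommentKey from the diff above.
const logCommentKey ctxKey = "log_comment"

func logCommentKVs(ctx context.Context) map[string]string {
	kv, _ := ctx.Value(logCommentKey).(map[string]string)
	return kv // nil when the context carries no log comment
}

// buildElapsedArgs mirrors the ctxArgs construction in the hunks above: the
// query string plus every log-comment key-value, merged into one map.
func buildElapsedArgs(ctx context.Context, query string) map[string]interface{} {
	args := map[string]interface{}{"query": query}
	for k, v := range logCommentKVs(ctx) { // ranging over a nil map is a no-op
		args[k] = v
	}
	return args
}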


@@ -16,6 +16,7 @@ import (
"github.com/jmoiron/sqlx"
"github.com/mitchellh/mapstructure"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -42,7 +43,7 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
return nil, err
}
tableSchema := `CREATE TABLE IF NOT EXISTS dashboards (
table_schema := `CREATE TABLE IF NOT EXISTS dashboards (
id INTEGER PRIMARY KEY AUTOINCREMENT,
uuid TEXT NOT NULL UNIQUE,
created_at datetime NOT NULL,
@@ -50,24 +51,24 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
data TEXT NOT NULL
);`
_, err = db.Exec(tableSchema)
_, err = db.Exec(table_schema)
if err != nil {
return nil, fmt.Errorf("error in creating dashboard table: %s", err.Error())
}
tableSchema = `CREATE TABLE IF NOT EXISTS rules (
table_schema = `CREATE TABLE IF NOT EXISTS rules (
id INTEGER PRIMARY KEY AUTOINCREMENT,
updated_at datetime NOT NULL,
deleted INTEGER DEFAULT 0,
data TEXT NOT NULL
);`
_, err = db.Exec(tableSchema)
_, err = db.Exec(table_schema)
if err != nil {
return nil, fmt.Errorf("error in creating rules table: %s", err.Error())
}
tableSchema = `CREATE TABLE IF NOT EXISTS notification_channels (
table_schema = `CREATE TABLE IF NOT EXISTS notification_channels (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at datetime NOT NULL,
updated_at datetime NOT NULL,
@@ -77,12 +78,12 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
data TEXT NOT NULL
);`
_, err = db.Exec(tableSchema)
_, err = db.Exec(table_schema)
if err != nil {
return nil, fmt.Errorf("error in creating notification_channles table: %s", err.Error())
}
tableSchema = `CREATE TABLE IF NOT EXISTS planned_maintenance (
tableSchema := `CREATE TABLE IF NOT EXISTS planned_maintenance (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
description TEXT,
@@ -98,7 +99,7 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
return nil, fmt.Errorf("error in creating planned_maintenance table: %s", err.Error())
}
tableSchema = `CREATE TABLE IF NOT EXISTS ttl_status (
table_schema = `CREATE TABLE IF NOT EXISTS ttl_status (
id INTEGER PRIMARY KEY AUTOINCREMENT,
transaction_id TEXT NOT NULL,
created_at datetime NOT NULL,
@@ -109,7 +110,7 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) {
status TEXT NOT NULL
);`
_, err = db.Exec(tableSchema)
_, err = db.Exec(table_schema)
if err != nil {
return nil, fmt.Errorf("error in creating ttl_status table: %s", err.Error())
}
@@ -191,7 +192,7 @@ func (c *Data) Scan(src interface{}) error {
}
// CreateDashboard creates a new dashboard
func CreateDashboard(ctx context.Context, data map[string]interface{}) (*Dashboard, *model.ApiError) {
func CreateDashboard(ctx context.Context, data map[string]interface{}, fm interfaces.FeatureLookup) (*Dashboard, *model.ApiError) {
dash := &Dashboard{
Data: data,
}
@@ -212,7 +213,15 @@ func CreateDashboard(ctx context.Context, data map[string]interface{}) (*Dashboa
mapData, err := json.Marshal(dash.Data)
if err != nil {
zap.L().Error("Error in marshalling data field in dashboard: ", zap.Any("dashboard", dash), zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
newCount, _ := countTraceAndLogsPanel(data)
if newCount > 0 {
fErr := checkFeatureUsage(fm, newCount)
if fErr != nil {
return nil, fErr
}
}
result, err := db.Exec("INSERT INTO dashboards (uuid, created_at, created_by, updated_at, updated_by, data) VALUES ($1, $2, $3, $4, $5, $6)",
@@ -220,14 +229,19 @@ func CreateDashboard(ctx context.Context, data map[string]interface{}) (*Dashboa
if err != nil {
zap.L().Error("Error in inserting dashboard data: ", zap.Any("dashboard", dash), zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
lastInsertId, err := result.LastInsertId()
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
dash.Id = int(lastInsertId)
traceAndLogsPanelUsage, _ := countTraceAndLogsPanel(data)
if traceAndLogsPanelUsage > 0 {
updateFeatureUsage(fm, traceAndLogsPanelUsage)
}
return dash, nil
}
@@ -238,13 +252,13 @@ func GetDashboards(ctx context.Context) ([]Dashboard, *model.ApiError) {
err := db.Select(&dashboards, query)
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
return dashboards, nil
}
func DeleteDashboard(ctx context.Context, uuid string) *model.ApiError {
func DeleteDashboard(ctx context.Context, uuid string, fm interfaces.FeatureLookup) *model.ApiError {
dashboard, dErr := GetDashboard(ctx, uuid)
if dErr != nil {
@@ -260,9 +274,22 @@ func DeleteDashboard(ctx context.Context, uuid string) *model.ApiError {
query := `DELETE FROM dashboards WHERE uuid=?`
_, err := db.Exec(query, uuid)
result, err := db.Exec(query, uuid)
if err != nil {
return &model.ApiError{Typ: model.ErrorInternal, Err: err}
return &model.ApiError{Typ: model.ErrorExec, Err: err}
}
affectedRows, err := result.RowsAffected()
if err != nil {
return &model.ApiError{Typ: model.ErrorExec, Err: err}
}
if affectedRows == 0 {
return &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no dashboard found with uuid: %s", uuid)}
}
traceAndLogsPanelUsage, _ := countTraceAndLogsPanel(dashboard.Data)
if traceAndLogsPanelUsage > 0 {
updateFeatureUsage(fm, -traceAndLogsPanelUsage)
}
return nil
@@ -281,7 +308,7 @@ func GetDashboard(ctx context.Context, uuid string) (*Dashboard, *model.ApiError
return &dashboard, nil
}
func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface{}) (*Dashboard, *model.ApiError) {
func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface{}, fm interfaces.FeatureLookup) (*Dashboard, *model.ApiError) {
mapData, err := json.Marshal(data)
if err != nil {
@@ -303,8 +330,14 @@ func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface
}
// check if the count of trace and logs QB panel has changed, if yes, then check feature flag count
_, existingTotal := countTraceAndLogsPanel(dashboard.Data)
_, newTotal := countTraceAndLogsPanel(data)
existingCount, existingTotal := countTraceAndLogsPanel(dashboard.Data)
newCount, newTotal := countTraceAndLogsPanel(data)
if newCount > existingCount {
err := checkFeatureUsage(fm, newCount-existingCount)
if err != nil {
return nil, err
}
}
if existingTotal > newTotal && existingTotal-newTotal > 1 {
// if the total count of panels has reduced by more than 1,
@@ -317,6 +350,7 @@ func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface
if len(differenceIds) > 1 {
return nil, model.BadRequest(fmt.Errorf("deleting more than one panel is not supported"))
}
}
dashboard.UpdatedAt = time.Now()
@@ -328,7 +362,11 @@ func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface
if err != nil {
zap.L().Error("Error in inserting dashboard data", zap.Any("data", data), zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
if existingCount != newCount {
// if the count of trace and logs panel has changed, we need to update feature flag count as well
updateFeatureUsage(fm, newCount-existingCount)
}
return dashboard, nil
}
@@ -351,6 +389,51 @@ func LockUnlockDashboard(ctx context.Context, uuid string, lock bool) *model.Api
return nil
}
func updateFeatureUsage(fm interfaces.FeatureLookup, usage int64) *model.ApiError {
feature, err := fm.GetFeatureFlag(model.QueryBuilderPanels)
if err != nil {
switch err.(type) {
case model.ErrFeatureUnavailable:
zap.L().Error("feature unavailable", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err))
return model.BadRequest(err)
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err))
return model.BadRequest(err)
}
}
feature.Usage += usage
if feature.Usage >= feature.UsageLimit && feature.UsageLimit != -1 {
feature.Active = false
}
if feature.Usage < feature.UsageLimit || feature.UsageLimit == -1 {
feature.Active = true
}
err = fm.UpdateFeatureFlag(feature)
if err != nil {
return model.BadRequest(err)
}
return nil
}
func checkFeatureUsage(fm interfaces.FeatureLookup, usage int64) *model.ApiError {
feature, err := fm.GetFeatureFlag(model.QueryBuilderPanels)
if err != nil {
switch err.(type) {
case model.ErrFeatureUnavailable:
zap.L().Error("feature unavailable", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err))
return model.BadRequest(err)
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.QueryBuilderPanels), zap.Error(err))
return model.BadRequest(err)
}
}
if feature.UsageLimit-(feature.Usage+usage) < 0 && feature.UsageLimit != -1 {
return model.BadRequest(fmt.Errorf("feature usage exceeded"))
}
return nil
}
// UpdateSlug updates the slug
func (d *Dashboard) UpdateSlug() {
var title string
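The new updateFeatureUsage and checkFeatureUsage helpers above gate query-builder trace and logs panels behind a usage-limited feature flag (model.QueryBuilderPanels). checkFeatureUsage rejects a change when the requested panels would push usage past the limit, with a limit of -1 treated as unlimited; updateFeatureUsage then adjusts the counter and flips Active depending on which side of the limit it lands. A compact sketch of the limit check with illustrative numbers; the struct is a hypothetical stand-in for the flag returned by fm.GetFeatureFlag:

package dashboards

import "fmt"

// featureFlag is a stand-in; field names follow the diff, the example values
// in the comments are made up for illustration.
type featureFlag struct {
	Usage      int64
	UsageLimit int64 // -1 means unlimited
}

func checkUsage(f featureFlag, requested int64) error {
	// Example: Usage=18, UsageLimit=20, requested=3 -> 20-(18+3) < 0 -> rejected.
	// Example: UsageLimit=-1 -> always allowed.
	if f.UsageLimit-(f.Usage+requested) < 0 && f.UsageLimit != -1 {
		return fmt.Errorf("feature usage exceeded")
	}
	return nil
}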


@@ -46,7 +46,7 @@ func readCurrentDir(dir string, fm interfaces.FeatureLookup) error {
id := data["uuid"]
if id == nil {
_, apiErr := CreateDashboard(context.Background(), data)
_, apiErr := CreateDashboard(context.Background(), data, fm)
if apiErr != nil {
zap.L().Error("Creating Dashboards: Error in file", zap.String("filename", filename), zap.Error(apiErr.Err))
}
@@ -65,12 +65,12 @@ func upsertDashboard(uuid string, data map[string]interface{}, filename string,
_, apiErr := GetDashboard(context.Background(), uuid)
if apiErr == nil {
zap.S().Infof("Creating Dashboards: Already exists: %s\t%s", filename, "Dashboard already present in database, Updating dashboard")
_, apiErr := UpdateDashboard(context.Background(), uuid, data)
_, apiErr := UpdateDashboard(context.Background(), uuid, data, fm)
return apiErr
}
zap.S().Infof("Creating Dashboards: UUID not found: %s\t%s", filename, "Dashboard not present in database, Creating dashboard")
_, apiErr = CreateDashboard(context.Background(), data)
_, apiErr = CreateDashboard(context.Background(), data, fm)
return apiErr
}

File diff suppressed because it is too large


@@ -15,7 +15,7 @@ func (aH *APIHandler) insertIngestionKey(w http.ResponseWriter, r *http.Request)
}
if err := dao.DB().InsertIngestionKey(context.Background(), req); err != nil {
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal})
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
}
@@ -25,7 +25,7 @@ func (aH *APIHandler) insertIngestionKey(w http.ResponseWriter, r *http.Request)
func (aH *APIHandler) getIngestionKeys(w http.ResponseWriter, r *http.Request) {
ingestionKeys, err := dao.DB().GetIngestionKeys(context.Background())
if err != nil {
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal})
RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorInternal}, nil)
return
}


@@ -27,8 +27,8 @@ func (bi *BuiltInIntegrations) list(ctx context.Context) (
[]IntegrationDetails, *model.ApiError,
) {
integrations := maps.Values(builtInIntegrations)
slices.SortFunc(integrations, func(i1, i2 IntegrationDetails) bool {
return i1.Id < i2.Id
slices.SortFunc(integrations, func(i1, i2 IntegrationDetails) int {
return strings.Compare(i1.Id, i2.Id)
})
return integrations, nil
}
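This hunk tracks the newer slices.SortFunc signature (adopted by the standard library in Go 1.21 and by recent golang.org/x/exp/slices versions): the comparator returns an int that is negative, zero, or positive instead of a less-style bool, and strings.Compare satisfies that contract directly. A minimal standalone example using the standard-library slices package:

package main

import (
	"fmt"
	"slices"
	"strings"
)

func main() {
	ids := []string{"clickhouse-logs", "aws-rds", "mongo"}
	// cmp(a, b) must return <0, 0, or >0; strings.Compare does exactly that.
	slices.SortFunc(ids, func(a, b string) int {
		return strings.Compare(a, b)
	})
	fmt.Println(ids) // [aws-rds clickhouse-logs mongo]
}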


@@ -3,6 +3,7 @@ package logparsingpipeline
import (
"context"
"sort"
"strings"
"time"
_ "github.com/SigNoz/signoz-otel-collector/pkg/parser/grok"
@@ -90,7 +91,15 @@ func SimulatePipelinesProcessing(
delete(sigLog.Attributes_int64, inputOrderAttribute)
}
return outputSignozLogs, collectorErrs, nil
for _, log := range collectorErrs {
// if log is empty or log comes from featuregate.go, then remove it
if log == "" || strings.Contains(log, "featuregate.go") {
continue
}
collectorWarnAndErrorLogs = append(collectorWarnAndErrorLogs, log)
}
return outputSignozLogs, collectorWarnAndErrorLogs, nil
}
// plog doesn't contain an ID field.
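Instead of returning the raw collector error slice, the simulation above now drops empty entries and lines originating from featuregate.go before returning; the collector simulator hunk later in this diff applies the same filter. A standalone sketch of that filter as a helper:

package logparsingpipeline

import "strings"

// filterCollectorLogs mirrors the loop above: keep only non-empty lines that
// did not come from featuregate.go.
func filterCollectorLogs(logs []string) []string {
	filtered := make([]string, 0, len(logs))
	for _, log := range logs {
		if log == "" || strings.Contains(log, "featuregate.go") {
			continue
		}
		filtered = append(filtered, log)
	}
	return filtered
}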


@@ -958,6 +958,9 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("cannot parse the request body: %v", err)}
}
// sanitize the request body
queryRangeParams.CompositeQuery.Sanitize()
// validate the request body
if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err}


@@ -14,7 +14,7 @@ import (
metricsV3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
chErrors "go.signoz.io/signoz/pkg/query-service/responseerrors"
chErrors "go.signoz.io/signoz/pkg/query-service/errors"
"go.signoz.io/signoz/pkg/query-service/cache"
"go.signoz.io/signoz/pkg/query-service/interfaces"


@@ -14,7 +14,7 @@ import (
metricsV4 "go.signoz.io/signoz/pkg/query-service/app/metrics/v4"
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
chErrors "go.signoz.io/signoz/pkg/query-service/responseerrors"
chErrors "go.signoz.io/signoz/pkg/query-service/errors"
"go.signoz.io/signoz/pkg/query-service/cache"
"go.signoz.io/signoz/pkg/query-service/interfaces"


@@ -119,12 +119,12 @@ func NewCollectorSimulator(
return nil, cleanupFn, model.InternalError(errors.Wrap(err, "could not close tmp simulation config file"))
}
fp := fileprovider.New()
fp := fileprovider.NewFactory()
confProvider, err := otelcol.NewConfigProvider(otelcol.ConfigProviderSettings{
ResolverSettings: confmap.ResolverSettings{
URIs: []string{simulationConfigPath},
Providers: map[string]confmap.Provider{fp.Scheme(): fp},
Converters: []confmap.Converter{expandconverter.New()},
URIs: []string{simulationConfigPath},
ProviderFactories: []confmap.ProviderFactory{fp},
ConverterFactories: []confmap.ConverterFactory{expandconverter.NewFactory()},
},
})
if err != nil {
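Recent OpenTelemetry Collector releases construct confmap providers and converters through factories, so the simulator now passes fileprovider.NewFactory() and expandconverter.NewFactory() via ProviderFactories and ConverterFactories instead of instantiated Provider and Converter values. A sketch of the new resolver wiring; the import paths are the usual upstream collector modules and are an assumption, since the import block is not part of this hunk.

package collectorsimulator

import (
	"go.opentelemetry.io/collector/confmap"
	"go.opentelemetry.io/collector/confmap/converter/expandconverter"
	"go.opentelemetry.io/collector/confmap/provider/fileprovider"
	"go.opentelemetry.io/collector/otelcol"
)

// newConfigProvider builds a config provider the factory-based way shown above.
func newConfigProvider(configPath string) (otelcol.ConfigProvider, error) {
	return otelcol.NewConfigProvider(otelcol.ConfigProviderSettings{
		ResolverSettings: confmap.ResolverSettings{
			URIs:               []string{configPath},
			ProviderFactories:  []confmap.ProviderFactory{fileprovider.NewFactory()},
			ConverterFactories: []confmap.ConverterFactory{expandconverter.NewFactory()},
		},
	})
}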


@@ -28,7 +28,7 @@ func createLogsExporter(
func NewFactory() exporter.Factory {
return exporter.NewFactory(
"memory",
component.MustNewType("memory"),
createDefaultConfig,
exporter.WithLogs(createLogsExporter, component.StabilityLevelBeta))
}


@@ -35,7 +35,7 @@ func createLogsReceiver(
// NewFactory creates a new OTLP receiver factory.
func NewFactory() receiver.Factory {
return receiver.NewFactory(
"memory",
component.MustNewType("memory"),
createDefaultConfig,
receiver.WithLogs(createLogsReceiver, component.StabilityLevelBeta))
}
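Both the in-memory exporter and receiver factories now identify themselves with a component.Type built via component.MustNewType instead of a bare string, matching the typed component IDs in newer collector releases; MustNewType validates the name and panics on an invalid one, which is fine for a literal like "memory". A minimal sketch, assuming the standard collector component package:

package inmemoryexporter

import "go.opentelemetry.io/collector/component"

// componentType is validated once at package init; MustNewType panics only if
// the literal is not a valid component type name.
var componentType = component.MustNewType("memory")

func exporterType() component.Type {
	return componentType
}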


@@ -3,6 +3,7 @@ package collectorsimulator
import (
"context"
"fmt"
"strings"
"time"
"github.com/pkg/errors"
@@ -68,7 +69,15 @@ func SimulateLogsProcessing(
)
}
return result, simulationErrs, nil
for _, log := range simulationErrs {
// if log is empty or log comes from featuregate.go, then remove it
if log == "" || strings.Contains(log, "featuregate.go") {
continue
}
collectorErrs = append(collectorErrs, log)
}
return result, collectorErrs, nil
}
func SendLogsToSimulator(


@@ -37,7 +37,7 @@ type Queries interface {
GetIngestionKeys(ctx context.Context) ([]model.IngestionKey, *model.ApiError)
PrecheckLogin(ctx context.Context, email, sourceUrl string) (*model.PrecheckResponse, *model.ApiError)
PrecheckLogin(ctx context.Context, email, sourceUrl string) (*model.PrecheckResponse, model.BaseApiError)
}
type Mutations interface {


@@ -596,7 +596,7 @@ func (mds *ModelDaoSqlite) UpdateUserFlags(ctx context.Context, userId string, f
return flags, nil
}
func (mds *ModelDaoSqlite) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*model.PrecheckResponse, *model.ApiError) {
func (mds *ModelDaoSqlite) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*model.PrecheckResponse, model.BaseApiError) {
// assume user is valid unless proven otherwise and assign default values for rest of the fields
resp := &model.PrecheckResponse{IsUser: true, CanSelfRegister: false, SSO: false, SsoUrl: "", SsoError: ""}


@@ -1,8 +1,6 @@
package responseerrors
package errors
import (
"errors"
)
import "errors"
var (
// ErrResourceBytesLimitExceeded is returned when the resource bytes limit is exceeded


@@ -0,0 +1,70 @@
package alertscustomstep
import (
"context"
"encoding/json"
"time"
"github.com/jmoiron/sqlx"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/rules"
"go.uber.org/multierr"
"go.uber.org/zap"
)
var Version = "0.47-alerts-custom-step"
func Migrate(conn *sqlx.DB) error {
ruleDB := rules.NewRuleDB(conn)
storedRules, err := ruleDB.GetStoredRules(context.Background())
if err != nil {
return err
}
for _, storedRule := range storedRules {
parsedRule, errs := rules.ParsePostableRule([]byte(storedRule.Data))
if len(errs) > 0 {
// this should not happen but if it does, we should not stop the migration
zap.L().Error("Error parsing rule", zap.Error(multierr.Combine(errs...)), zap.Int("rule", storedRule.Id))
continue
}
zap.L().Info("Rule parsed", zap.Int("rule", storedRule.Id))
updated := false
if parsedRule.RuleCondition != nil {
if parsedRule.RuleCondition.QueryType() == v3.QueryTypeBuilder {
if parsedRule.EvalWindow <= rules.Duration(6*time.Hour) {
for _, query := range parsedRule.RuleCondition.CompositeQuery.BuilderQueries {
if query.StepInterval > 60 {
updated = true
zap.L().Info("Updating step interval", zap.Int("rule", storedRule.Id), zap.Int64("old", query.StepInterval), zap.Int64("new", 60))
query.StepInterval = 60
}
}
}
}
}
if !updated {
zap.L().Info("Rule not updated", zap.Int("rule", storedRule.Id))
continue
}
ruleJSON, jsonErr := json.Marshal(parsedRule)
if jsonErr != nil {
zap.L().Error("Error marshalling rule; skipping rule migration", zap.Error(jsonErr), zap.Int("rule", storedRule.Id))
continue
}
stmt, prepareError := conn.PrepareContext(context.Background(), `UPDATE rules SET data=$3 WHERE id=$4;`)
if prepareError != nil {
zap.L().Error("Error in preparing statement for UPDATE to rules", zap.Error(prepareError))
continue
}
defer stmt.Close()
if _, err := stmt.Exec(ruleJSON, storedRule.Id); err != nil {
zap.L().Error("Error in Executing prepared statement for UPDATE to rules", zap.Error(err))
}
}
return nil
}
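This new migration walks every stored alert rule and, for builder-query rules whose evaluation window is at most six hours, caps any step interval above 60 down to 60 before writing the rule back (the unit appears to be seconds, judging by the value 60). A small function capturing just that condition, with the surrounding rule parsing and SQL left out:

package alertscustomstep

import "time"

// capStepInterval mirrors the migration's core rule: builder-query alerts with
// an eval window of 6h or less get any larger step interval clamped to 60.
func capStepInterval(evalWindow time.Duration, stepInterval int64) int64 {
	if evalWindow <= 6*time.Hour && stepInterval > 60 {
		return 60
	}
	return stepInterval
}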


@@ -5,6 +5,7 @@ import (
"github.com/jmoiron/sqlx"
alertstov4 "go.signoz.io/signoz/pkg/query-service/migrate/0_45_alerts_to_v4"
alertscustomstep "go.signoz.io/signoz/pkg/query-service/migrate/0_47_alerts_custom_step"
"go.uber.org/zap"
)
@@ -63,5 +64,16 @@ func Migrate(dsn string) error {
}
}
if m, err := getMigrationVersion(conn, "0.47_alerts_custom_step"); err == nil && m == nil {
if err := alertscustomstep.Migrate(conn); err != nil {
zap.L().Error("failed to migrate 0.47_alerts_custom_step", zap.Error(err))
} else {
_, err := conn.Exec("INSERT INTO data_migrations (version, succeeded) VALUES ('0.47_alerts_custom_step', true)")
if err != nil {
return err
}
}
}
return nil
}


@@ -14,25 +14,6 @@ import (
"k8s.io/apimachinery/pkg/labels"
)
type status string
const (
StatusSuccess status = "success"
StatusError status = "error"
)
type APIResponse struct {
Status status `json:"status"`
Data interface{} `json:"data,omitempty"`
Error StructuredError `json:"error,omitempty"`
}
type StructuredError struct {
Msg string `json:"msg"`
DocURL string `json:"docURL,omitempty"`
Errors []StructuredError `json:"errors,omitempty"`
}
type BaseApiError interface {
Type() ErrorType
ToError() error
@@ -41,10 +22,8 @@ type BaseApiError interface {
}
type ApiError struct {
Typ ErrorType
Err error
DocURL string
Errors []StructuredError
Typ ErrorType
Err error
}
func (a *ApiError) Type() ErrorType {
@@ -84,8 +63,8 @@ const (
ErrorUnauthorized ErrorType = "unauthorized"
ErrorForbidden ErrorType = "forbidden"
ErrorConflict ErrorType = "conflict"
ErrorStreamingNotSupported ErrorType = "streaming_is_not_supported"
ErrorStatusServiceUnavailable ErrorType = "service_unavailable"
ErrorStreamingNotSupported ErrorType = "streaming is not supported"
ErrorStatusServiceUnavailable ErrorType = "service unavailable"
)
// BadRequest returns a ApiError object of bad request
@@ -96,6 +75,14 @@ func BadRequest(err error) *ApiError {
}
}
// BadRequestStr returns a ApiError object of bad request
func BadRequestStr(s string) *ApiError {
return &ApiError{
Typ: ErrorBadData,
Err: fmt.Errorf(s),
}
}
// InternalError returns a ApiError object of internal type
func InternalError(err error) *ApiError {
return &ApiError{


@@ -428,6 +428,16 @@ func (c *CompositeQuery) EnabledQueries() int {
return count
}
func (c *CompositeQuery) Sanitize() {
// remove groupBy for queries with list panel type
for _, query := range c.BuilderQueries {
if len(query.GroupBy) > 0 && c.PanelType == PanelTypeList {
query.GroupBy = []AttributeKey{}
}
}
}
func (c *CompositeQuery) Validate() error {
if c == nil {
return fmt.Errorf("composite query is required")
@@ -747,9 +757,9 @@ func (b *BuilderQuery) Validate(panelType PanelType) error {
}
}
if b.GroupBy != nil {
if len(b.GroupBy) > 0 && panelType == PanelTypeList {
return fmt.Errorf("group by is not supported for list panel type")
}
// if len(b.GroupBy) > 0 && panelType == PanelTypeList {
// return fmt.Errorf("group by is not supported for list panel type")
// }
for _, groupBy := range b.GroupBy {
if err := groupBy.Validate(); err != nil {


@@ -484,6 +484,10 @@ func (r *ThresholdRule) prepareQueryRange(ts time.Time) *v3.QueryRangeParamsV3 {
}
}
if r.ruleCondition.CompositeQuery.PanelType != v3.PanelTypeGraph {
r.ruleCondition.CompositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,


@@ -460,6 +460,7 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEma
if userEmail != "" {
a.SetUserEmail(userEmail)
a.SetCompanyDomain(userEmail)
}
if !a.isTelemetryEnabled() {


@@ -537,7 +537,7 @@ func (tb *LogPipelinesTestBed) PostPipelinesToQSExpectingStatusCode(
)
}
var result model.APIResponse
var result app.ApiResponse
err = json.Unmarshal(responseBody, &result)
if err != nil {
tb.t.Fatalf(
@@ -587,7 +587,7 @@ func (tb *LogPipelinesTestBed) GetPipelinesFromQS() *logparsingpipeline.Pipeline
)
}
var result model.APIResponse
var result app.ApiResponse
err = json.Unmarshal(responseBody, &result)
if err != nil {
tb.t.Fatalf(
@@ -718,7 +718,7 @@ func (tb *LogPipelinesTestBed) assertNewAgentGetsPipelinesOnConnection(
)
}
func unmarshalPipelinesResponse(apiResponse *model.APIResponse) (
func unmarshalPipelinesResponse(apiResponse *app.ApiResponse) (
*logparsingpipeline.PipelinesResponse,
error,
) {


@@ -500,7 +500,7 @@ func (tb *IntegrationsTestBed) GetDashboardByIdFromQS(dashboardUuid string) *das
func (tb *IntegrationsTestBed) RequestQS(
path string,
postData interface{},
) *model.APIResponse {
) *app.ApiResponse {
req, err := NewAuthenticatedTestRequest(
tb.testUser, path, postData,
)
@@ -523,7 +523,7 @@ func (tb *IntegrationsTestBed) RequestQS(
)
}
var result model.APIResponse
var result app.ApiResponse
err = json.Unmarshal(responseBody, &result)
if err != nil {
tb.t.Fatalf(


@@ -192,7 +192,7 @@ services:
<<: *db-depend
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.0}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -205,7 +205,7 @@ services:
# condition: service_healthy
otel-collector:
image: signoz/signoz-otel-collector:0.88.24
image: signoz/signoz-otel-collector:0.102.0
container_name: signoz-otel-collector
command:
[


@@ -1,20 +1,19 @@
package utils
import (
"fmt"
"time"
"go.uber.org/zap"
)
func Elapsed(funcName string, args ...interface{}) func() {
func Elapsed(funcName string, args map[string]interface{}) func() {
start := time.Now()
argsStr := ""
for _, v := range args {
argsStr += fmt.Sprintf("%v, ", v)
}
argsStr = argsStr[:len(argsStr)-2]
return func() {
zap.L().Info("Elapsed time", zap.String("func_name", funcName), zap.Duration("duration", time.Since(start)), zap.String("args", argsStr))
var zapFields []zap.Field
zapFields = append(zapFields, zap.String("func_name", funcName), zap.Duration("duration", time.Since(start)))
for k, v := range args {
zapFields = append(zapFields, zap.Any(k, v))
}
zap.L().Info("Elapsed time", zapFields...)
}
}
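Elapsed now takes a map of structured fields instead of a variadic argument list and emits each entry as its own zap field; the removed version built a comma-joined string and unconditionally trimmed the trailing separator, which assumes at least one argument, whereas a nil map is a valid "no extra fields" value (as the GetMetricResult call site earlier in this diff shows). A self-contained usage sketch with a local stand-in for the helper:

package main

import (
	"time"

	"go.uber.org/zap"
)

// elapsed has the same shape as the updated utils.Elapsed above.
func elapsed(funcName string, args map[string]interface{}) func() {
	start := time.Now()
	return func() {
		fields := []zap.Field{zap.String("func_name", funcName), zap.Duration("duration", time.Since(start))}
		for k, v := range args { // a nil map ranges as empty, so nil means "no extra fields"
			fields = append(fields, zap.Any(k, v))
		}
		zap.L().Info("Elapsed time", fields...)
	}
}

func main() {
	logger, _ := zap.NewDevelopment()
	zap.ReplaceGlobals(logger)
	defer elapsed("GetTimeSeriesResultV3", map[string]interface{}{"query": "SELECT 1"})()
	time.Sleep(10 * time.Millisecond)
}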