Compare commits

...

54 Commits

Author SHA1 Message Date
nityanandagohain
781732f25a Merge remote-tracking branch 'origin/develop' into issue_4777 2024-03-30 19:22:20 +05:30
nityanandagohain
77e55a0ec9 feat: allow query restrictions for log queries 2024-03-30 18:41:53 +05:30
Nityananda Gohain
a34c59762b feat: allow characters in attribute names (#4775) 2024-03-30 17:57:01 +05:30
Nityananda Gohain
397da5857f fix: enrich all queries with non materialized attributes (#4772) 2024-03-30 08:55:46 +05:30
Vikrant Gupta
43ceb052d8 feat: do not retry query range API's with i/o timeout error (#4768)
* feat: do not retry query range API's with i/o timeout error

* feat: do not retry query range API's with i/o timeout error
2024-03-29 16:00:22 +05:30
Yunus M
6eced60bf5 feat: update time range selection flows to handle relative and absolu… (#4742)
* feat: update time range selection flows to handle relative and absolute times

* fix: lint error

* fix: lint error

* feat: update logic to handle custom relative times on load and standardize relative time formats

* fix: type issue

* fix: handle light mode and on custom time range select

* chore: update alert frequency corresponding times

* chore: update copy URL

* feat: update styles
2024-03-29 14:53:48 +05:30
Rajat Dabade
7c2f5352d2 [Refactor]: Table Grid Formula issue. (#4758)
* refactor: change the logic to match data from another query

* refactor: updated logic

* refactor: clean up

* refactor: updated case to handle formula

* chore: nit

* refactor: isEqual instead of nested loops

* chore: added comments

* refactor: updated logic

* refactor: clean up

* refactor: updated case to handle formula

* chore: nit

* refactor: isEqual instead of nested loops
2024-03-29 14:41:16 +05:30
Vikrant Gupta
e6e377beff fix: billing graph page crash (#4764) 2024-03-29 11:08:33 +05:30
Prashant Shahi
6da9de6591 Merge pull request #4588 from SigNoz/chore/send-language-service-as-list
chore: send language and service name events as list
2024-03-28 22:21:55 +05:45
Vishal Sharma
7549aee656 Merge branch 'develop' into chore/send-language-service-as-list 2024-03-28 21:54:16 +05:30
Vishal Sharma
da4a6266c5 feat: add events API (#4761) 2024-03-28 21:43:41 +05:30
Vishal Sharma
6ac938f2a6 Merge branch 'develop' into chore/send-language-service-as-list 2024-03-28 21:40:57 +05:30
Raj Kamal Singh
990fc83269 Feat/integrations v0 mongo and nginx (#4763)
* feat: flesh out pre-requisites for collecting mongodb logs and metrics

* chore: remove stale pipelines in bundled integrations

* chore: clean up 'collect metrics' step for mongodb

* feat: add instructions for collecting and parsing mongodb logs

* feat: add metrics and logs attributes to mongodb data collected list

* feat: nginx logs collection instructions and some other cleanup

* feat: add list of parsed log attributes to data collected list for nginx

* chore: do not run pipeline population integration test if no built-in integration has a pipeline
2024-03-28 19:57:07 +05:30
Yunus M
5d5ff47d5e fix: update devtool property to eval-source-map (#4760) 2024-03-28 16:58:35 +05:30
Yunus M
9f30bba9a8 feat: add support to pin attributes in logs details view (#4692)
* feat: add support to pin attributes in logs details view

* feat: add safety checks

* feat: update styles

* feat: update styles

* feat: move json parsing in try catch block
2024-03-28 16:55:59 +05:30
Yunus M
6014bb76b6 feat: support drag select in chart - alerts page (#4618)
* feat: support drag select in chart - alerts page

* feat: handle back navigation after drag select
2024-03-28 16:51:29 +05:30
Vikrant Gupta
e25b54f86a fix: 404 resource not found issues (#4757) 2024-03-28 16:46:16 +05:30
Vikrant Gupta
5959963b9d fix: [SIG-575]: no data in new trace explorer page specific scenario (#4748)
Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2024-03-28 16:34:09 +05:30
Srikanth Chekuri
31b1d58a70 chore: fix alerting options (#4752) 2024-03-27 20:25:18 +05:30
Raj Kamal Singh
0ac9f6f663 Feat: QS: redis integration v0: instructions for collecting and parsing logs (#4753)
* chore: minor cleanups to postgres integration instructions

* chore: update instructions for connecting redis integration

* feat: add instructions for collecting redis logs

* chore: flesh out prerequisites for connecting redis integration

* chore: add list of metrics collected for redis
2024-03-27 20:03:27 +05:30
Yunus M
a30b75a2a8 feat: show environments in a separate dropdown (#4717)
* feat: show environments in a separate dropdown
2024-03-27 18:46:05 +05:30
SagarRajput-7
dbd4363ff8 feat: [SIG-573]: Fixed billing page issues (#4744)
* feat: [SIG-573]: Fixed billing page issues

* feat: [SIG-573]: Fixed jest test case
2024-03-27 11:55:28 +05:30
SagarRajput-7
ad1b01f225 feat: [SIG-566]: Added message to alert user about their past due - subscription status (#4724)
* feat: [SIG-566]: Added message to alert user about their past due - subscription status

* feat: [SIG-566]: Added message string to billings.json

* feat: [SIG-566]: Added strings to billings.json

* feat: [SIG-566]: updated test cases

* feat: [SIG-566]: updated message text

* feat: [SIG-566]: code fix

* feat: [SIG-566]: code fix
2024-03-27 10:23:57 +05:30
Vikrant Gupta
e1679790f7 fix: log chips not forming making filtering not work (#4749)
* fix: log chips not forming making filtering not work

* fix: remove console log
2024-03-27 01:01:24 +05:30
Srikanth Chekuri
ae594061e9 chore: fix query-service logging (#4696) 2024-03-27 00:07:29 +05:30
Vikrant Gupta
9e02147d4c fix: [SIG-574]: support __ in the groupBy clause (#4747) 2024-03-26 23:54:31 +05:30
Tan Wei Been
2b3d1c8ee5 [Fix]: Using exported dashboards as input to dashboard provisioning #2 (#4726)
* fix(be,fe): upsert dashboard on provision, export with uuid from frontend

* chore(fe): formatting in dashboard description

* fix: miss out while merging

---------

Co-authored-by: Håvard <haavard.markhus@inmeta.no>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
Co-authored-by: Haavasma <61970295+Haavasma@users.noreply.github.com>
2024-03-26 17:09:59 +05:30
Rajat Dabade
4c91dbcff0 Explorer Toolbar maximised and minimised (#4721) 2024-03-26 17:09:13 +05:30
Srikanth Chekuri
83f68f13db feat: add ability to customize alert frequency (#4697) 2024-03-26 12:40:53 +05:30
Vibhu Pandey
994814864c fix: send 403 on wrong password entry during change password operation (#4733) 2024-03-26 06:20:35 +05:30
Raj Kamal Singh
f24135f5b0 Feat: QS: postgres integration: instructions for collecting and parsing logs (#4738)
* chore: offer metrics config instructions for signoz cloud only

* chore: some more cleanups

* chore: get log collection instructions started

* feat: flesh out log collection otel config for postgres

* chore: some cleanup

* chore: some more cleanup

* chore: some more cleanup
2024-03-23 11:39:28 +05:30
Vikrant Gupta
5745727031 fix: [SIG-565]: design feedback for integrations (#4723)
* fix: [SIG-565]: design feedback for integrations

* feat: added dotted line in the test connection modal

* feat: handle the URL change for integration details page to support back navigation

* feat: added ghost loading states

* feat: added margin for details header

* feat: added margin for details header

* feat: increase the list sizes to 20

* fix: handle icons

* fix: remove unused classes
2024-03-22 14:59:43 +05:30
Vikrant Gupta
ae0d685b29 feat: [SIG-572]: allow number of lines changing in the logs list view (#4737)
* feat: [SIG-572]: allow number of lines changing in the logs list view

* feat: [SIG-572]: allow number of lines changing in the logs list view

* feat: added options to change row values in table view

* fix: build issues
2024-03-22 13:40:55 +05:30
Vikrant Gupta
f34a49e19c fix: [SIG-570]: handle case where - being present in the key (#4735) 2024-03-22 13:40:43 +05:30
Vikrant Gupta
9e557a0ebe feat: [SIG-571]: added support for has and nhas operator for json filter (#4736)
* feat: [SIG-571]: added support for has and nhas operator for json filter

* fix: address review comments

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2024-03-22 13:39:47 +05:30
Vikrant Gupta
0df3c26f04 feat: implement download logs feature for logs explorer new design (#4728)
* feat: implement download logs feature for logs explorer new design

* feat: address review comments

* feat: added timestamp and body to the start

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2024-03-22 13:28:38 +05:30
SagarRajput-7
0df86454ce fix: [SIG-567]: prevented stage-&-run API on legend change (#4720)
* fix: prevented stage-&-run API on legend change

* fix: code refactor

---------

Co-authored-by: Sagar Rajput <sagarrajput@192.168.1.2>
2024-03-21 16:31:59 +05:30
Vishal Sharma
63f0ae1c7c chore: update events (#4725)
* chore: update events

* chore: disable TELEMETRY_EVENT_QUERY_RANGE_API for saas

* chore: don't use mustCompile as it can cause panics
2024-03-20 19:59:28 +05:30
CheetoDa
d9f232683d Merge pull request #4688 from SigNoz/php-onboarding-docs
feat:php flow
2024-03-20 09:06:34 +05:30
CheetoDa
ad9d77d33f feat:php flow 2024-03-19 17:04:11 +05:30
CheetoDa
5a8479f4e9 feat:php flow 2024-03-19 17:04:11 +05:30
Raj Kamal Singh
f4e94c0ad1 chore: update postgres config instructions to work for both signoz cloud and self-hosted (#4718) 2024-03-19 12:28:22 +05:30
Raj Kamal Singh
6f3183823f Feat: postgres integration v0 (#4704)
* chore: update annotations for pre blocks in configuration instructions

* chore: update list of collected metrics for postgres integration

* chore: change non-string units to string in metrics collected list

* chore: some cleanups for postgres config instructions

* chore: some cleanup to metrics connection status resource labels

* chore: remove stub pipeline in postgres integration - no interesting log parsing to be done
2024-03-18 18:20:12 +05:30
Raj Kamal Singh
01bb39da6a chore: some cleanups in plumbing for integration connection status (#4716) 2024-03-18 15:22:31 +05:30
Raj Kamal Singh
43f9830e8d Feat: integrations v0 metrics connection status (#4715)
* chore: add test expectations for integration metrics connection status

* chore: reorg logs connection status calculation for parallelization

* chore: add interface for reader.GetMetricLastReceivedTsMillis

* chore: add plumbing for calculating integration metrics connection status

* chore: impl and test mocks for reader.GetMetricReceivedLatest

* chore: wrap things up and get test passing

* chore: some cleanup

* chore: some more cleanup

* chore: use prom metric names for integration connection test
2024-03-18 10:01:53 +05:30
Yunus M
4c2174958f chore: remove share invite link message (#4691) 2024-03-15 13:38:43 +05:30
Yunus M
07747e73d6 fix: context filter input overflow issue, min height for logs list view (#4710) 2024-03-15 13:25:06 +05:30
Vikrant Gupta
60946b5e9d feat: remove disabled in case of dashboard locked (#4709) 2024-03-15 12:28:03 +05:30
SagarRajput-7
0365fa5421 feat: handled inactive tab handling by removing the display flex override (#4708)
Co-authored-by: Sagar Rajput <sagarrajput@192.168.1.2>
2024-03-15 12:19:07 +05:30
Vikrant Gupta
cf22039562 Revert "Explorer Toolbar maximised and minimised (#4656)" (#4705)
This reverts commit aadb962b6c.
2024-03-15 01:26:31 +05:30
Vikrant Gupta
2a62982885 feat: support case insensitive operators (#4379) 2024-03-14 13:33:35 +05:30
Vikrant Gupta
1e1624ed4c fix: [GH-3932]: do not retry API's in case of 4XX status code (#4376)
* fix: [GH-3932]: do not retry API's in case of 400 status code

* feat: do not retry 4XX response status
2024-03-14 12:07:47 +05:30
Vishal Sharma
5c2a9e8362 Merge branch 'develop' into chore/send-language-service-as-list 2024-02-29 15:58:24 +05:30
makeavish
aad840da59 chore: send language and service name events as list 2024-02-23 14:08:17 +05:30
242 changed files with 6138 additions and 2050 deletions

View File

@@ -74,7 +74,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
requestBody, err := io.ReadAll(r.Body)
if err != nil {
zap.S().Errorf("received no input in api\n", err)
zap.L().Error("received no input in api", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -82,7 +82,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
err = json.Unmarshal(requestBody, &req)
if err != nil {
zap.S().Errorf("received invalid user registration request", zap.Error(err))
zap.L().Error("received invalid user registration request", zap.Error(err))
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
return
}
@@ -90,13 +90,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
// get invite object
invite, err := baseauth.ValidateInvite(ctx, req)
if err != nil {
zap.S().Errorf("failed to validate invite token", err)
zap.L().Error("failed to validate invite token", zap.Error(err))
RespondError(w, model.BadRequest(err), nil)
return
}
if invite == nil {
zap.S().Errorf("failed to validate invite token: it is either empty or invalid", err)
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
return
}
@@ -104,7 +104,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
// get auth domain from email domain
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
if apierr != nil {
zap.S().Errorf("failed to get domain from email", apierr)
zap.L().Error("failed to get domain from email", zap.Error(apierr))
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
}
@@ -205,24 +205,24 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.S().Errorf("[receiveGoogleAuth] sso requested but feature unavailable %s in org domain %s", model.SSO)
zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
q := r.URL.Query()
if errType := q.Get("error"); errType != "" {
zap.S().Errorf("[receiveGoogleAuth] failed to login with google auth", q.Get("error_description"))
zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description")))
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently)
return
}
relayState := q.Get("state")
zap.S().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState))
parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.S().Errorf("[receiveGoogleAuth] failed to process response - invalid response from IDP", err, r)
zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
@@ -244,14 +244,14 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
identity, err := callbackHandler.HandleCallback(r)
if err != nil {
zap.S().Errorf("[receiveGoogleAuth] failed to process HandleCallback ", domain.String(), zap.Error(err))
zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email)
if err != nil {
zap.S().Errorf("[receiveGoogleAuth] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
@@ -266,14 +266,14 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
if !ah.CheckFeature(model.SSO) {
zap.S().Errorf("[receiveSAML] sso requested but feature unavailable %s in org domain %s", model.SSO)
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
return
}
err := r.ParseForm()
if err != nil {
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
@@ -281,11 +281,11 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
// the relay state is sent when a login request is submitted to
// Idp.
relayState := r.FormValue("RelayState")
zap.S().Debug("[receiveML] relay state", zap.String("relayState", relayState))
zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState))
parsedState, err := url.Parse(relayState)
if err != nil || relayState == "" {
zap.S().Errorf("[receiveSAML] failed to process response - invalid response from IDP", err, r)
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
handleSsoError(w, r, redirectUri)
return
}
@@ -302,34 +302,34 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
sp, err := domain.PrepareSamlRequest(parsedState)
if err != nil {
zap.S().Errorf("[receiveSAML] failed to prepare saml request for domain (%s): %v", domain.String(), err)
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
if err != nil {
zap.S().Errorf("[receiveSAML] failed to retrieve assertion info from saml response for organization (%s): %v", domain.String(), err)
zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
if assertionInfo.WarningInfo.InvalidTime {
zap.S().Errorf("[receiveSAML] expired saml response for organization (%s): %v", domain.String(), err)
zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
email := assertionInfo.NameID
if email == "" {
zap.S().Errorf("[receiveSAML] invalid email in the SSO response (%s)", domain.String())
zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
handleSsoError(w, r, redirectUri)
return
}
nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email)
if err != nil {
zap.S().Errorf("[receiveSAML] failed to generate redirect URI after successful login ", domain.String(), zap.Error(err))
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
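
The hunks above replace calls to zap's sugared logger (zap.S(), with printf-style Errorf/Debugf and loose trailing arguments) with the structured logger (zap.L()) and typed fields such as zap.Error, zap.String, and zap.Any. A minimal sketch of the two call styles; the message and field values here are illustrative, not taken from the handlers above:

package main

import (
    "errors"

    "go.uber.org/zap"
)

func main() {
    logger, _ := zap.NewProduction()
    defer logger.Sync()
    zap.ReplaceGlobals(logger)

    err := errors.New("connection refused")

    // Sugared logger: printf-style message, loosely typed trailing arguments.
    zap.S().Errorf("failed to validate invite token: %v", err)

    // Structured logger: constant message plus strongly typed fields.
    zap.L().Error("failed to validate invite token",
        zap.Error(err),
        zap.String("email", "user@example.com"), // illustrative field
    )
}

The structured form keeps the message constant, which makes the JSON output produced by the new encoder configuration (see the main.go changes further down) easier to index and query.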

View File

@@ -191,7 +191,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
url := fmt.Sprintf("%s/trial?licenseKey=%s", constants.LicenseSignozIo, currentActiveLicenseKey)
req, err := http.NewRequest("GET", url, nil)
if err != nil {
zap.S().Error("Error while creating request for trial details", err)
zap.L().Error("Error while creating request for trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid blocking the UI
ah.Respond(w, resp)
@@ -200,7 +200,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
trialResp, err := hClient.Do(req)
if err != nil {
zap.S().Error("Error while fetching trial details", err)
zap.L().Error("Error while fetching trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid incorrectly blocking the UI
ah.Respond(w, resp)
@@ -211,7 +211,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
trialRespBody, err := io.ReadAll(trialResp.Body)
if err != nil || trialResp.StatusCode != http.StatusOK {
zap.S().Error("Error while fetching trial details", err)
zap.L().Error("Error while fetching trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid incorrectly blocking the UI
ah.Respond(w, resp)
@@ -222,7 +222,7 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
var trialRespData model.SubscriptionServerResp
if err := json.Unmarshal(trialRespBody, &trialRespData); err != nil {
zap.S().Error("Error while decoding trial details", err)
zap.L().Error("Error while decoding trial details", zap.Error(err))
// If there is an error in fetching trial details, we will still return the license details
// to avoid incorrectly blocking the UI
ah.Respond(w, resp)
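
The listLicensesV2 hunks above follow a best-effort pattern: build a GET request for trial details with the X-SigNoz-SecretKey header, and on any failure while creating the request, fetching, or decoding, log the error and still respond with the license details so the UI is not blocked. A rough standalone sketch of that request/decode/fallback shape; the URL and the trialDetails struct are placeholders, not the actual SigNoz constants or model.SubscriptionServerResp:

package main

import (
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "time"

    "go.uber.org/zap"
)

// trialDetails is a placeholder for model.SubscriptionServerResp.
type trialDetails struct {
    Status string `json:"status"`
}

// fetchTrialDetails returns (nil, false) on any failure; the caller is
// expected to fall back to the license details it already has.
func fetchTrialDetails(url, secretKey string) (*trialDetails, bool) {
    req, err := http.NewRequest("GET", url, nil)
    if err != nil {
        zap.L().Error("Error while creating request for trial details", zap.Error(err))
        return nil, false
    }
    req.Header.Add("X-SigNoz-SecretKey", secretKey)

    client := &http.Client{Timeout: 10 * time.Second}
    resp, err := client.Do(req)
    if err != nil {
        zap.L().Error("Error while fetching trial details", zap.Error(err))
        return nil, false
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil || resp.StatusCode != http.StatusOK {
        zap.L().Error("Error while fetching trial details", zap.Error(err))
        return nil, false
    }

    var td trialDetails
    if err := json.Unmarshal(body, &td); err != nil {
        zap.L().Error("Error while decoding trial details", zap.Error(err))
        return nil, false
    }
    return &td, true
}

func main() {
    logger, _ := zap.NewProduction()
    defer logger.Sync()
    zap.ReplaceGlobals(logger)

    // Placeholder endpoint; the real handler builds the URL from constants.LicenseSignozIo.
    if td, ok := fetchTrialDetails("https://example.com/trial?licenseKey=KEY", "secret"); ok {
        fmt.Println("trial status:", td.Status)
    }
}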

View File

@@ -18,14 +18,14 @@ import (
func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
zap.S().Info("CustomMetricsFunction feature is not enabled in this plan")
zap.L().Info("CustomMetricsFunction feature is not enabled in this plan")
ah.APIHandler.QueryRangeMetricsV2(w, r)
return
}
metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
if apiErrorObj != nil {
zap.S().Errorf(apiErrorObj.Err.Error())
zap.L().Error("Error in parsing metric query params", zap.Error(apiErrorObj.Err))
RespondError(w, apiErrorObj, nil)
return
}

View File

@@ -43,8 +43,8 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
return
}
pat := model.PAT{
Name: req.Name,
Role: req.Role,
Name: req.Name,
Role: req.Role,
ExpiresAt: req.ExpiresInDays,
}
err = validatePATRequest(pat)
@@ -65,7 +65,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60)
}
zap.S().Debugf("Got Create PAT request: %+v", pat)
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
var apierr basemodel.BaseApiError
if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil {
RespondError(w, apierr, nil)
@@ -115,7 +115,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
req.UpdatedByUserID = user.Id
id := mux.Vars(r)["id"]
req.UpdatedAt = time.Now().Unix()
zap.S().Debugf("Got Update PAT request: %+v", req)
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil {
RespondError(w, apierr, nil)
@@ -135,7 +135,7 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
}, nil)
return
}
zap.S().Infof("Get PATs for user: %+v", user.Id)
zap.L().Info("Get PATs for user", zap.String("user_id", user.Id))
pats, apierr := ah.AppDao().ListPATs(ctx)
if apierr != nil {
RespondError(w, apierr, nil)
@@ -156,7 +156,7 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
return
}
zap.S().Debugf("Revoke PAT with id: %+v", id)
zap.L().Info("Revoke PAT with id", zap.String("id", id))
if apierr := ah.AppDao().RevokePAT(ctx, id, user.Id); apierr != nil {
RespondError(w, apierr, nil)
return
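
The createPAT hunk above copies the requested expires-in-days value into pat.ExpiresAt and then converts it in place to an absolute Unix timestamp (now plus days * 24 * 60 * 60 seconds), while the PAT check in auth.go further down in this diff treats an ExpiresAt of 0 as a token that never expires. A tiny sketch of that conversion and the matching expiry check, using a stand-in struct rather than the real model.PAT:

package main

import (
    "fmt"
    "time"
)

// pat is a stand-in for model.PAT with only the expiry field.
type pat struct {
    ExpiresAt int64 // unix seconds; 0 means the token never expires
}

// newPAT converts an expires-in-days value into an absolute unix timestamp.
func newPAT(expiresInDays int64) pat {
    p := pat{ExpiresAt: expiresInDays}
    if p.ExpiresAt > 0 {
        p.ExpiresAt = time.Now().Unix() + (p.ExpiresAt * 24 * 60 * 60)
    }
    return p
}

// expired mirrors the check applied when a request authenticates with a PAT.
func expired(p pat) bool {
    return p.ExpiresAt != 0 && p.ExpiresAt < time.Now().Unix()
}

func main() {
    p := newPAT(30)
    fmt.Println("expires at:", p.ExpiresAt, "expired:", expired(p))
}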

View File

@@ -15,7 +15,7 @@ import (
func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.SmartTraceDetail) {
zap.S().Info("SmartTraceDetail feature is not enabled in this plan")
zap.L().Info("SmartTraceDetail feature is not enabled in this plan")
ah.APIHandler.SearchTraces(w, r)
return
}
@@ -26,7 +26,7 @@ func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
}
spanLimit, err := strconv.Atoi(constants.SpanLimitStr)
if err != nil {
zap.S().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable: ", err)
zap.L().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable", zap.Error(err))
return
}
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm)

View File

@@ -22,7 +22,7 @@ import (
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {
defer utils.Elapsed("GetMetricResult")()
zap.S().Infof("Executing metric result query: %s", query)
zap.L().Info("Executing metric result query: ", zap.String("query", query))
var hash string
// If getSubTreeSpans function is used in the clickhouse query
@@ -38,9 +38,8 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
}
rows, err := r.conn.Query(ctx, query)
zap.S().Debug(query)
if err != nil {
zap.S().Debug("Error in processing query: ", err)
zap.L().Error("Error in processing query", zap.Error(err))
return nil, "", fmt.Errorf("error in processing query")
}
@@ -117,7 +116,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
}
default:
zap.S().Errorf("invalid var found in metric builder query result", v, colName)
zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName))
}
}
sort.Strings(groupBy)
@@ -140,7 +139,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
}
// err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
// if err != nil {
// zap.S().Error("Error in dropping temporary table: ", err)
// zap.L().Error("Error in dropping temporary table: ", err)
// return nil, err
// }
if hash == "" {
@@ -152,7 +151,7 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {
zap.S().Debugf("Executing getSubTreeSpans function")
zap.L().Debug("Executing getSubTreeSpans function")
// str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`
@@ -162,28 +161,28 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
if err != nil {
zap.S().Error("Error in dropping temporary table: ", err)
zap.L().Error("Error in dropping temporary table", zap.Error(err))
return query, hash, err
}
// Create temporary table to store the getSubTreeSpans() results
zap.S().Debugf("Creating temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash))
err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
if err != nil {
zap.S().Error("Error in creating temporary table: ", err)
zap.L().Error("Error in creating temporary table", zap.Error(err))
return query, hash, err
}
var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
getSpansSubQuery := subtreeInput
// Execute the subTree query
zap.S().Debugf("Executing subTree query: %s", getSpansSubQuery)
zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery))
err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)
// zap.S().Info(getSpansSubQuery)
// zap.L().Info(getSpansSubQuery)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("Error in processing sql query")
}
@@ -196,16 +195,16 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
if len(getSpansSubQueryDBResponses) == 0 {
return query, hash, fmt.Errorf("No spans found for the given query")
}
zap.S().Debugf("Executing query to fetch all the spans from the same TraceID: %s", modelQuery)
zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("Error in processing sql query")
}
// Process model to fetch the spans
zap.S().Debugf("Processing model to fetch the spans")
zap.L().Debug("Processing model to fetch the spans")
searchSpanResponses := []basemodel.SearchSpanResponseItem{}
for _, item := range searchScanResponses {
var jsonItem basemodel.SearchSpanResponseItem
@@ -218,17 +217,17 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
}
// Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
// Use map to store pointer to the spans to avoid duplicates and save memory
zap.S().Debugf("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
if err != nil {
zap.S().Error("Error in getSubTreeAlgorithm function: ", err)
zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err))
return query, hash, err
}
zap.S().Debugf("Preparing batch to store subtree spans in temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash))
if err != nil {
zap.S().Error("Error in preparing batch statement: ", err)
zap.L().Error("Error in preparing batch statement", zap.Error(err))
return query, hash, err
}
for _, span := range treeSearchResponse {
@@ -251,14 +250,14 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
span.Events,
)
if err != nil {
zap.S().Debug("Error in processing sql query: ", err)
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, err
}
}
zap.S().Debugf("Inserting the subtree spans in temporary table getSubTreeSpans%s", hash)
zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
err = statement.Send()
if err != nil {
zap.S().Error("Error in sending statement: ", err)
zap.L().Error("Error in sending statement", zap.Error(err))
return query, hash, err
}
return query, hash, nil
@@ -323,7 +322,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
spans = append(spans, span)
}
zap.S().Debug("Building Tree")
zap.L().Debug("Building Tree")
roots, err := buildSpanTrees(&spans)
if err != nil {
return nil, err
@@ -333,7 +332,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
// For each root, get the subtree spans
for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
targetSpan := &model.SpanForTraceDetails{}
// zap.S().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
// zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
// Search target span object in the tree
for _, root := range roots {
targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
@@ -341,7 +340,7 @@ func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSub
break
}
if err != nil {
zap.S().Error("Error during BreadthFirstSearch(): ", err)
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
return nil, err
}
}
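
The getSubTreeSpansCustomFunction hunks above create a temporary getSubTreeSpans<hash> table and then use the ClickHouse driver's batch API (PrepareBatch, Append per span, Send) to load the computed subtree spans into it. A condensed sketch of that create-and-batch-insert flow, assuming the clickhouse-go v2 driver and a much smaller stand-in table:

package main

import (
    "context"
    "time"

    "github.com/ClickHouse/clickhouse-go/v2"
    "go.uber.org/zap"
)

func main() {
    logger, _ := zap.NewProduction()
    defer logger.Sync()
    zap.ReplaceGlobals(logger)

    ctx := context.Background()
    conn, err := clickhouse.Open(&clickhouse.Options{Addr: []string{"localhost:9000"}})
    if err != nil {
        zap.L().Fatal("failed to open clickhouse connection", zap.Error(err))
    }

    // Create a temporary working table (stand-in for getSubTreeSpans<hash>).
    if err := conn.Exec(ctx, `CREATE TABLE IF NOT EXISTS tmp_spans
        (timestamp DateTime64(9), spanID String, durationNano UInt64)
        ENGINE = MergeTree() ORDER BY (timestamp)`); err != nil {
        zap.L().Error("Error in creating temporary table", zap.Error(err))
        return
    }

    // Prepare one batch, append one row per span, then send everything at once.
    batch, err := conn.PrepareBatch(ctx, "INSERT INTO tmp_spans")
    if err != nil {
        zap.L().Error("Error in preparing batch statement", zap.Error(err))
        return
    }
    if err := batch.Append(time.Now(), "span-1", uint64(1500)); err != nil {
        zap.L().Error("Error in processing sql query", zap.Error(err))
        return
    }
    if err := batch.Send(); err != nil {
        zap.L().Error("Error in sending statement", zap.Error(err))
        return
    }
}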

View File

@@ -49,7 +49,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
break
}
if err != nil {
zap.S().Error("Error during BreadthFirstSearch(): ", err)
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
return nil, err
}
}
@@ -186,7 +186,7 @@ func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTra
// If the parent span is not found, add current span to list of roots
if parent == nil {
// zap.S().Debug("Parent Span not found parent_id: ", span.ParentID)
// zap.L().Debug("Parent Span not found parent_id: ", span.ParentID)
roots = append(roots, span)
span.ParentID = ""
continue
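
buildSpanTrees and breadthFirstSearch in the hunks above arrange spans into parent/child trees (a span whose parent cannot be found becomes a root) and then search each tree level by level for a target span ID. An illustrative sketch of that breadth-first lookup over a simplified span node; the real model.SpanForTraceDetails carries many more fields and an error return:

package main

import "fmt"

// span is a simplified stand-in for model.SpanForTraceDetails.
type span struct {
    SpanID   string
    Children []*span
}

// breadthFirstSearch walks the tree level by level until it finds targetID.
func breadthFirstSearch(root *span, targetID string) *span {
    queue := []*span{root}
    for len(queue) > 0 {
        current := queue[0]
        queue = queue[1:]
        if current.SpanID == targetID {
            return current
        }
        queue = append(queue, current.Children...)
    }
    return nil // not found in this root's subtree
}

func main() {
    leaf := &span{SpanID: "c"}
    root := &span{
        SpanID:   "a",
        Children: []*span{{SpanID: "b"}, {SpanID: "x", Children: []*span{leaf}}},
    }
    fmt.Println(breadthFirstSearch(root, "c") == leaf) // true
}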

View File

@@ -134,7 +134,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
var reader interfaces.DataConnector
storage := os.Getenv("STORAGE")
if storage == "clickhouse" {
zap.S().Info("Using ClickHouse as datastore ...")
zap.L().Info("Using ClickHouse as datastore ...")
qb := db.NewDataConnector(
localDB,
serverOptions.PromConfigPath,
@@ -419,30 +419,33 @@ func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface
signozMetricsUsed := false
signozLogsUsed := false
dataSources := []string{}
signozTracesUsed := false
if postData != nil {
if postData.CompositeQuery != nil {
data["queryType"] = postData.CompositeQuery.QueryType
data["panelType"] = postData.CompositeQuery.PanelType
signozLogsUsed, signozMetricsUsed, _ = telemetry.GetInstance().CheckSigNozSignals(postData)
signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData)
}
}
if signozMetricsUsed || signozLogsUsed {
if signozMetricsUsed || signozLogsUsed || signozTracesUsed {
if signozMetricsUsed {
dataSources = append(dataSources, "metrics")
telemetry.GetInstance().AddActiveMetricsUser()
}
if signozLogsUsed {
dataSources = append(dataSources, "logs")
telemetry.GetInstance().AddActiveLogsUser()
}
data["dataSources"] = dataSources
if signozTracesUsed {
telemetry.GetInstance().AddActiveTracesUser()
}
data["metricsUsed"] = signozMetricsUsed
data["logsUsed"] = signozLogsUsed
data["tracesUsed"] = signozTracesUsed
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, userEmail, true)
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false)
}
}
return data, true
@@ -485,7 +488,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
if _, ok := telemetry.EnabledPaths()[path]; ok {
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail)
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail, true, false)
}
}
@@ -522,7 +525,7 @@ func (s *Server) initListeners() error {
return err
}
zap.S().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))
// listen on private port to support internal services
privateHostPort := s.serverOptions.PrivateHostPort
@@ -535,7 +538,7 @@ func (s *Server) initListeners() error {
if err != nil {
return err
}
zap.S().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))
return nil
}
@@ -547,7 +550,7 @@ func (s *Server) Start() error {
if !s.serverOptions.DisableRules {
s.ruleManager.Start()
} else {
zap.S().Info("msg: Rules disabled as rules.disable is set to TRUE")
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
}
err := s.initListeners()
@@ -561,23 +564,23 @@ func (s *Server) Start() error {
}
go func() {
zap.S().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort))
switch err := s.httpServer.Serve(s.httpConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
default:
zap.S().Error("Could not start HTTP server", zap.Error(err))
zap.L().Error("Could not start HTTP server", zap.Error(err))
}
s.unavailableChannel <- healthcheck.Unavailable
}()
go func() {
zap.S().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
if err != nil {
zap.S().Error("Could not start pprof server", zap.Error(err))
zap.L().Error("Could not start pprof server", zap.Error(err))
}
}()
@@ -587,14 +590,14 @@ func (s *Server) Start() error {
}
go func() {
zap.S().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort))
switch err := s.privateHTTP.Serve(s.privateConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
zap.S().Info("private http server closed")
zap.L().Info("private http server closed")
default:
zap.S().Error("Could not start private HTTP server", zap.Error(err))
zap.L().Error("Could not start private HTTP server", zap.Error(err))
}
s.unavailableChannel <- healthcheck.Unavailable
@@ -602,10 +605,10 @@ func (s *Server) Start() error {
}()
go func() {
zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
if err != nil {
zap.S().Info("opamp ws server failed to start", err)
zap.L().Error("opamp ws server failed to start", zap.Error(err))
s.unavailableChannel <- healthcheck.Unavailable
}
}()
@@ -681,7 +684,7 @@ func makeRulesManager(
return nil, fmt.Errorf("rule manager error: %v", err)
}
zap.S().Info("rules manager is ready")
zap.L().Info("rules manager is ready")
return manager, nil
}

View File

@@ -17,25 +17,25 @@ import (
func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel.UserPayload, error) {
patToken := r.Header.Get("SIGNOZ-API-KEY")
if len(patToken) > 0 {
zap.S().Debugf("Received a non-zero length PAT token")
zap.L().Debug("Received a non-zero length PAT token")
ctx := context.Background()
dao := apiHandler.AppDao()
pat, err := dao.GetPAT(ctx, patToken)
if err == nil && pat != nil {
zap.S().Debugf("Found valid PAT: %+v", pat)
zap.L().Debug("Found valid PAT: ", zap.Any("pat", pat))
if pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 {
zap.S().Debugf("PAT has expired: %+v", pat)
zap.L().Info("PAT has expired: ", zap.Any("pat", pat))
return nil, fmt.Errorf("PAT has expired")
}
group, apiErr := dao.GetGroupByName(ctx, pat.Role)
if apiErr != nil {
zap.S().Debugf("Error while getting group for PAT: %+v", apiErr)
zap.L().Error("Error while getting group for PAT: ", zap.Any("apiErr", apiErr))
return nil, apiErr
}
user, err := dao.GetUser(ctx, pat.UserID)
if err != nil {
zap.S().Debugf("Error while getting user for PAT: %+v", err)
zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
return nil, err
}
telemetry.GetInstance().SetPatTokenUser()
@@ -48,7 +48,7 @@ func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel
}, nil
}
if err != nil {
zap.S().Debugf("Error while getting user for PAT: %+v", err)
zap.L().Error("Error while getting user for PAT: ", zap.Error(err))
return nil, err
}
}

View File

@@ -22,19 +22,19 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.S().Errorf("failed to get domain from email", apierr)
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, model.InternalErrorStr("failed to get domain from email")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.S().Errorf("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, model.InternalErrorStr("failed to generate password hash")
}
group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
if apiErr != nil {
zap.S().Debugf("GetGroupByName failed, err: %v\n", apiErr.Err)
zap.L().Error("GetGroupByName failed", zap.Error(apiErr))
return nil, apiErr
}
@@ -51,7 +51,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
user, apiErr = m.CreateUser(ctx, user, false)
if apiErr != nil {
zap.S().Debugf("CreateUser failed, err: %v\n", apiErr.Err)
zap.L().Error("CreateUser failed", zap.Error(apiErr))
return nil, apiErr
}
@@ -65,7 +65,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.S().Errorf(" failed to get user with email received from auth provider", apierr.Error())
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}
@@ -75,7 +75,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
newUser, apiErr := m.createUserForSAMLRequest(ctx, email)
user = newUser
if apiErr != nil {
zap.S().Errorf("failed to create user with email received from auth provider: %v", apierr.Error())
zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr))
return "", apiErr
}
} else {
@@ -84,7 +84,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
tokenStore, err := baseauth.GenerateJWTForUser(user)
if err != nil {
zap.S().Errorf("failed to generate token for SSO login user", err)
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", model.InternalErrorStr("failed to generate token for the user")
}
@@ -143,8 +143,8 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
// do nothing, just skip sso
ssoAvailable = false
default:
zap.S().Errorf("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequest(err)
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, model.BadRequestStr(err.Error())
}
}
@@ -160,7 +160,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
if len(emailComponents) > 0 {
emailDomain = emailComponents[1]
}
zap.S().Errorf("failed to get org domain from email", zap.String("emailDomain", emailDomain), apierr.ToError())
zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
return resp, apierr
}
@@ -176,7 +176,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
escapedUrl, _ := url.QueryUnescape(sourceUrl)
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.S().Errorf("failed to parse referer", err)
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
@@ -185,7 +185,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl)
if err != nil {
zap.S().Errorf("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), err)
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, model.InternalError(err)
}

View File

@@ -48,13 +48,13 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
if domainIdStr != "" {
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
zap.S().Errorf("failed to parse domainId from relay state", err)
zap.L().Error("failed to parse domainId from relay state", zap.Error(err))
return nil, fmt.Errorf("failed to parse domainId from IdP response")
}
domain, err = m.GetDomain(ctx, domainId)
if (err != nil) || domain == nil {
zap.S().Errorf("failed to find domain from domainId received in IdP response", err.Error())
zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
@@ -64,7 +64,7 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
domainFromDB, err := m.GetDomainByName(ctx, domainNameStr)
domain = domainFromDB
if (err != nil) || domain == nil {
zap.S().Errorf("failed to find domain from domainName received in IdP response", err.Error())
zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err))
return nil, fmt.Errorf("invalid credentials")
}
}
@@ -132,7 +132,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
for _, s := range stored {
domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId}
if err := domain.LoadConfig(s.Data); err != nil {
zap.S().Errorf("ListDomains() failed", zap.Error(err))
zap.L().Error("ListDomains() failed", zap.Error(err))
}
domains = append(domains, domain)
}
@@ -153,7 +153,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
configJson, err := json.Marshal(domain)
if err != nil {
zap.S().Errorf("failed to unmarshal domain config", zap.Error(err))
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
@@ -167,7 +167,7 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
time.Now().Unix())
if err != nil {
zap.S().Errorf("failed to insert domain in db", zap.Error(err))
zap.L().Error("failed to insert domain in db", zap.Error(err))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
@@ -178,13 +178,13 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) ba
func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
if domain.Id == uuid.Nil {
zap.S().Errorf("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain update failed"))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.S().Errorf("domain update failed", zap.Error(err))
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
@@ -195,7 +195,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
domain.Id)
if err != nil {
zap.S().Errorf("domain update failed", zap.Error(err))
zap.L().Error("domain update failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain update failed"))
}
@@ -206,7 +206,7 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) ba
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
if id == uuid.Nil {
zap.S().Errorf("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
@@ -215,7 +215,7 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.Bas
id)
if err != nil {
zap.S().Errorf("domain delete failed", zap.Error(err))
zap.L().Error("domain delete failed", zap.Error(err))
return model.InternalError(fmt.Errorf("domain delete failed"))
}

View File

@@ -26,12 +26,12 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basem
p.Revoked,
)
if err != nil {
zap.S().Errorf("Failed to insert PAT in db, err: %v", zap.Error(err))
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
id, err := result.LastInsertId()
if err != nil {
zap.S().Errorf("Failed to get last inserted id, err: %v", zap.Error(err))
zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
p.Id = strconv.Itoa(int(id))
@@ -62,7 +62,7 @@ func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemo
p.UpdatedByUserID,
id)
if err != nil {
zap.S().Errorf("Failed to update PAT in db, err: %v", zap.Error(err))
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT update failed"))
}
return nil
@@ -74,7 +74,7 @@ func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed
lastUsed,
token)
if err != nil {
zap.S().Errorf("Failed to update PAT last used in db, err: %v", zap.Error(err))
zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT last used update failed"))
}
return nil
@@ -84,7 +84,7 @@ func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApi
pats := []model.PAT{}
if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil {
zap.S().Errorf("Failed to fetch PATs err: %v", zap.Error(err))
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}
for i := range pats {
@@ -129,7 +129,7 @@ func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) base
"UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3",
userID, updatedAt, id)
if err != nil {
zap.S().Errorf("Failed to revoke PAT in db, err: %v", zap.Error(err))
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
return model.InternalError(fmt.Errorf("PAT revoke failed"))
}
return nil

View File

@@ -47,13 +47,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
zap.S().Errorf("failed to connect to license.signoz.io", err)
zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
}
httpBody, err := io.ReadAll(httpResponse.Body)
if err != nil {
zap.S().Errorf("failed to read activation response from license.signoz.io", err)
zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
}
@@ -63,7 +63,7 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError)
result := ActivationResult{}
err = json.Unmarshal(httpBody, &result)
if err != nil {
zap.S().Errorf("failed to marshal activation response from license.signoz.io", err)
zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
}

View File

@@ -97,7 +97,7 @@ func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error {
l.ValidationMessage)
if err != nil {
zap.S().Errorf("error in inserting license data: ", zap.Error(err))
zap.L().Error("error in inserting license data: ", zap.Error(err))
return fmt.Errorf("failed to insert license in db: %v", err)
}
@@ -121,7 +121,7 @@ func (r *Repo) UpdatePlanDetails(ctx context.Context,
_, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key)
if err != nil {
zap.S().Errorf("error in updating license: ", zap.Error(err))
zap.L().Error("error in updating license: ", zap.Error(err))
return fmt.Errorf("failed to update license in db: %v", err)
}

View File

@@ -100,7 +100,7 @@ func (lm *Manager) SetActive(l *model.License) {
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.S().Panicf("Couldn't activate features: %v", err)
zap.L().Panic("Couldn't activate features", zap.Error(err))
}
if !lm.validatorRunning {
// we want to make sure only one validator runs,
@@ -125,13 +125,13 @@ func (lm *Manager) LoadActiveLicense() error {
if active != nil {
lm.SetActive(active)
} else {
zap.S().Info("No active license found, defaulting to basic plan")
zap.L().Info("No active license found, defaulting to basic plan")
// if no active license is found, we default to basic(free) plan with all default features
lm.activeFeatures = model.BasicPlan
setDefaultFeatures(lm)
err := lm.InitFeatures(lm.activeFeatures)
if err != nil {
zap.S().Error("Couldn't initialize features: ", err)
zap.L().Error("Couldn't initialize features", zap.Error(err))
return err
}
}
@@ -191,7 +191,7 @@ func (lm *Manager) Validator(ctx context.Context) {
// Validate validates the current active license
func (lm *Manager) Validate(ctx context.Context) (reterr error) {
zap.S().Info("License validation started")
zap.L().Info("License validation started")
if lm.activeLicense == nil {
return nil
}
@@ -201,12 +201,12 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
lm.lastValidated = time.Now().Unix()
if reterr != nil {
zap.S().Errorf("License validation completed with error", reterr)
zap.L().Error("License validation completed with error", zap.Error(reterr))
atomic.AddUint64(&lm.failedAttempts, 1)
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED,
map[string]interface{}{"err": reterr.Error()}, "")
map[string]interface{}{"err": reterr.Error()}, "", true, false)
} else {
zap.S().Info("License validation completed with no errors")
zap.L().Info("License validation completed with no errors")
}
lm.mutex.Unlock()
@@ -214,7 +214,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
if apiError != nil {
zap.S().Errorf("failed to validate license", apiError)
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError.Err
}
@@ -235,7 +235,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
}
if err := l.ParsePlan(); err != nil {
zap.S().Errorf("failed to parse updated license", zap.Error(err))
zap.L().Error("failed to parse updated license", zap.Error(err))
return err
}
@@ -245,7 +245,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
if err != nil {
// unexpected db write issue but we can let the user continue
// and wait for update to work in next cycle.
zap.S().Errorf("failed to validate license", zap.Error(err))
zap.L().Error("failed to validate license", zap.Error(err))
}
}
@@ -263,14 +263,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
userEmail, err := auth.GetEmailFromJwt(ctx)
if err == nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail)
map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false)
}
}
}()
response, apiError := validate.ActivateLicense(key, "")
if apiError != nil {
zap.S().Errorf("failed to activate license", zap.Error(apiError.Err))
zap.L().Error("failed to activate license", zap.Error(apiError.Err))
return nil, apiError
}
@@ -284,14 +284,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
err := l.ParsePlan()
if err != nil {
zap.S().Errorf("failed to activate license", zap.Error(err))
zap.L().Error("failed to activate license", zap.Error(err))
return nil, model.InternalError(err)
}
// store the license before activating it
err = lm.repo.InsertLicense(ctx, l)
if err != nil {
zap.S().Errorf("failed to activate license", zap.Error(err))
zap.L().Error("failed to activate license", zap.Error(err))
return nil, model.InternalError(err)
}

View File

@@ -14,10 +14,10 @@ import (
semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
"go.signoz.io/signoz/ee/query-service/app"
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/constants"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/version"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder"
zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync"
@@ -27,18 +27,19 @@ import (
)
func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
config := zap.NewDevelopmentConfig()
config := zap.NewProductionConfig()
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
defer stop()
config.EncoderConfig.EncodeDuration = zapcore.StringDurationEncoder
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
consoleEncoder := zapcore.NewConsoleEncoder(config.EncoderConfig)
defaultLogLevel := zapcore.DebugLevel
config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder
config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder
config.EncoderConfig.TimeKey = "timestamp"
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig)
consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig)
defaultLogLevel := zapcore.InfoLevel
res := resource.NewWithAttributes(
semconv.SchemaURL,
semconv.ServiceNameKey.String("query-service"),
@@ -48,14 +49,15 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel),
)
if enableQueryServiceLogOTLPExport == true {
conn, err := grpc.DialContext(ctx, constants.OTLPTarget, grpc.WithBlock(), grpc.WithInsecure(), grpc.WithTimeout(time.Second*30))
if enableQueryServiceLogOTLPExport {
ctx, _ := context.WithTimeout(ctx, time.Second*30)
conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
log.Println("failed to connect to otlp collector to export query service logs with error:", err)
log.Fatalf("failed to establish connection: %v", err)
} else {
logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize)
if err != nil {
logExportBatchSizeInt = 1000
logExportBatchSizeInt = 512
}
ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, zapotlpsync.Options{
BatchSize: logExportBatchSizeInt,
@@ -113,7 +115,6 @@ func main() {
zap.ReplaceGlobals(loggerMgr)
defer loggerMgr.Sync() // flushes buffer, if any
logger := loggerMgr.Sugar()
version.PrintVersion()
serverOptions := &app.ServerOptions{
@@ -137,22 +138,22 @@ func main() {
auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET")
if len(auth.JwtSecret) == 0 {
zap.S().Warn("No JWT secret key is specified.")
zap.L().Warn("No JWT secret key is specified.")
} else {
zap.S().Info("No JWT secret key set successfully.")
zap.L().Info("JWT secret key set successfully.")
}
server, err := app.NewServer(serverOptions)
if err != nil {
logger.Fatal("Failed to create server", zap.Error(err))
zap.L().Fatal("Failed to create server", zap.Error(err))
}
if err := server.Start(); err != nil {
logger.Fatal("Could not start servers", zap.Error(err))
zap.L().Fatal("Could not start server", zap.Error(err))
}
if err := auth.InitAuthCache(context.Background()); err != nil {
logger.Fatal("Failed to initialize auth cache", zap.Error(err))
zap.L().Fatal("Failed to initialize auth cache", zap.Error(err))
}
signalsChannel := make(chan os.Signal, 1)
@@ -161,9 +162,9 @@ func main() {
for {
select {
case status := <-server.HealthCheckStatus():
logger.Info("Received HealthCheck status: ", zap.Int("status", int(status)))
zap.L().Info("Received HealthCheck status: ", zap.Int("status", int(status)))
case <-signalsChannel:
logger.Fatal("Received OS Interrupt Signal ... ")
zap.L().Fatal("Received OS Interrupt Signal ... ")
server.Stop()
}
}

View File

@@ -9,8 +9,8 @@ import (
"github.com/google/uuid"
"github.com/pkg/errors"
saml2 "github.com/russellhaering/gosaml2"
"go.signoz.io/signoz/ee/query-service/sso/saml"
"go.signoz.io/signoz/ee/query-service/sso"
"go.signoz.io/signoz/ee/query-service/sso/saml"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -24,16 +24,16 @@ const (
// OrgDomain identify org owned web domains for auth and other purposes
type OrgDomain struct {
Id uuid.UUID `json:"id"`
Name string `json:"name"`
OrgId string `json:"orgId"`
SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"`
Id uuid.UUID `json:"id"`
Name string `json:"name"`
OrgId string `json:"orgId"`
SsoEnabled bool `json:"ssoEnabled"`
SsoType SSOType `json:"ssoType"`
SamlConfig *SamlConfig `json:"samlConfig"`
SamlConfig *SamlConfig `json:"samlConfig"`
GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"`
Org *basemodel.Organization
Org *basemodel.Organization
}
func (od *OrgDomain) String() string {
@@ -100,8 +100,8 @@ func (od *OrgDomain) GetSAMLCert() string {
return ""
}
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google
// PrepareGoogleOAuthProvider creates GoogleProvider that is used in
// requesting OAuth and also used in processing response from google
func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
if od.GoogleAuthConfig == nil {
return nil, fmt.Errorf("Google auth is not setup correctly for this domain")
@@ -137,38 +137,36 @@ func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServicePro
}
func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1)
// build redirect url from window.location sent by frontend
redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path)
// prepare state that gets relayed back when the auth provider
// calls back our url. here we pass the app url (where signoz runs)
// and the domain Id. The domain Id helps in identifying sso config
// when the call back occurs and the app url is useful in redirecting user
// back to the right path.
// when the call back occurs and the app url is useful in redirecting user
// back to the right path.
// why do we need to pass app url? the callback typically is handled by backend
// and sometimes backend might right at a different port or is unaware of frontend
// endpoint (unless SITE_URL param is set). hence, we receive this build sso request
// along with frontend window.location and use it to relay the information through
// auth provider to the backend (HandleCallback or HandleSSO method).
// along with frontend window.location and use it to relay the information through
// auth provider to the backend (HandleCallback or HandleSSO method).
relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId)
switch (od.SsoType) {
switch od.SsoType {
case SAML:
sp, err := od.PrepareSamlRequest(siteUrl)
if err != nil {
return "", err
}
return sp.BuildAuthURL(relayState)
case GoogleAuth:
googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl)
if err != nil {
return "", err
@@ -176,9 +174,8 @@ func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) {
return googleProvider.BuildAuthURL(relayState)
default:
zap.S().Errorf("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain")
zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name))
return "", fmt.Errorf("unsupported SSO config for the domain")
}
}

View File

@@ -102,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*
IDPCertificateStore: certStore,
SPKeyStore: randomKeyStore,
}
zap.S().Debugf("SAML request:", sp)
zap.L().Debug("SAML request", zap.Any("sp", sp))
return sp, nil
}

View File

@@ -91,12 +91,12 @@ func (lm *Manager) UploadUsage() {
// check if license is present or not
license, err := lm.licenseRepo.GetActiveLicense(ctx)
if err != nil {
zap.S().Errorf("failed to get active license: %v", zap.Error(err))
zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
zap.S().Info("no license present, skipping usage reporting")
zap.L().Info("no license present, skipping usage reporting")
return
}
@@ -123,7 +123,7 @@ func (lm *Manager) UploadUsage() {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
zap.S().Errorf("failed to get usage from clickhouse: %v", zap.Error(err))
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return
}
for _, u := range dbusages {
@@ -133,16 +133,16 @@ func (lm *Manager) UploadUsage() {
}
if len(usages) <= 0 {
zap.S().Info("no snapshots to upload, skipping.")
zap.L().Info("no snapshots to upload, skipping.")
return
}
zap.S().Info("uploading usage data")
zap.L().Info("uploading usage data")
orgName := ""
orgNames, orgError := lm.modelDao.GetOrgs(ctx)
if orgError != nil {
zap.S().Errorf("failed to get org data: %v", zap.Error(orgError))
zap.L().Error("failed to get org data: %v", zap.Error(orgError))
}
if len(orgNames) == 1 {
orgName = orgNames[0].Name
@@ -152,14 +152,14 @@ func (lm *Manager) UploadUsage() {
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
zap.S().Errorf("error while decrypting usage data: %v", zap.Error(err))
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
return
}
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
zap.S().Errorf("error while unmarshalling usage data: %v", zap.Error(err))
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
return
}
@@ -184,13 +184,13 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
for i := 1; i <= MaxRetries; i++ {
apiErr := licenseserver.SendUsage(ctx, payload)
if apiErr != nil && i == MaxRetries {
zap.S().Errorf("retries stopped : %v", zap.Error(apiErr))
zap.L().Error("retries stopped : %v", zap.Error(apiErr))
// not returning error here since it is captured in the failed count
return
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.S().Errorf("failed to upload snapshot retrying after %v secs : %v", sleepDuration.Seconds(), zap.Error(apiErr.Err))
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break
@@ -201,7 +201,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
func (lm *Manager) Stop() {
lm.scheduler.Stop()
zap.S().Debug("sending usage data before shutting down")
zap.L().Info("sending usage data before shutting down")
// send usage before shutting down
lm.UploadUsage()

View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_2022_1972)" stroke="#fff" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M6.667 2h.006M9.333 1.333h.007M1.333 6l13.334-3.333M8 8V4.333M11.333 8H4.667a2 2 0 00-2 2v2.667a2 2 0 002 2h6.666a2 2 0 002-2V10a2 2 0 00-2-2zM6 8v3.333M10 8v3.333M2.667 11.334h10.666"/></g><defs><clipPath id="prefix__clip0_2022_1972"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>


View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round"><path d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z" stroke-linejoin="round"/><path d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3M10.75 10.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"/></g></svg>


View File

@@ -0,0 +1 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg"><g stroke="#C0C1C3" stroke-width="1.333" stroke-linecap="round" stroke-linejoin="round"><path d="M2 4.667V3.333C2 2.6 2.6 2 3.333 2h1.334M11.333 2h1.334C13.4 2 14 2.6 14 3.333v1.334M14 11.334v1.333C14 13.4 13.4 14 12.667 14h-1.334M4.667 14H3.333C2.6 14 2 13.4 2 12.667v-1.333M8.667 4.667H5.333a.667.667 0 00-.666.666v2c0 .368.298.667.666.667h3.334a.667.667 0 00.666-.667v-2a.667.667 0 00-.666-.667zM10.667 8H7.333a.667.667 0 00-.666.667v2c0 .368.298.666.666.666h3.334a.667.667 0 00.666-.666v-2A.667.667 0 0010.667 8z"/></g></svg>


View File

@@ -37,11 +37,16 @@
"text_condition1": "Send a notification when",
"text_condition2": "the threshold",
"text_condition3": "during the last",
"option_1min": "1 min",
"option_5min": "5 mins",
"option_10min": "10 mins",
"option_15min": "15 mins",
"option_30min": "30 mins",
"option_60min": "60 mins",
"option_4hours": "4 hours",
"option_3hours": "3 hours",
"option_6hours": "6 hours",
"option_12hours": "12 hours",
"option_24hours": "24 hours",
"field_threshold": "Alert Threshold",
"option_allthetimes": "all the times",
@@ -112,6 +117,7 @@
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
"field_unit": "Threshold unit",
"text_alert_on_absent": "Send a notification if data is missing for",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query"
}

View File

@@ -14,6 +14,5 @@
"delete_domain_message": "Are you sure you want to delete this domain?",
"delete_domain": "Delete Domain",
"add_domain": "Add Domains",
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
}

View File

@@ -37,11 +37,16 @@
"text_condition1": "Send a notification when",
"text_condition2": "the threshold",
"text_condition3": "during the last",
"option_1min": "1 min",
"option_5min": "5 mins",
"option_10min": "10 mins",
"option_15min": "15 mins",
"option_30min": "30 mins",
"option_60min": "60 mins",
"option_3hours": "3 hours",
"option_4hours": "4 hours",
"option_6hours": "6 hours",
"option_12hours": "12 hours",
"option_24hours": "24 hours",
"field_threshold": "Alert Threshold",
"option_allthetimes": "all the times",
@@ -112,6 +117,7 @@
"exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.",
"field_unit": "Threshold unit",
"text_alert_on_absent": "Send a notification if data is missing for",
"text_alert_frequency": "Run alert every",
"text_for": "minutes",
"selected_query_placeholder": "Select query"
}

View File

@@ -0,0 +1,14 @@
{
"days_remaining": "days remaining in your billing period.",
"billing": "Billing",
"manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.",
"enterprise_cloud": "Enterprise Cloud",
"enterprise": "Enterprise",
"card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.",
"upgrade_plan": "Upgrade Plan",
"manage_billing": "Manage Billing",
"upgrade_now_text": "Upgrade now to have uninterrupted access",
"billing_start_info": "Your billing will start only after the trial period",
"checkout_plans": "Check out features in paid plans",
"here": "here"
}

View File

@@ -14,6 +14,5 @@
"delete_domain_message": "Are you sure you want to delete this domain?",
"delete_domain": "Delete Domain",
"add_domain": "Add Domains",
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly",
"invite_link_share_manually": "After inviting members, please copy the invite link and send them the link manually"
"saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly"
}

View File

@@ -30,7 +30,8 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
statusCode,
payload: null,
error: errorMessage,
message: null,
message: (response.data as any)?.status,
body: JSON.stringify((response.data as any).data),
};
}

View File

@@ -8,7 +8,7 @@ const listAllDomain = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(`orgs/${props.orgId}/domains`);
const response = await axios.get(`/orgs/${props.orgId}/domains`);
return {
statusCode: 200,

View File

@@ -13,6 +13,7 @@ export interface UsageResponsePayloadProps {
billTotal: number;
};
discount: number;
subscriptionStatus?: string;
}
const getUsage = async (

View File

@@ -24,7 +24,7 @@ export const getAggregateAttribute = async ({
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await ApiV3Instance.get(
`autocomplete/aggregate_attributes?${createQueryParams({
`/autocomplete/aggregate_attributes?${createQueryParams({
aggregateOperator,
searchText,
dataSource,

View File

@@ -25,7 +25,7 @@ export const getAggregateKeys = async ({
const response: AxiosResponse<{
data: IQueryAutocompleteResponse;
}> = await ApiV3Instance.get(
`autocomplete/attribute_keys?${createQueryParams({
`/autocomplete/attribute_keys?${createQueryParams({
aggregateOperator,
searchText,
dataSource,

View File

@@ -2,4 +2,4 @@ import axios from 'api';
import { DeleteViewPayloadProps } from 'types/api/saveViews/types';
export const deleteView = (uuid: string): Promise<DeleteViewPayloadProps> =>
axios.delete(`explorer/views/${uuid}`);
axios.delete(`/explorer/views/${uuid}`);

View File

@@ -6,4 +6,4 @@ import { DataSource } from 'types/common/queryBuilder';
export const getAllViews = (
sourcepage: DataSource,
): Promise<AxiosResponse<AllViewsProps>> =>
axios.get(`explorer/views?sourcePage=${sourcepage}`);
axios.get(`/explorer/views?sourcePage=${sourcepage}`);

View File

@@ -8,7 +8,7 @@ export const saveView = ({
viewName,
extraData,
}: SaveViewProps): Promise<AxiosResponse<SaveViewPayloadProps>> =>
axios.post('explorer/views', {
axios.post('/explorer/views', {
name: viewName,
sourcePage,
compositeQuery,

View File

@@ -11,7 +11,7 @@ export const updateView = ({
sourcePage,
viewKey,
}: UpdateViewProps): Promise<UpdateViewPayloadProps> =>
axios.put(`explorer/views/${viewKey}`, {
axios.put(`/explorer/views/${viewKey}`, {
name: viewName,
compositeQuery,
extraData,

View File
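
The preceding API helper hunks normalize request paths to a leading slash (/orgs/..., /explorer/views, /autocomplete/...). When a client is created with axios.create({ baseURL }) (the instance setup itself is not part of this diff), axios strips leading slashes while joining relative paths, so both spellings resolve to the same URL and the change reads as a consistency cleanup rather than a behavioural fix. A minimal sketch, using a hypothetical base URL:

import axios from 'axios';

// Hypothetical instance; the real clients come from the project's 'api' modules.
const api = axios.create({ baseURL: 'https://signoz.example/api/v1' });

// Both calls resolve to https://signoz.example/api/v1/explorer/views.
void api.get('explorer/views');
void api.get('/explorer/views');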

@@ -0,0 +1,23 @@
import { Color } from '@signozhq/design-tokens';
import { useIsDarkMode } from 'hooks/useDarkMode';
function ConfigureIcon(): JSX.Element {
const isDarkMode = useIsDarkMode();
return (
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg">
<g
stroke={isDarkMode ? Color.BG_VANILLA_100 : Color.BG_INK_500}
strokeWidth="1.333"
strokeLinecap="round"
>
<path
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
strokeLinejoin="round"
/>
<path d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3M10.75 10.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z" />
</g>
</svg>
);
}
export default ConfigureIcon;

View File

@@ -5,13 +5,14 @@ import './CustomTimePicker.styles.scss';
import { Input, Popover, Tooltip, Typography } from 'antd';
import cx from 'classnames';
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
import { Options } from 'container/TopNav/DateTimeSelection/config';
import {
FixedDurationSuggestionOptions,
Options,
RelativeDurationSuggestionOptions,
} from 'container/TopNav/DateTimeSelectionV2/config';
import dayjs from 'dayjs';
import { defaultTo, noop } from 'lodash-es';
import { isValidTimeFormat } from 'lib/getMinMax';
import { defaultTo, isFunction, noop } from 'lodash-es';
import debounce from 'lodash-es/debounce';
import { CheckCircle, ChevronDown, Clock } from 'lucide-react';
import {
@@ -33,7 +34,14 @@ interface CustomTimePickerProps {
onError: (value: boolean) => void;
selectedValue: string;
selectedTime: string;
onValidCustomDateChange: ([t1, t2]: any[]) => void;
onValidCustomDateChange: ({
time: [t1, t2],
timeStr,
}: {
time: [dayjs.Dayjs | null, dayjs.Dayjs | null];
timeStr: string;
}) => void;
onCustomTimeStatusUpdate?: (isValid: boolean) => void;
open: boolean;
setOpen: Dispatch<SetStateAction<boolean>>;
items: any[];
@@ -53,6 +61,7 @@ function CustomTimePicker({
open,
setOpen,
onValidCustomDateChange,
onCustomTimeStatusUpdate,
newPopover,
customDateTimeVisible,
setCustomDTPickerVisible,
@@ -85,6 +94,7 @@ function CustomTimePicker({
return Options[index].label;
}
}
for (
let index = 0;
index < RelativeDurationSuggestionOptions.length;
@@ -94,12 +104,17 @@ function CustomTimePicker({
return RelativeDurationSuggestionOptions[index].label;
}
}
for (let index = 0; index < FixedDurationSuggestionOptions.length; index++) {
if (FixedDurationSuggestionOptions[index].value === selectedTime) {
return FixedDurationSuggestionOptions[index].label;
}
}
if (isValidTimeFormat(selectedTime)) {
return selectedTime;
}
return '';
};
@@ -161,13 +176,22 @@ function CustomTimePicker({
setInputStatus('error');
onError(true);
setInputErrorMessage('Please enter time less than 6 months');
if (isFunction(onCustomTimeStatusUpdate)) {
onCustomTimeStatusUpdate(true);
}
} else {
onValidCustomDateChange([minTime, currentTime]);
onValidCustomDateChange({
time: [minTime, currentTime],
timeStr: inputValue,
});
}
} else {
setInputStatus('error');
onError(true);
setInputErrorMessage(null);
if (isFunction(onCustomTimeStatusUpdate)) {
onCustomTimeStatusUpdate(false);
}
}
}, 300);
@@ -320,4 +344,5 @@ CustomTimePicker.defaultProps = {
setCustomDTPickerVisible: noop,
onCustomDateHandler: noop,
handleGoLive: noop,
onCustomTimeStatusUpdate: noop,
};
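
With the widened signature above, onValidCustomDateChange now hands back both the parsed dayjs range and the raw input string, so a caller can keep relative expressions such as "15m" alongside the resolved timestamps. A minimal consumer sketch (the handler name and logging are illustrative, not part of this diff):

import dayjs from 'dayjs';

type ValidCustomDateChangePayload = {
  time: [dayjs.Dayjs | null, dayjs.Dayjs | null];
  timeStr: string;
};

// Illustrative handler matching the new prop shape
const handleValidCustomDateChange = ({
  time: [start, end],
  timeStr,
}: ValidCustomDateChangePayload): void => {
  if (start && end) {
    // timeStr (e.g. "15m") can be persisted, for instance in the relativeTime query param
    console.log(start.toISOString(), end.toISOString(), timeStr);
  }
};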

View File

@@ -1,10 +1,13 @@
.query-builder-search-wrapper {
margin-top: 10px;
height: 46px;
border: 1px solid var(--bg-slate-400);
border-bottom: none;
margin-top: 10px;
border: 1px solid var(--bg-slate-400);
border-bottom: none;
.ant-select-selector {
border: none !important;
}
}
.ant-select-selector {
border: none !important;
input {
font-size: 12px;
}
}
}

View File

@@ -37,12 +37,17 @@ const convert = new Convert();
interface LogFieldProps {
fieldKey: string;
fieldValue: string;
linesPerRow?: number;
}
type LogSelectedFieldProps = LogFieldProps &
type LogSelectedFieldProps = Omit<LogFieldProps, 'linesPerRow'> &
Pick<AddToQueryHOCProps, 'onAddToQuery'>;
function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element {
function LogGeneralField({
fieldKey,
fieldValue,
linesPerRow = 1,
}: LogFieldProps): JSX.Element {
const html = useMemo(
() => ({
__html: convert.toHtml(dompurify.sanitize(fieldValue)),
@@ -55,7 +60,11 @@ function LogGeneralField({ fieldKey, fieldValue }: LogFieldProps): JSX.Element {
<Text ellipsis type="secondary" className="log-field-key">
{`${fieldKey} : `}
</Text>
<LogText dangerouslySetInnerHTML={html} className="log-value" />
<LogText
dangerouslySetInnerHTML={html}
className="log-value"
linesPerRow={linesPerRow > 1 ? linesPerRow : undefined}
/>
</TextContainer>
);
}
@@ -92,6 +101,7 @@ type ListLogViewProps = {
onSetActiveLog: (log: ILog) => void;
onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
activeLog?: ILog | null;
linesPerRow: number;
};
function ListLogView({
@@ -100,6 +110,7 @@ function ListLogView({
onSetActiveLog,
onAddToQuery,
activeLog,
linesPerRow,
}: ListLogViewProps): JSX.Element {
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
@@ -179,7 +190,11 @@ function ListLogView({
/>
<div>
<LogContainer>
<LogGeneralField fieldKey="Log" fieldValue={flattenLogData.body} />
<LogGeneralField
fieldKey="Log"
fieldValue={flattenLogData.body}
linesPerRow={linesPerRow}
/>
{flattenLogData.stream && (
<LogGeneralField fieldKey="Stream" fieldValue={flattenLogData.stream} />
)}
@@ -222,4 +237,8 @@ ListLogView.defaultProps = {
activeLog: null,
};
LogGeneralField.defaultProps = {
linesPerRow: 1,
};
export default ListLogView;

View File

@@ -2,6 +2,10 @@ import { Color } from '@signozhq/design-tokens';
import { Card, Typography } from 'antd';
import styled from 'styled-components';
interface LogTextProps {
linesPerRow?: number;
}
export const Container = styled(Card)<{
$isActiveLog: boolean;
$isDarkMode: boolean;
@@ -23,7 +27,7 @@ export const Container = styled(Card)<{
export const Text = styled(Typography.Text)`
&&& {
min-width: 1.5rem;
min-width: 2.5rem;
white-space: nowrap;
}
`;
@@ -41,11 +45,19 @@ export const LogContainer = styled.div`
gap: 6px;
`;
export const LogText = styled.div`
export const LogText = styled.div<LogTextProps>`
display: inline-block;
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
${({ linesPerRow }): string =>
linesPerRow
? `-webkit-line-clamp: ${linesPerRow};
line-clamp: ${linesPerRow};
display: -webkit-box;
-webkit-box-orient: vertical;
white-space: normal; `
: 'white-space: nowrap;'};
};
`;
export const SelectedLog = styled.div`

View File
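
A note on the LogText change above: -webkit-line-clamp only takes effect together with display: -webkit-box, -webkit-box-orient: vertical and wrapping text, which is why the conditional branch also switches white-space to normal; without linesPerRow the previous single-line nowrap/ellipsis behaviour is kept. A minimal usage sketch (the import path is hypothetical):

import { LogText } from 'components/Logs/ListLogView/styles'; // hypothetical path

function ClampedLogBody({ html }: { html: string }): JSX.Element {
  // Clamp the log body to 3 lines; omit linesPerRow to keep the single-line ellipsis
  return <LogText linesPerRow={3} dangerouslySetInnerHTML={{ __html: html }} />;
}

export default ClampedLogBody;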

@@ -27,7 +27,7 @@
line-height: 18px;
letter-spacing: 0.08em;
text-align: left;
color: var(--bg-slate-200, #52575c);
color: #52575c;
}
.menu-items {
@@ -65,7 +65,7 @@
padding: 12px;
.title {
color: var(--bg-slate-200, #52575c);
color: #52575c;
font-family: Inter;
font-size: 11px;
font-style: normal;
@@ -149,7 +149,7 @@
}
.title {
color: var(--bg-slate-200, #52575c);
color: #52575c;
font-family: Inter;
font-size: 11px;
font-style: normal;

View File

@@ -120,38 +120,36 @@ export default function LogsFormatOptionsMenu({
{selectedItem && (
<>
{selectedItem === 'raw' && (
<>
<div className="horizontal-line" />
<div className="max-lines-per-row">
<div className="title"> max lines per row </div>
<div className="raw-format max-lines-per-row-input">
<button
type="button"
className="periscope-btn"
onClick={decrementMaxLinesPerRow}
>
{' '}
<Minus size={12} />{' '}
</button>
<InputNumber
min={1}
max={10}
value={maxLinesPerRow}
onChange={handleLinesPerRowChange}
/>
<button
type="button"
className="periscope-btn"
onClick={incrementMaxLinesPerRow}
>
{' '}
<Plus size={12} />{' '}
</button>
</div>
<>
<div className="horizontal-line" />
<div className="max-lines-per-row">
<div className="title"> max lines per row </div>
<div className="raw-format max-lines-per-row-input">
<button
type="button"
className="periscope-btn"
onClick={decrementMaxLinesPerRow}
>
{' '}
<Minus size={12} />{' '}
</button>
<InputNumber
min={1}
max={10}
value={maxLinesPerRow}
onChange={handleLinesPerRowChange}
/>
<button
type="button"
className="periscope-btn"
onClick={incrementMaxLinesPerRow}
>
{' '}
<Plus size={12} />{' '}
</button>
</div>
</>
)}
</div>
</>
<div className="selected-item-content-container active">
{!addNewColumn && <div className="horizontal-line" />}

View File

@@ -17,4 +17,5 @@ export enum LOCALSTORAGE {
IS_IDENTIFIED_USER = 'IS_IDENTIFIED_USER',
DASHBOARD_VARIABLES = 'DASHBOARD_VARIABLES',
SHOW_EXPLORER_TOOLBAR = 'SHOW_EXPLORER_TOOLBAR',
PINNED_ATTRIBUTES = 'PINNED_ATTRIBUTES',
}

View File

@@ -27,5 +27,7 @@ export enum QueryParams {
viewName = 'viewName',
viewKey = 'viewKey',
expandedWidgetId = 'expandedWidgetId',
integration = 'integration',
pagination = 'pagination',
relativeTime = 'relativeTime',
}

View File

@@ -56,14 +56,14 @@ describe('BillingContainer', () => {
expect(cost).toBeInTheDocument();
const manageBilling = screen.getByRole('button', {
name: /manage billing/i,
name: 'manage_billing',
});
expect(manageBilling).toBeInTheDocument();
const dollar = screen.getByText(/\$0/i);
expect(dollar).toBeInTheDocument();
const currentBill = screen.getByText('Billing');
const currentBill = screen.getByText('billing');
expect(currentBill).toBeInTheDocument();
});
@@ -75,7 +75,7 @@ describe('BillingContainer', () => {
const freeTrailText = await screen.findByText('Free Trial');
expect(freeTrailText).toBeInTheDocument();
const currentBill = screen.getByText('Billing');
const currentBill = screen.getByText('billing');
expect(currentBill).toBeInTheDocument();
const dollar0 = await screen.findByText(/\$0/i);
@@ -85,18 +85,14 @@ describe('BillingContainer', () => {
);
expect(onTrail).toBeInTheDocument();
const numberOfDayRemaining = await screen.findByText(
/1 days remaining in your billing period./i,
);
const numberOfDayRemaining = await screen.findByText(/1 days_remaining/i);
expect(numberOfDayRemaining).toBeInTheDocument();
const upgradeButton = await screen.findAllByRole('button', {
name: /upgrade/i,
name: /upgrade_plan/i,
});
expect(upgradeButton[1]).toBeInTheDocument();
expect(upgradeButton.length).toBe(2);
const checkPaidPlan = await screen.findByText(
/Check out features in paid plans/i,
);
const checkPaidPlan = await screen.findByText(/checkout_plans/i);
expect(checkPaidPlan).toBeInTheDocument();
const link = screen.getByRole('link', { name: /here/i });
@@ -114,7 +110,7 @@ describe('BillingContainer', () => {
render(<BillingContainer />);
});
const currentBill = screen.getByText('Billing');
const currentBill = screen.getByText('billing');
expect(currentBill).toBeInTheDocument();
const dollar0 = await screen.findByText(/\$0/i);
@@ -126,17 +122,17 @@ describe('BillingContainer', () => {
expect(onTrail).toBeInTheDocument();
const receivedCardDetails = await screen.findByText(
/We have received your card details, your billing will only start after the end of your free trial period./i,
/card_details_recieved_and_billing_info/i,
);
expect(receivedCardDetails).toBeInTheDocument();
const manageBillingButton = await screen.findByRole('button', {
name: /manage billing/i,
name: /manage_billing/i,
});
expect(manageBillingButton).toBeInTheDocument();
const dayRemainingInBillingPeriod = await screen.findByText(
/1 days remaining in your billing period./i,
/1 days_remaining/i,
);
expect(dayRemainingInBillingPeriod).toBeInTheDocument();
});
@@ -156,7 +152,7 @@ describe('BillingContainer', () => {
const billingPeriod = await findByText(billingPeriodText);
expect(billingPeriod).toBeInTheDocument();
const currentBill = screen.getByText('Billing');
const currentBill = screen.getByText('billing');
expect(currentBill).toBeInTheDocument();
const dollar0 = await screen.findByText(/\$1,278.3/i);
@@ -181,7 +177,7 @@ describe('BillingContainer', () => {
);
render(<BillingContainer />);
const dayRemainingInBillingPeriod = await screen.findByText(
/11 days remaining in your billing period./i,
/11 days_remaining/i,
);
expect(dayRemainingInBillingPeriod).toBeInTheDocument();
});
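
These assertions now match raw translation keys ('billing', 'manage_billing', '1 days_remaining', 'checkout_plans') instead of rendered English copy, which implies the test environment resolves t() to the key itself, for example via a react-i18next stub. A minimal sketch of such a stub (an assumption; the shared test setup is not part of this diff):

// Assumed test setup: t() returns the key, so components render 'billing', 'upgrade_plan', ...
jest.mock('react-i18next', () => ({
  useTranslation: (): { t: (key: string) => string } => ({
    t: (key: string): string => key,
  }),
}));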

View File

@@ -17,7 +17,7 @@ import {
} from 'antd';
import { ColumnsType } from 'antd/es/table';
import updateCreditCardApi from 'api/billing/checkout';
import getUsage from 'api/billing/getUsage';
import getUsage, { UsageResponsePayloadProps } from 'api/billing/getUsage';
import manageCreditCardApi from 'api/billing/manage';
import Spinner from 'components/Spinner';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -26,8 +26,9 @@ import useAnalytics from 'hooks/analytics/useAnalytics';
import useAxiosError from 'hooks/useAxiosError';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import { pick } from 'lodash-es';
import { isEmpty, pick } from 'lodash-es';
import { useCallback, useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useMutation, useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -49,6 +50,11 @@ interface DataType {
cost: string;
}
enum SubscriptionStatus {
PastDue = 'past_due',
Active = 'active',
}
const renderSkeletonInput = (): JSX.Element => (
<Skeleton.Input
style={{ marginTop: '10px', height: '40px', width: '100%' }}
@@ -116,15 +122,19 @@ const dummyColumns: ColumnsType<DataType> = [
},
];
// eslint-disable-next-line sonarjs/cognitive-complexity
export default function BillingContainer(): JSX.Element {
const daysRemainingStr = 'days remaining in your billing period.';
const { t } = useTranslation(['billings']);
const daysRemainingStr = t('days_remaining');
const [headerText, setHeaderText] = useState('');
const [billAmount, setBillAmount] = useState(0);
const [activeLicense, setActiveLicense] = useState<License | null>(null);
const [daysRemaining, setDaysRemaining] = useState(0);
const [isFreeTrial, setIsFreeTrial] = useState(false);
const [data, setData] = useState<any[]>([]);
const [apiResponse, setApiResponse] = useState<any>({});
const [apiResponse, setApiResponse] = useState<
Partial<UsageResponsePayloadProps>
>({});
const { trackEvent } = useAnalytics();
@@ -139,6 +149,9 @@ export default function BillingContainer(): JSX.Element {
const processUsageData = useCallback(
(data: any): void => {
if (isEmpty(data?.payload)) {
return;
}
const {
details: { breakdown = [], billTotal },
billingPeriodStart,
@@ -186,6 +199,9 @@ export default function BillingContainer(): JSX.Element {
[licensesData?.payload?.onTrial],
);
const isSubscriptionPastDue =
apiResponse.subscriptionStatus === SubscriptionStatus.PastDue;
const { isLoading, isFetching: isFetchingBillingData } = useQuery(
[REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId],
{
@@ -342,14 +358,27 @@ export default function BillingContainer(): JSX.Element {
[apiResponse, billAmount, isLoading, isFetchingBillingData],
);
const { Text } = Typography;
const subscriptionPastDueMessage = (): JSX.Element => (
<Typography>
{`We were not able to process payments for your account. Please update your card details `}
<Text type="danger" onClick={handleBilling} style={{ cursor: 'pointer' }}>
{t('here')}
</Text>
{` if your payment information has changed. Email us at `}
<Text type="secondary">cloud-support@signoz.io</Text>
{` otherwise. Be sure to provide this information immediately to avoid interruption to your service.`}
</Typography>
);
return (
<div className="billing-container">
<Flex vertical style={{ marginBottom: 16 }}>
<Typography.Text style={{ fontWeight: 500, fontSize: 18 }}>
Billing
{t('billing')}
</Typography.Text>
<Typography.Text color={Color.BG_VANILLA_400}>
Manage your billing information, invoices, and monitor costs.
{t('manage_billing_and_costs')}
</Typography.Text>
</Flex>
@@ -361,7 +390,7 @@ export default function BillingContainer(): JSX.Element {
<Flex justify="space-between" align="center">
<Flex vertical>
<Typography.Title level={5} style={{ marginTop: 2, fontWeight: 500 }}>
{isCloudUserVal ? 'Enterprise Cloud' : 'Enterprise'}{' '}
{isCloudUserVal ? t('enterprise_cloud') : t('enterprise')}{' '}
{isFreeTrial ? <Tag color="success"> Free Trial </Tag> : ''}
</Typography.Title>
{!isLoading && !isFetchingBillingData ? (
@@ -378,8 +407,8 @@ export default function BillingContainer(): JSX.Element {
onClick={handleBilling}
>
{isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription
? 'Upgrade Plan'
: 'Manage Billing'}
? t('upgrade_plan')
: t('manage_billing')}
</Button>
</Flex>
@@ -389,21 +418,34 @@ export default function BillingContainer(): JSX.Element {
ellipsis
style={{ fontWeight: '300', color: '#49aa19', fontSize: 12 }}
>
We have received your card details, your billing will only start after
the end of your free trial period.
{t('card_details_recieved_and_billing_info')}
</Typography.Text>
)}
{!isLoading && !isFetchingBillingData ? (
<Alert
message={headerText}
type="info"
showIcon
style={{ marginTop: 12 }}
/>
headerText && (
<Alert
message={headerText}
type="info"
showIcon
style={{ marginTop: 12 }}
/>
)
) : (
<Skeleton.Input active style={{ height: 20, marginTop: 20 }} />
)}
{isSubscriptionPastDue &&
(!isLoading && !isFetchingBillingData ? (
<Alert
message={subscriptionPastDueMessage()}
type="error"
showIcon
style={{ marginTop: 12 }}
/>
) : (
<Skeleton.Input active style={{ height: 20, marginTop: 20 }} />
))}
</Card>
<BillingUsageGraphCallback />
@@ -434,16 +476,16 @@ export default function BillingContainer(): JSX.Element {
<Col span={20} className="plan-benefits">
<Typography.Text className="plan-benefit">
<CheckCircleOutlined />
Upgrade now to have uninterrupted access
{t('upgrade_now_text')}
</Typography.Text>
<Typography.Text className="plan-benefit">
<CheckCircleOutlined />
Your billing will start only after the trial period
{t('Your billing will start only after the trial period')}
</Typography.Text>
<Typography.Text className="plan-benefit">
<CheckCircleOutlined />
<span>
Check out features in paid plans &nbsp;
{t('checkout_plans')} &nbsp;
<a
href="https://signoz.io/pricing/"
style={{
@@ -452,7 +494,7 @@ export default function BillingContainer(): JSX.Element {
target="_blank"
rel="noreferrer"
>
here
{t('here')}
</a>
</span>
</Typography.Text>
@@ -464,7 +506,7 @@ export default function BillingContainer(): JSX.Element {
loading={isLoadingBilling || isLoadingManageBilling}
onClick={handleBilling}
>
Upgrade Plan
{t('upgrade_plan')}
</Button>
</Col>
</Row>

View File
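
The hunks above move the billing copy onto the new billings namespace added earlier in this compare. One call still passes the English sentence itself as the key, t('Your billing will start only after the trial period'), rather than the billing_start_info key defined in billings.json; with i18next's default missing-key behaviour the key text is echoed back, so the copy still renders but bypasses the catalogue entry. A small sketch of the lookups, assuming that default behaviour:

import { useTranslation } from 'react-i18next';

// Sketch only: how keys resolve against the new billings.json
function useBillingCopy(): { upgrade: string; trialInfo: string } {
  const { t } = useTranslation(['billings']);
  return {
    upgrade: t('upgrade_plan'), // "Upgrade Plan"
    // Missing key: i18next echoes it back, so the English text shows but cannot be localised
    trialInfo: t('Your billing will start only after the trial period'),
  };
}

export default useBillingCopy;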

@@ -3,9 +3,7 @@ import '../../../lib/uPlotLib/uPlotLib.styles.scss';
import { Color } from '@signozhq/design-tokens';
import { Card, Flex, Typography } from 'antd';
import { getComponentForPanelType } from 'constants/panelTypes';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { PropsTypePropsMap } from 'container/GridPanelSwitch/types';
import Uplot from 'components/Uplot';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import tooltipPlugin from 'lib/uPlotLib/plugins/tooltipPlugin';
@@ -14,7 +12,7 @@ import getRenderer from 'lib/uPlotLib/utils/getRenderer';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { getXAxisScale } from 'lib/uPlotLib/utils/getXAxisScale';
import { getYAxisScale } from 'lib/uPlotLib/utils/getYAxisScale';
import { FC, useMemo, useRef } from 'react';
import { useMemo, useRef } from 'react';
import uPlot from 'uplot';
import {
@@ -43,6 +41,21 @@ const paths = (
return renderer(u, seriesIdx, idx0, idx1, extendGap, buildClip);
};
const calculateStartEndTime = (
data: any,
): { startTime: number; endTime: number } => {
const timestamps: number[] = [];
data?.details?.breakdown?.forEach((breakdown: any) => {
breakdown?.dayWiseBreakdown?.breakdown?.forEach((entry: any) => {
timestamps.push(entry?.timestamp);
});
});
const billingTime = [data?.billingPeriodStart, data?.billingPeriodEnd];
const startTime: number = Math.min(...timestamps, ...billingTime);
const endTime: number = Math.max(...timestamps, ...billingTime);
return { startTime, endTime };
};
export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
const { data, billAmount } = props;
const graphCompatibleData = useMemo(
@@ -54,11 +67,9 @@ export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const containerDimensions = useResizeObserver(graphRef);
const { billingPeriodStart: startTime, billingPeriodEnd: endTime } = data;
const Component = getComponentForPanelType(PANEL_TYPES.BAR) as FC<
PropsTypePropsMap[PANEL_TYPES]
>;
const { startTime, endTime } = useMemo(() => calculateStartEndTime(data), [
data,
]);
const getGraphSeries = (color: string, label: string): any => ({
drawStyle: 'bars',
@@ -183,7 +194,7 @@ export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element {
</Flex>
</Flex>
<div ref={graphRef} style={{ height: '100%', paddingBottom: 48 }}>
<Component data={chartData} options={optionsForChart} />
<Uplot data={chartData} options={optionsForChart} />
</div>
</Card>
);

View File

@@ -0,0 +1,84 @@
.download-logs-popover {
.ant-popover-inner {
border-radius: 4px;
border: 1px solid var(--bg-slate-400);
background: linear-gradient(
139deg,
rgba(18, 19, 23, 0.8) 0%,
rgba(18, 19, 23, 0.9) 98.68%
);
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
backdrop-filter: blur(20px);
padding: 12px 18px 12px 14px;
.download-logs-content {
display: flex;
flex-direction: column;
gap: 8px;
align-items: flex-start;
.action-btns {
padding: 4px 0px !important;
width: 159px;
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: normal;
letter-spacing: 0.14px;
gap: 6px;
.ant-btn-icon {
margin-inline-end: 0px;
}
}
.action-btns:hover {
&.ant-btn-text {
background-color: rgba(171, 189, 255, 0.04) !important;
}
}
.export-heading {
color: #52575c;
font-size: 11px;
font-style: normal;
font-weight: 600;
line-height: 18px; /* 163.636% */
letter-spacing: 0.88px;
text-transform: uppercase;
}
}
}
}
.lightMode {
.download-logs-popover {
.ant-popover-inner {
border: 1px solid var(--bg-vanilla-300);
background: linear-gradient(
139deg,
rgba(255, 255, 255, 0.8) 0%,
rgba(255, 255, 255, 0.9) 98.68%
);
box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2);
.download-logs-content {
.action-btns {
color: var(--bg-ink-400);
}
.action-btns:hover {
&.ant-btn-text {
background-color: var(--bg-vanilla-300) !important;
}
}
.export-heading {
color: var(--bg-ink-200);
}
}
}
}
}

View File

@@ -0,0 +1,84 @@
import './DownloadV2.styles.scss';
import { Button, Popover, Typography } from 'antd';
import { Excel } from 'antd-table-saveas-excel';
import { FileDigit, FileDown, Sheet } from 'lucide-react';
import { unparse } from 'papaparse';
import { DownloadProps } from './DownloadV2.types';
function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element {
const downloadExcelFile = (): void => {
const headers = Object.keys(Object.assign({}, ...data)).map((item) => {
const updatedTitle = item
.split('_')
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join(' ');
return {
title: updatedTitle,
dataIndex: item,
};
});
const excel = new Excel();
excel
.addSheet(fileName)
.addColumns(headers)
.addDataSource(data, {
str2Percent: true,
})
.saveAs(`${fileName}.xlsx`);
};
const downloadCsvFile = (): void => {
const csv = unparse(data);
const csvBlob = new Blob([csv], { type: 'text/csv;charset=utf-8;' });
const csvUrl = URL.createObjectURL(csvBlob);
const downloadLink = document.createElement('a');
downloadLink.href = csvUrl;
downloadLink.download = `${fileName}.csv`;
downloadLink.click();
downloadLink.remove();
};
return (
<Popover
trigger={['click']}
placement="bottomRight"
rootClassName="download-logs-popover"
arrow={false}
content={
<div className="download-logs-content">
<Typography.Text className="export-heading">Export As</Typography.Text>
<Button
icon={<Sheet size={14} />}
type="text"
onClick={downloadExcelFile}
className="action-btns"
>
Excel (.xlsx)
</Button>
<Button
icon={<FileDigit size={14} />}
type="text"
onClick={downloadCsvFile}
className="action-btns"
>
CSV
</Button>
</div>
}
>
<Button
className="periscope-btn"
loading={isLoading}
icon={<FileDown size={14} />}
/>
</Popover>
);
}
Download.defaultProps = {
isLoading: undefined,
};
export default Download;
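
For reference, the new Download button expects already-flattened rows (see DownloadProps in the following file), so callers map their records to plain string key/value objects before rendering it. A minimal usage sketch (the import path and row shape are illustrative):

import Download from 'container/Download/DownloadV2'; // hypothetical import path

const rows: Record<string, string>[] = [
  { timestamp: '2024-03-30T10:00:00Z', severity_text: 'INFO', body: 'server started' },
];

function LogsExportButton(): JSX.Element {
  return <Download data={rows} fileName="signoz-logs" isLoading={false} />;
}

export default LogsExportButton;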

View File

@@ -0,0 +1,10 @@
export type DownloadOptions = {
isDownloadEnabled: boolean;
fileName: string;
};
export type DownloadProps = {
data: Record<string, string>[];
isLoading?: boolean;
fileName: string;
};

View File

@@ -1,11 +1,7 @@
import { DndContext, DragEndEvent } from '@dnd-kit/core';
import { useEffect, useState } from 'react';
import ExplorerOptions, { ExplorerOptionsProps } from './ExplorerOptions';
import {
getExplorerToolBarVisibility,
setExplorerToolBarVisibility,
} from './utils';
import { getExplorerToolBarVisibility } from './utils';
type ExplorerOptionsWrapperProps = Omit<
ExplorerOptionsProps,
@@ -27,29 +23,16 @@ function ExplorerOptionWrapper({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const handleDragEnd = (event: DragEndEvent): void => {
const { active, over } = event;
if (
over !== null &&
active.id === 'explorer-options-draggable' &&
over.id === 'explorer-options-droppable'
) {
setIsExplorerOptionHidden(true);
setExplorerToolBarVisibility(false, sourcepage);
}
};
return (
<DndContext onDragEnd={handleDragEnd}>
<ExplorerOptions
disabled={disabled}
query={query}
isLoading={isLoading}
onExport={onExport}
sourcepage={sourcepage}
isExplorerOptionHidden={isExplorerOptionHidden}
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
/>
</DndContext>
<ExplorerOptions
disabled={disabled}
query={query}
isLoading={isLoading}
onExport={onExport}
sourcepage={sourcepage}
isExplorerOptionHidden={isExplorerOptionHidden}
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
/>
);
}

View File

@@ -4,7 +4,7 @@
.explorer-update {
position: fixed;
bottom: 24px;
left: calc(50% - 225px);
left: calc(50% - 250px);
display: flex;
align-items: center;
gap: 12px;
@@ -37,21 +37,24 @@
}
}
.explorer-options {
display: flex;
gap: 16px;
padding: 10px 12px;
border-radius: 50px;
border: 1px solid var(--bg-slate-400);
background: rgba(22, 24, 29, 0.6);
box-shadow: 4px 4px 16px 4px rgba(0, 0, 0, 0.25);
backdrop-filter: blur(20px);
position: fixed;
bottom: 24px;
left: calc(50% + 240px);
padding: 10px 12px;
transform: translate(calc(-50% - 120px), 0);
transition: left 0.2s linear;
border: 1px solid var(--bg-slate-400);
border-radius: 50px;
background: rgba(22, 24, 29, 0.6);
box-shadow: 4px 4px 16px 4px rgba(0, 0, 0, 0.25);
backdrop-filter: blur(20px);
cursor: default;
display: flex;
gap: 16px;
z-index: 1;
.ant-select-selector {
padding: 0 !important;
}
@@ -236,9 +239,9 @@
.lightMode {
.explorer-options {
background: transparent;
box-shadow: none;
border: 1px solid var(--bg-vanilla-300);
background: rgba(255, 255, 255, 0.8);
box-shadow: 4px 4px 16px 4px rgba(255, 255, 255, 0.55);
backdrop-filter: blur(20px);
hr {

View File

@@ -1,7 +1,6 @@
/* eslint-disable react/jsx-props-no-spreading */
import './ExplorerOptions.styles.scss';
import { useDraggable } from '@dnd-kit/core';
import { Color } from '@signozhq/design-tokens';
import {
Button,
@@ -32,7 +31,15 @@ import useErrorNotification from 'hooks/useErrorNotification';
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import { useNotifications } from 'hooks/useNotifications';
import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery';
import { Check, ConciergeBell, Disc3, Plus, X, XCircle } from 'lucide-react';
import {
Check,
ConciergeBell,
Disc3,
PanelBottomClose,
Plus,
X,
XCircle,
} from 'lucide-react';
import {
CSSProperties,
Dispatch,
@@ -51,12 +58,13 @@ import { DataSource } from 'types/common/queryBuilder';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import ExplorerOptionsDroppableArea from './ExplorerOptionsDroppableArea';
import ExplorerOptionsHideArea from './ExplorerOptionsHideArea';
import {
DATASOURCE_VS_ROUTES,
generateRGBAFromHex,
getRandomColor,
saveNewViewHandler,
setExplorerToolBarVisibility,
} from './utils';
const allowedRoles = [USER_ROLES.ADMIN, USER_ROLES.AUTHOR, USER_ROLES.EDITOR];
@@ -79,7 +87,6 @@ function ExplorerOptions({
const history = useHistory();
const ref = useRef<RefSelectProps>(null);
const isDarkMode = useIsDarkMode();
const [isDragEnabled, setIsDragEnabled] = useState(false);
const onModalToggle = useCallback((value: boolean) => {
setIsExport(value);
@@ -271,31 +278,18 @@ function ExplorerOptions({
[isDarkMode],
);
const {
attributes,
listeners,
setNodeRef,
transform,
isDragging,
} = useDraggable({
id: 'explorer-options-draggable',
disabled: isDragEnabled,
});
const hideToolbar = (): void => {
setExplorerToolBarVisibility(false, sourcepage);
if (setIsExplorerOptionHidden) {
setIsExplorerOptionHidden(true);
}
};
const isEditDeleteSupported = allowedRoles.includes(role as string);
const style: React.CSSProperties | undefined = transform
? {
transform: `translate3d(${transform.x - 338}px, ${transform.y}px, 0)`,
width: `${400 - transform.y * 6}px`,
maxWidth: '440px', // initial width of the explorer options
overflow: 'hidden',
}
: undefined;
return (
<>
{isQueryUpdated && !isExplorerOptionHidden && !isDragging && (
{isQueryUpdated && !isExplorerOptionHidden && (
<div
className={cx(
isEditDeleteSupported ? '' : 'hide-update',
@@ -330,12 +324,7 @@ function ExplorerOptions({
background: extraData
? `linear-gradient(90deg, rgba(0,0,0,0) -5%, ${rgbaColor} 9%, rgba(0,0,0,0) 30%)`
: 'transparent',
backdropFilter: 'blur(20px)',
...style,
}}
ref={setNodeRef}
{...listeners}
{...attributes}
>
<div className="view-options">
<Select<string, { key: string; value: string }>
@@ -352,9 +341,6 @@ function ExplorerOptions({
allowClear={{
clearIcon: <XCircle size={16} style={{ marginTop: '-3px' }} />,
}}
onDropdownVisibleChange={(open): void => {
setIsDragEnabled(open);
}}
onClear={handleClearSelect}
ref={ref}
>
@@ -410,11 +396,17 @@ function ExplorerOptions({
<Plus size={16} />
</Button>
</Tooltip>
<Tooltip title="Hide">
<Button disabled={disabled} shape="circle" onClick={hideToolbar}>
<PanelBottomClose size={16} />
</Button>
</Tooltip>
</div>
</div>
)}
<ExplorerOptionsDroppableArea
<ExplorerOptionsHideArea
isExplorerOptionHidden={isExplorerOptionHidden}
setIsExplorerOptionHidden={setIsExplorerOptionHidden}
sourcepage={sourcepage}

View File

@@ -49,7 +49,7 @@
.explorer-option-droppable-container {
.explorer-show-btn {
background: var(--bg-vanilla-400);
background: var(--bg-vanilla-200);
}
}
}

View File

@@ -1,7 +1,6 @@
/* eslint-disable no-nested-ternary */
import './ExplorerOptionsDroppableArea.styles.scss';
import './ExplorerOptionsHideArea.styles.scss';
import { useDroppable } from '@dnd-kit/core';
import { Color } from '@signozhq/design-tokens';
import { Button, Tooltip } from 'antd';
import { Disc3, X } from 'lucide-react';
@@ -19,7 +18,7 @@ interface DroppableAreaProps {
onUpdateQueryHandler: () => void;
}
function ExplorerOptionsDroppableArea({
function ExplorerOptionsHideArea({
isQueryUpdated,
isExplorerOptionHidden,
sourcepage,
@@ -27,10 +26,6 @@ function ExplorerOptionsDroppableArea({
handleClearSelect,
onUpdateQueryHandler,
}: DroppableAreaProps): JSX.Element {
const { setNodeRef } = useDroppable({
id: 'explorer-options-droppable',
});
const handleShowExplorerOption = (): void => {
if (setIsExplorerOptionHidden) {
setIsExplorerOptionHidden(false);
@@ -39,7 +34,7 @@ function ExplorerOptionsDroppableArea({
};
return (
<div ref={setNodeRef} className="explorer-option-droppable-container">
<div className="explorer-option-droppable-container">
{isExplorerOptionHidden && (
<>
{isQueryUpdated && (
@@ -75,9 +70,9 @@ function ExplorerOptionsDroppableArea({
);
}
ExplorerOptionsDroppableArea.defaultProps = {
ExplorerOptionsHideArea.defaultProps = {
isExplorerOptionHidden: undefined,
setIsExplorerOptionHidden: undefined,
};
export default ExplorerOptionsDroppableArea;
export default ExplorerOptionsHideArea;

View File

@@ -1,20 +1,30 @@
import { InfoCircleOutlined } from '@ant-design/icons';
import Spinner from 'components/Spinner';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { QueryParams } from 'constants/query';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import GridPanelSwitch from 'container/GridPanelSwitch';
import { getFormatNameByOptionId } from 'container/NewWidget/RightContainer/alertFomatCategories';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { Time } from 'container/TopNav/DateTimeSelection/config';
import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config';
import {
CustomTimeType,
Time as TimeV2,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import useUrlQuery from 'hooks/useUrlQuery';
import GetMinMax from 'lib/getMinMax';
import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { UpdateTimeInterval } from 'store/actions';
import { AppState } from 'store/reducers';
import { AlertDef } from 'types/api/alerts/def';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -32,7 +42,7 @@ export interface ChartPreviewProps {
query: Query | null;
graphType?: PANEL_TYPES;
selectedTime?: timePreferenceType;
selectedInterval?: Time | TimeV2;
selectedInterval?: Time | TimeV2 | CustomTimeType;
headline?: JSX.Element;
alertDef?: AlertDef;
userQueryKey?: string;
@@ -46,7 +56,7 @@ function ChartPreview({
query,
graphType = PANEL_TYPES.TIME_SERIES,
selectedTime = 'GLOBAL_TIME',
selectedInterval = '5min',
selectedInterval = '5m',
headline,
userQueryKey,
allowSelectedIntervalForStepGen = false,
@@ -54,6 +64,7 @@ function ChartPreview({
yAxisUnit,
}: ChartPreviewProps): JSX.Element | null {
const { t } = useTranslation('alerts');
const dispatch = useDispatch();
const threshold = alertDef?.condition.target || 0;
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -63,6 +74,30 @@ function ChartPreview({
GlobalReducer
>((state) => state.globalTime);
const handleBackNavigation = (): void => {
const searchParams = new URLSearchParams(window.location.search);
const startTime = searchParams.get(QueryParams.startTime);
const endTime = searchParams.get(QueryParams.endTime);
if (startTime && endTime && startTime !== endTime) {
dispatch(
UpdateTimeInterval('custom', [
parseInt(getTimeString(startTime), 10),
parseInt(getTimeString(endTime), 10),
]),
);
}
};
useEffect(() => {
window.addEventListener('popstate', handleBackNavigation);
return (): void => {
window.removeEventListener('popstate', handleBackNavigation);
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const canQuery = useMemo((): boolean => {
if (!query || query == null) {
return false;
@@ -131,10 +166,34 @@ function ChartPreview({
const containerDimensions = useResizeObserver(graphRef);
const isDarkMode = useIsDarkMode();
const urlQuery = useUrlQuery();
const location = useLocation();
const optionName =
getFormatNameByOptionId(alertDef?.condition.targetUnit || '') || '';
const onDragSelect = useCallback(
(start: number, end: number): void => {
const startTimestamp = Math.trunc(start);
const endTimestamp = Math.trunc(end);
if (startTimestamp !== endTimestamp) {
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
}
const { maxTime, minTime } = GetMinMax('custom', [
startTimestamp,
endTimestamp,
]);
urlQuery.set(QueryParams.startTime, minTime.toString());
urlQuery.set(QueryParams.endTime, maxTime.toString());
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
history.push(generatedUrl);
},
[dispatch, location.pathname, urlQuery],
);
const options = useMemo(
() =>
getUPlotChartOptions({
@@ -145,6 +204,7 @@ function ChartPreview({
minTimeScale,
maxTimeScale,
isDarkMode,
onDragSelect,
thresholds: [
{
index: '0', // no impact
@@ -174,6 +234,7 @@ function ChartPreview({
minTimeScale,
maxTimeScale,
isDarkMode,
onDragSelect,
threshold,
t,
optionName,

View File

@@ -1,5 +1,6 @@
import {
Checkbox,
Collapse,
Form,
InputNumber,
InputNumberProps,
@@ -19,12 +20,18 @@ import {
AlertDef,
defaultCompareOp,
defaultEvalWindow,
defaultFrequency,
defaultMatchType,
} from 'types/api/alerts/def';
import { EQueryType } from 'types/common/dashboard';
import { popupContainer } from 'utils/selectPopupContainer';
import { FormContainer, InlineSelect, StepHeading } from './styles';
import {
FormContainer,
InlineSelect,
StepHeading,
VerticalLine,
} from './styles';
function RuleOptions({
alertDef,
@@ -200,6 +207,35 @@ function RuleOptions({
});
};
const onChangeFrequency = (value: string | unknown): void => {
const freq = (value as string) || alertDef.frequency;
setAlertDef({
...alertDef,
frequency: freq,
});
};
const renderFrequency = (): JSX.Element => (
<InlineSelect
getPopupContainer={popupContainer}
defaultValue={defaultFrequency}
style={{ minWidth: '120px' }}
value={alertDef.frequency}
onChange={onChangeFrequency}
>
<Select.Option value="1m0s">{t('option_1min')}</Select.Option>
<Select.Option value="5m0s">{t('option_5min')}</Select.Option>
<Select.Option value="10m0s">{t('option_10min')}</Select.Option>
<Select.Option value="15m0s">{t('option_15min')}</Select.Option>
<Select.Option value="30m0s">{t('option_30min')}</Select.Option>
<Select.Option value="1h0m0s">{t('option_60min')}</Select.Option>
<Select.Option value="3h0m0s">{t('option_3hours')}</Select.Option>
<Select.Option value="6h0m0s">{t('option_6hours')}</Select.Option>
<Select.Option value="12h0m0s">{t('option_12hours')}</Select.Option>
<Select.Option value="24h0m0s">{t('option_24hours')}</Select.Option>
</InlineSelect>
);
const selectedCategory = getCategoryByOptionId(currentQuery?.unit || '');
const categorySelectOptions = getCategorySelectOptionByName(
@@ -238,42 +274,57 @@ function RuleOptions({
/>
</Form.Item>
</Space>
<Space direction="horizontal" align="center">
<Form.Item noStyle name={['condition', 'alertOnAbsent']}>
<Checkbox
checked={alertDef?.condition?.alertOnAbsent}
onChange={(e): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
alertOnAbsent: e.target.checked,
},
});
}}
/>
</Form.Item>
<Typography.Text>{t('text_alert_on_absent')}</Typography.Text>
<Collapse>
<Collapse.Panel header={t('More options')} key="1">
<Space direction="vertical" size="large">
<VerticalLine>
<Space direction="horizontal" align="center">
<Typography.Text>{t('text_alert_frequency')}</Typography.Text>
{renderFrequency()}
</Space>
</VerticalLine>
<Form.Item noStyle name={['condition', 'absentFor']}>
<InputNumber
min={1}
value={alertDef?.condition?.absentFor}
onChange={(value): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
absentFor: Number(value) || 0,
},
});
}}
type="number"
onWheel={(e): void => e.currentTarget.blur()}
/>
</Form.Item>
<Typography.Text>{t('text_for')}</Typography.Text>
</Space>
<VerticalLine>
<Space direction="horizontal" align="center">
<Form.Item noStyle name={['condition', 'alertOnAbsent']}>
<Checkbox
checked={alertDef?.condition?.alertOnAbsent}
onChange={(e): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
alertOnAbsent: e.target.checked,
},
});
}}
/>
</Form.Item>
<Typography.Text>{t('text_alert_on_absent')}</Typography.Text>
<Form.Item noStyle name={['condition', 'absentFor']}>
<InputNumber
min={1}
value={alertDef?.condition?.absentFor}
onChange={(value): void => {
setAlertDef({
...alertDef,
condition: {
...alertDef.condition,
absentFor: Number(value) || 0,
},
});
}}
type="number"
onWheel={(e): void => e.currentTarget.blur()}
/>
</Form.Item>
<Typography.Text>{t('text_for')}</Typography.Text>
</Space>
</VerticalLine>
</Space>
</Collapse.Panel>
</Collapse>
</Space>
</FormContainer>
</>

View File

@@ -67,6 +67,13 @@ export const SeveritySelect = styled(Select)`
width: 25% !important;
`;
export const VerticalLine = styled.div`
border-left: 2px solid #e8e8e8; /* Adjust color and thickness as desired */
padding-left: 20px; /* Adjust spacing to content as needed */
margin-left: 20px; /* Adjust margin as desired */
height: 100%; /* Adjust based on your layout needs */
`;
export const InputSmall = styled(Input)`
width: 40% !important;
`;

View File

@@ -12,22 +12,30 @@ import {
// toChartInterval converts eval window to chart selection time interval
export const toChartInterval = (evalWindow: string | undefined): Time => {
switch (evalWindow) {
case '1m0s':
return '1m';
case '5m0s':
return '5min';
return '5m';
case '10m0s':
return '10min';
return '10m';
case '15m0s':
return '15min';
return '15m';
case '30m0s':
return '30min';
return '30m';
case '1h0m0s':
return '1hr';
return '1h';
case '3h0m0s':
return '3h';
case '4h0m0s':
return '4hr';
return '4h';
case '6h0m0s':
return '6h';
case '12h0m0s':
return '12h';
case '24h0m0s':
return '1day';
return '1d';
default:
return '5min';
return '5m';
}
};

View File

@@ -1,6 +1,7 @@
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -81,8 +82,13 @@ function GridCardGraph({
const searchParams = new URLSearchParams(window.location.search);
const startTime = searchParams.get(QueryParams.startTime);
const endTime = searchParams.get(QueryParams.endTime);
const relativeTime = searchParams.get(
QueryParams.relativeTime,
) as CustomTimeType;
if (startTime && endTime && startTime !== endTime) {
if (relativeTime) {
dispatch(UpdateTimeInterval(relativeTime));
} else if (startTime && endTime && startTime !== endTime) {
dispatch(
UpdateTimeInterval('custom', [
parseInt(getTimeString(startTime), 10),
@@ -146,6 +152,16 @@ function GridCardGraph({
widget?.panelTypes,
widget.timePreferance,
],
retry(failureCount, error): boolean {
if (
String(error).includes('status: error') &&
String(error).includes('i/o timeout')
) {
return false;
}
return failureCount < 2;
},
keepPreviousData: true,
enabled: queryEnabledCondition,
refetchOnMount: false,

View File

@@ -71,6 +71,7 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
key={log.id}
logData={log}
selectedFields={selectedFields}
linesPerRow={options.maxLines}
onAddToQuery={onAddToQuery}
onSetActiveLog={onSetActiveLog}
/>

View File

@@ -53,6 +53,19 @@
background: rgba(171, 189, 255, 0.04);
padding: 8px;
.ant-collapse-extra {
display: flex;
align-items: center;
.action-btn {
display: flex;
.ant-btn {
background: rgba(113, 144, 249, 0.08);
}
}
}
}
.ant-collapse-content {

View File

@@ -5,12 +5,13 @@
.ant-table-row:hover {
.ant-table-cell {
.value-field {
display: flex;
justify-content: space-between;
align-items: center;
.action-btn {
display: flex;
gap: 4px;
position: absolute;
top: 50%;
right: 16px;
transform: translateY(-50%);
gap: 8px;
}
}
}
@@ -28,6 +29,30 @@
}
}
.attribute-pin {
cursor: pointer;
padding: 0;
vertical-align: middle;
text-align: center;
.log-attribute-pin {
padding: 8px;
display: flex;
justify-content: center;
align-items: center;
.pin-attribute-icon {
border: none;
&.pinned svg {
fill: var(--bg-robin-500);
}
}
}
}
.value-field-container {
background: rgba(22, 25, 34, 0.4);
@@ -70,6 +95,10 @@
.value-field-container {
background: var(--bg-vanilla-300);
&.attribute-pin {
background: var(--bg-vanilla-100);
}
.action-btn {
.filter-btn {
background: var(--bg-vanilla-300);

View File

@@ -1,22 +1,29 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
import './TableView.styles.scss';
import { LinkOutlined } from '@ant-design/icons';
import { Color } from '@signozhq/design-tokens';
import { Button, Space, Spin, Tooltip, Tree, Typography } from 'antd';
import { ColumnsType } from 'antd/es/table';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import cx from 'classnames';
import AddToQueryHOC, {
AddToQueryHOCProps,
} from 'components/Logs/AddToQueryHOC';
import CopyClipboardHOC from 'components/Logs/CopyClipboardHOC';
import { ResizeTable } from 'components/ResizeTable';
import { LOCALSTORAGE } from 'constants/localStorage';
import { OPERATORS } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { useIsDarkMode } from 'hooks/useDarkMode';
import history from 'lib/history';
import { fieldSearchFilter } from 'lib/logs/fieldSearch';
import { removeJSONStringifyQuotes } from 'lib/removeJSONStringifyQuotes';
import { isEmpty } from 'lodash-es';
import { ArrowDownToDot, ArrowUpFromDot } from 'lucide-react';
import { useMemo, useState } from 'react';
import { ArrowDownToDot, ArrowUpFromDot, Pin } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import { useDispatch } from 'react-redux';
import { generatePath } from 'react-router-dom';
import { Dispatch } from 'redux';
@@ -57,6 +64,28 @@ function TableView({
const dispatch = useDispatch<Dispatch<AppActions>>();
const [isfilterInLoading, setIsFilterInLoading] = useState<boolean>(false);
const [isfilterOutLoading, setIsFilterOutLoading] = useState<boolean>(false);
const isDarkMode = useIsDarkMode();
const [pinnedAttributes, setPinnedAttributes] = useState<
Record<string, boolean>
>({});
useEffect(() => {
const pinnedAttributesFromLocalStorage = getLocalStorageApi(
LOCALSTORAGE.PINNED_ATTRIBUTES,
);
if (pinnedAttributesFromLocalStorage) {
try {
const parsedPinnedAttributes = JSON.parse(pinnedAttributesFromLocalStorage);
setPinnedAttributes(parsedPinnedAttributes);
} catch (e) {
console.error('Error parsing pinned attributes from local storage');
}
} else {
setPinnedAttributes({});
}
}, []);
const flattenLogData: Record<string, string> | null = useMemo(
() => (logData ? flattenObject(logData) : null),
@@ -74,6 +103,19 @@ function TableView({
}
};
const togglePinAttribute = (record: DataType): void => {
if (record) {
const newPinnedAttributes = { ...pinnedAttributes };
newPinnedAttributes[record.key] = !newPinnedAttributes[record.key];
setPinnedAttributes(newPinnedAttributes);
setLocalStorageApi(
LOCALSTORAGE.PINNED_ATTRIBUTES,
JSON.stringify(newPinnedAttributes),
);
}
};
const onClickHandler = (
operator: string,
fieldKey: string,
@@ -138,6 +180,37 @@ function TableView({
}
const columns: ColumnsType<DataType> = [
{
title: '',
dataIndex: 'pin',
key: 'pin',
width: 5,
align: 'left',
className: 'attribute-pin value-field-container',
render: (fieldData: Record<string, string>, record): JSX.Element => {
let pinColor = isDarkMode ? Color.BG_VANILLA_100 : Color.BG_INK_500;
if (pinnedAttributes[record?.key]) {
pinColor = Color.BG_ROBIN_500;
}
return (
<div className="log-attribute-pin value-field">
<div
className={cx(
'pin-attribute-icon',
pinnedAttributes[record?.key] ? 'pinned' : '',
)}
onClick={(): void => {
togglePinAttribute(record);
}}
>
<Pin size={14} color={pinColor} />
</div>
</div>
);
},
},
{
title: 'Field',
dataIndex: 'field',
@@ -264,12 +337,34 @@ function TableView({
},
},
];
function sortPinnedAttributes(
data: Record<string, string>[],
sortingObj: Record<string, boolean>,
): Record<string, string>[] {
const sortingKeys = Object.keys(sortingObj);
return data.sort((a, b) => {
const aKey = a.key;
const bKey = b.key;
const aSortIndex = sortingKeys.indexOf(aKey);
const bSortIndex = sortingKeys.indexOf(bKey);
if (sortingObj[aKey] && !sortingObj[bKey]) {
return -1;
}
if (!sortingObj[aKey] && sortingObj[bKey]) {
return 1;
}
return aSortIndex - bSortIndex;
});
}
const sortedAttributes = sortPinnedAttributes(dataSource, pinnedAttributes);
return (
<ResizeTable
columns={columns}
tableLayout="fixed"
dataSource={dataSource}
dataSource={sortedAttributes}
pagination={false}
showHeader={false}
className="attribute-table-container"

View File

@@ -2,6 +2,7 @@ import Graph from 'components/Graph';
import Spinner from 'components/Spinner';
import { QueryParams } from 'constants/query';
import { themeColors } from 'constants/theme';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config';
import useUrlQuery from 'hooks/useUrlQuery';
import getChartData, { GetChartDataProps } from 'lib/getChartData';
import GetMinMax from 'lib/getMinMax';
@@ -65,8 +66,13 @@ function LogsExplorerChart({
const searchParams = new URLSearchParams(window.location.search);
const startTime = searchParams.get(QueryParams.startTime);
const endTime = searchParams.get(QueryParams.endTime);
const relativeTime = searchParams.get(
QueryParams.relativeTime,
) as CustomTimeType;
if (startTime && endTime && startTime !== endTime) {
if (relativeTime) {
dispatch(UpdateTimeInterval(relativeTime));
} else if (startTime && endTime && startTime !== endTime) {
dispatch(
UpdateTimeInterval('custom', [
parseInt(getTimeString(startTime), 10),

View File

@@ -8,4 +8,5 @@
line-height: 18px;
letter-spacing: -0.005em;
text-align: left;
min-height: 500px;
}

View File

@@ -90,6 +90,7 @@ function LogsExplorerList({
onAddToQuery={onAddToQuery}
onSetActiveLog={onSetActiveLog}
activeLog={activeLog}
linesPerRow={options.maxLines}
/>
);
},

View File

@@ -14,6 +14,7 @@ import {
PANEL_TYPES,
} from 'constants/queryBuilder';
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
import Download from 'container/DownloadV2/DownloadV2';
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
import GoToTop from 'container/GoToTop';
import LogsExplorerChart from 'container/LogsExplorerChart';
@@ -21,6 +22,7 @@ import LogsExplorerList from 'container/LogsExplorerList';
import LogsExplorerTable from 'container/LogsExplorerTable';
import { useOptionsMenu } from 'container/OptionsMenu';
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
import dayjs from 'dayjs';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { addEmptyWidgetInDashboardJSONWithQuery } from 'hooks/dashboard/utils';
import { LogTimeRange } from 'hooks/logs/types';
@@ -33,8 +35,9 @@ import useClickOutside from 'hooks/useClickOutside';
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { FlatLogData } from 'lib/logs/flatLogData';
import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData';
import { defaultTo, isEmpty } from 'lodash-es';
import { defaultTo, isEmpty, omit } from 'lodash-es';
import { Sliders } from 'lucide-react';
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
@@ -523,6 +526,23 @@ function LogsExplorerViews({
},
});
const flattenLogData = useMemo(
() =>
logs.map((log) => {
const timestamp =
typeof log.timestamp === 'string'
? dayjs(log.timestamp).format()
: dayjs(log.timestamp / 1e6).format();
return FlatLogData({
timestamp,
body: log.body,
...omit(log, 'timestamp', 'body'),
});
}),
[logs],
);
return (
<div className="logs-explorer-views-container">
{showHistogram && (
@@ -578,6 +598,11 @@ function LogsExplorerViews({
<div className="logs-actions-container">
{selectedPanelType === PANEL_TYPES.LIST && (
<div className="tab-options">
<Download
data={flattenLogData}
isLoading={isFetching}
fileName="log_data"
/>
<div className="format-options-container" ref={menuRef}>
<Button
className="periscope-btn"

View File

@@ -1,9 +1,12 @@
import { Time } from 'container/TopNav/DateTimeSelection/config';
import { Time as TimeV2 } from 'container/TopNav/DateTimeSelectionV2/config';
import {
CustomTimeType,
Time as TimeV2,
} from 'container/TopNav/DateTimeSelectionV2/config';
import { GetMinMaxPayload } from 'lib/getMinMax';
export const getGlobalTime = (
selectedTime: Time | TimeV2,
selectedTime: Time | TimeV2 | CustomTimeType,
globalTime: GetMinMaxPayload,
): GetMinMaxPayload | undefined => {
if (selectedTime === 'custom') {

View File

@@ -74,6 +74,7 @@ function LogsTable(props: LogsTableProps): JSX.Element {
key={log.id}
logData={log}
selectedFields={selected}
linesPerRow={linesPerRow}
onAddToQuery={onAddToQuery}
onSetActiveLog={onSetActiveLog}
/>

View File

@@ -23,7 +23,12 @@ function DashboardDescription(): JSX.Element {
handleDashboardLockToggle,
} = useDashboard();
const selectedData = selectedDashboard?.data || ({} as DashboardData);
const selectedData = selectedDashboard
? {
...selectedDashboard.data,
uuid: selectedDashboard.uuid,
}
: ({} as DashboardData);
const { title = '', tags, description } = selectedData || {};

View File

@@ -2,14 +2,13 @@ import './DashboardVariableSelection.styles.scss';
import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Input, Popover, Select, Tooltip, Typography } from 'antd';
import { Input, Popover, Select, Typography } from 'antd';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { debounce } from 'lodash-es';
import map from 'lodash-es/map';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { memo, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
@@ -52,7 +51,6 @@ function VariableItem({
onValueUpdate,
lastUpdatedVar,
}: VariableItemProps): JSX.Element {
const { isDashboardLocked } = useDashboard();
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
@@ -222,84 +220,77 @@ function VariableItem({
}, [variableData.type, variableData.customValue]);
return (
<Tooltip
placement="top"
title={isDashboardLocked ? 'Dashboard is locked' : ''}
>
<VariableContainer className="variable-item">
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
</Typography.Text>
<VariableValue>
{variableData.type === 'TEXTBOX' ? (
<Input
placeholder="Enter value"
disabled={isDashboardLocked}
<VariableContainer className="variable-item">
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
</Typography.Text>
<VariableValue>
{variableData.type === 'TEXTBOX' ? (
<Input
placeholder="Enter value"
bordered={false}
key={variableData.selectedValue?.toString()}
defaultValue={variableData.selectedValue?.toString()}
onChange={(e): void => {
debouncedHandleChange(e.target.value || '');
}}
style={{
width:
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
}}
/>
) : (
!errorMessage &&
optionsData && (
<Select
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
defaultValue={selectValue}
onChange={handleChange}
bordered={false}
key={variableData.selectedValue?.toString()}
defaultValue={variableData.selectedValue?.toString()}
onChange={(e): void => {
debouncedHandleChange(e.target.value || '');
}}
style={{
width:
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
}}
/>
) : (
!errorMessage &&
optionsData && (
<Select
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
defaultValue={selectValue}
onChange={handleChange}
bordered={false}
placeholder="Select value"
placement="bottomRight"
mode={mode}
dropdownMatchSelectWidth={false}
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
disabled={isDashboardLocked}
getPopupContainer={popupContainer}
>
{enableSelectAll && (
<Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
ALL
</Select.Option>
)}
{map(optionsData, (option) => (
<Select.Option
data-testid={`option-${option}`}
key={option.toString()}
value={option}
>
{option.toString()}
</Select.Option>
))}
</Select>
)
)}
{variableData.type !== 'TEXTBOX' && errorMessage && (
<span style={{ margin: '0 0.5rem' }}>
<Popover
placement="top"
content={<Typography>{errorMessage}</Typography>}
>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</VariableValue>
</VariableContainer>
</Tooltip>
placeholder="Select value"
placement="bottomRight"
mode={mode}
dropdownMatchSelectWidth={false}
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
getPopupContainer={popupContainer}
>
{enableSelectAll && (
<Select.Option data-testid="option-ALL" value={ALL_SELECT_VALUE}>
ALL
</Select.Option>
)}
{map(optionsData, (option) => (
<Select.Option
data-testid={`option-${option}`}
key={option.toString()}
value={option}
>
{option.toString()}
</Select.Option>
))}
</Select>
)
)}
{variableData.type !== 'TEXTBOX' && errorMessage && (
<span style={{ margin: '0 0.5rem' }}>
<Popover
placement="top"
content={<Typography>{errorMessage}</Typography>}
>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</VariableValue>
</VariableContainer>
);
}

View File

@@ -3,6 +3,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder';
import GridPanelSwitch from 'container/GridPanelSwitch';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { timePreferance } from 'container/NewWidget/RightContainer/timeItems';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
@@ -97,8 +98,13 @@ function WidgetGraph({
const searchParams = new URLSearchParams(window.location.search);
const startTime = searchParams.get(QueryParams.startTime);
const endTime = searchParams.get(QueryParams.endTime);
const relativeTime = searchParams.get(
QueryParams.relativeTime,
) as CustomTimeType;
if (startTime && endTime && startTime !== endTime) {
if (relativeTime) {
dispatch(UpdateTimeInterval(relativeTime));
} else if (startTime && endTime && startTime !== endTime) {
dispatch(
UpdateTimeInterval('custom', [
parseInt(getTimeString(startTime), 10),
@@ -133,6 +139,7 @@ function WidgetGraph({
softMax,
softMin,
panelType: selectedGraph,
currentQuery,
}),
[
widgetId,
@@ -148,6 +155,7 @@ function WidgetGraph({
softMax,
softMin,
selectedGraph,
currentQuery,
],
);

View File

@@ -0,0 +1,24 @@
## Install otel-collector in your Kubernetes infra
&nbsp;
Add the SigNoz Helm Chart repository
```bash
helm repo add signoz https://charts.signoz.io
```
&nbsp;
If the chart is already present, update the chart to the latest using:
```bash
helm repo update
```
&nbsp;
Install the Kubernetes Infrastructure chart provided by SigNoz
```bash
helm install my-release signoz/k8s-infra \
--set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \
--set otelInsecure=false \
--set signozApiKey={{SIGNOZ_INGESTION_KEY}} \
--set global.clusterName=<CLUSTER_NAME>
```
- Replace `<CLUSTER_NAME>` with the name of the Kubernetes cluster or a unique identifier of the cluster.
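&nbsp;
For illustration, here is the same command with a hypothetical cluster name filled in; `my-release` and `prod-cluster-1` are example values only, and the `{{REGION}}`/`{{SIGNOZ_INGESTION_KEY}}` placeholders are kept exactly as in the template above.
```bash
# Example only: "prod-cluster-1" is a hypothetical cluster name.
helm install my-release signoz/k8s-infra \
  --set otelCollectorEndpoint=ingest.{{REGION}}.signoz.cloud:443 \
  --set otelInsecure=false \
  --set signozApiKey={{SIGNOZ_INGESTION_KEY}} \
  --set global.clusterName=prod-cluster-1

# Optionally verify that the k8s-infra pods come up in the release namespace.
kubectl get pods
```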

View File

@@ -0,0 +1,64 @@
&nbsp;
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```
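&nbsp;
As an optional sanity check (not part of the original steps), you can confirm that Composer installed the packages; the exact versions shown will vary per project.
```bash
# List the OpenTelemetry-related packages Composer just installed.
composer show | grep -E 'open-telemetry|guzzle7-adapter'
```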

View File

@@ -0,0 +1,16 @@
### Set environment variables and run app
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME={MYAPP} \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
- <your-run-command> - Run command for your PHP application
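&nbsp;
For example, with a hypothetical service name `my-php-app`, a collector whose OTLP/HTTP receiver listens on the default port 4318, and PHP's built-in web server as the run command (all three are illustrative assumptions, not requirements), the invocation would look like:
```bash
# Illustrative values only: service name, endpoint, and run command are assumptions.
env OTEL_PHP_AUTOLOAD_ENABLED=true \
    OTEL_SERVICE_NAME=my-php-app \
    OTEL_TRACES_EXPORTER=otlp \
    OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
    OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318 \
    OTEL_PROPAGATORS=baggage,tracecontext \
    php -S localhost:8080
```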

View File

@@ -0,0 +1,60 @@
&nbsp;
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,16 @@
### Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME={{MYAPP}} \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <your-run-command> - Run command for your PHP application

View File

@@ -0,0 +1,96 @@
## Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_amd64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_amd64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
```bash
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```
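&nbsp;
Before wiring it into the run step, you can sanity-check the YAML. Recent `otelcol-contrib` releases include a `validate` subcommand; if the build you downloaded does not, a brief foreground run surfaces the same configuration errors.
```bash
# Check the configuration without starting pipelines (subcommand available in recent releases).
./otelcol-contrib validate --config ./config.yaml

# Or start it in the foreground briefly and watch the output for configuration errors (Ctrl+C to stop).
./otelcol-contrib --config ./config.yaml
```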

View File

@@ -0,0 +1,62 @@
&nbsp;
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,41 @@
&nbsp;
Once you are done instrumenting your PHP application, you can run it using the below commands
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME=<SERVICE_NAME> \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
- <your-run-command> - Run command for your PHP application
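&nbsp;
Optionally, before starting the application, you can confirm the collector is healthy; this sketch assumes the `health_check` extension from the config above on its default port 13133 and the default OTLP receiver ports.
```bash
# The health_check extension listens on 0.0.0.0:13133 by default.
curl -s http://localhost:13133/ && echo "collector is up"

# The OTLP receivers from the config above should be listening on 4317 (gRPC) and 4318 (HTTP).
ss -ltn | grep -E ':4317|:4318'
```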

View File

@@ -0,0 +1,60 @@
&nbsp;
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,16 @@
### Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME={{MYAPP}} \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <your-run-command> - Run command for your PHP application

View File

@@ -0,0 +1,96 @@
## Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_linux_arm64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_linux_arm64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
```bash
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

View File

@@ -0,0 +1,62 @@
&nbsp;
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,41 @@
&nbsp;
Once you are done instrumenting your PHP application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME=<SERVICE_NAME> \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
- <your-run-command> - Run command for your PHP application

View File

@@ -0,0 +1,60 @@
&nbsp;
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,16 @@
### Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME={{MYAPP}} \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <your-run-command> - Run command for your PHP application

View File

@@ -0,0 +1,96 @@
### Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_amd64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_amd64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
```bash
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

View File

@@ -0,0 +1,62 @@
&nbsp;
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,41 @@
&nbsp;
Once you are done instrumenting your PHP application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME=<SERVICE_NAME> \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
- <your-run-command> - Run command for your PHP application

View File

@@ -0,0 +1,60 @@
&nbsp;
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,16 @@
### Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME={{MYAPP}} \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=https://ingest.{{REGION}}.signoz.cloud:443 \
OTEL_EXPORTER_OTLP_HEADERS=signoz-access-token={{SIGNOZ_INGESTION_KEY}} \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <your-run-command> - Run command for your PHP application

View File

@@ -0,0 +1,96 @@
## Setup OpenTelemetry Binary as an agent
&nbsp;
### Step 1: Download otel-collector tar.gz
```bash
wget https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.79.0/otelcol-contrib_0.79.0_darwin_arm64.tar.gz
```
&nbsp;
### Step 2: Extract otel-collector tar.gz to the `otelcol-contrib` folder
```bash
mkdir otelcol-contrib && tar xvzf otelcol-contrib_0.79.0_darwin_arm64.tar.gz -C otelcol-contrib
```
&nbsp;
### Step 3: Create config.yaml in folder otelcol-contrib with the below content in it
```bash
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
hostmetrics:
collection_interval: 60s
scrapers:
cpu: {}
disk: {}
load: {}
filesystem: {}
memory: {}
network: {}
paging: {}
process:
mute_process_name_error: true
mute_process_exe_error: true
mute_process_io_error: true
processes: {}
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector-binary
static_configs:
- targets:
# - localhost:8888
processors:
batch:
send_batch_size: 1000
timeout: 10s
# Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
resourcedetection:
detectors: [env, system] # Before system detector, include ec2 for AWS, gcp for GCP and azure for Azure.
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
timeout: 2s
system:
hostname_sources: [os] # alternatively, use [dns,os] for setting FQDN as host.name and os as fallback
extensions:
health_check: {}
zpages: {}
exporters:
otlp:
endpoint: "ingest.{{REGION}}.signoz.cloud:443"
tls:
insecure: false
headers:
"signoz-access-token": "{{SIGNOZ_INGESTION_KEY}}"
logging:
verbosity: normal
service:
telemetry:
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages]
pipelines:
metrics:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
metrics/internal:
receivers: [prometheus, hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
traces:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
logs:
receivers: [otlp]
processors: [batch]
exporters: [otlp]
```

View File

@@ -0,0 +1,62 @@
&nbsp;
After setting up the Otel collector agent, follow the steps below to instrument your PHP Application
### Step 1: Setup Development Environment
To configure your PHP application to send data, you need the OpenTelemetry PHP extension. Since the extension is built from source, you need the build tools, which can be installed using the following command:
**Linux**:
```bash
sudo apt-get install gcc make autoconf
```
**MacOs(Homebrew)**:
```bash
brew install gcc make autoconf
```
&nbsp;
### Step 2: Build the extension
With our environment set up we can install the extension using [PECL](https://pecl.php.net/):
```bash
pecl install opentelemetry
```
After successfully installing the OpenTelemetry extension, add the extension to php.ini file of your project:
```bash
[opentelemetry]
extension=opentelemetry.so
```
Verify that the extension is enabled by running:
```bash
php -m | grep opentelemetry
```
Running the above command will **output**:
```bash
opentelemetry
```
&nbsp;
### Step 3: Add the dependencies
Add dependencies required to perform automatic instrumentation using this command :
```bash
composer config allow-plugins.php-http/discovery false
composer require \
open-telemetry/sdk \
open-telemetry/exporter-otlp \
php-http/guzzle7-adapter \
open-telemetry/transport-grpc
```

View File

@@ -0,0 +1,41 @@
&nbsp;
Once you are done instrumenting your PHP application, you can run it using the commands below
&nbsp;
### Step 1: Run OTel Collector
Run this command inside the `otelcol-contrib` directory that you created in the install Otel Collector step
```bash
./otelcol-contrib --config ./config.yaml &> otelcol-output.log & echo "$!" > otel-pid
```
&nbsp;
#### (Optional Step): View last 50 lines of `otelcol` logs
```bash
tail -f -n 50 otelcol-output.log
```
#### (Optional Step): Stop `otelcol`
```bash
kill "$(< otel-pid)"
```
&nbsp;
### Step 2: Running your PHP application
We will pass environment variables at runtime:
```bash
env OTEL_PHP_AUTOLOAD_ENABLED=true \
OTEL_SERVICE_NAME=<SERVICE_NAME> \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
OTEL_EXPORTER_OTLP_ENDPOINT=<COLLECTOR_ENDPOINT> \
OTEL_PROPAGATORS=baggage,tracecontext \
<your-run-command>
```
- <COLLECTOR_ENDPOINT> - Endpoint at which the collector is running. Ex. -> `http://localhost:4317`
- <your-run-command> - Run command for your PHP application

View File

@@ -403,6 +403,38 @@ import APM_javascript_reactjs_macOsARM64_quickStart_runApplication from '../Modu
import APM_javascript_reactjs_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-installOtelCollector.md';
import APM_javascript_reactjs_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-instrumentApplication.md';
import APM_javascript_reactjs_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Javascript/md-docs/ReactJS/MacOsARM64/Recommended/reactjs-macosarm64-recommended-runApplication.md';
// PHP-Kubernetes
import APM_php_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-installOtelCollector.md';
import APM_php_kubernetes_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-instrumentApplication.md';
import APM_php_kubernetes_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/Kubernetes/php-kubernetes-runApplication.md';
// PHP-LinuxAMD64-quickstart
import APM_php_linuxAMD64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-instrumentApplication.md';
import APM_php_linuxAMD64_quickStart_runApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/QuickStart/php-linuxamd64-quickStart-runApplication.md';
// PHP-LinuxAMD64-recommended
import APM_php_linuxAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-installOtelCollector.md';
import APM_php_linuxAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-instrumentApplication.md';
import APM_php_linuxAMD64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/LinuxAMD64/Recommended/php-linuxamd64-recommended-runApplication.md';
// PHP-LinuxARM64-quickstart
import APM_php_linuxARM64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-instrumentApplication.md';
import APM_php_linuxARM64_quickStart_runApplication from '../Modules/APM/Php/md-docs/LinuxARM64/QuickStart/php-linuxarm64-quickStart-runApplication.md';
// PHP-LinuxARM64-recommended
import APM_php_linuxARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-installOtelCollector.md';
import APM_php_linuxARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-instrumentApplication.md';
import APM_php_linuxARM64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/LinuxARM64/Recommended/php-linuxarm64-recommended-runApplication.md';
// PHP-MacOsAMD64-quickstart
import APM_php_macOsAMD64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-instrumentApplication.md';
import APM_php_macOsAMD64_quickStart_runApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/QuickStart/php-macosamd64-quickStart-runApplication.md';
// PHP-MacOsAMD64-recommended
import APM_php_macOsAMD64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-installOtelCollector.md';
import APM_php_macOsAMD64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-instrumentApplication.md';
import APM_php_macOsAMD64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/MacOsAMD64/Recommended/php-macosamd64-recommended-runApplication.md';
// PHP-MacOsARM64-quickstart
import APM_php_macOsARM64_quickStart_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-instrumentApplication.md';
import APM_php_macOsARM64_quickStart_runApplication from '../Modules/APM/Php/md-docs/MacOsARM64/QuickStart/php-macosarm64-quickStart-runApplication.md';
// PHP-MacOsARM64-recommended
import APM_php_macOsARM64_recommendedSteps_setupOtelCollector from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-installOtelCollector.md';
import APM_php_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-instrumentApplication.md';
import APM_php_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Php/md-docs/MacOsARM64/Recommended/php-macosarm64-recommended-runApplication.md';
/// ////// Javascript Done
/// ///// Python Start
// Django
@@ -575,7 +607,6 @@ import APM_python_other_macOsARM64_recommendedSteps_setupOtelCollector from '../
import APM_python_other_macOsARM64_recommendedSteps_instrumentApplication from '../Modules/APM/Python/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-instrumentApplication.md';
import APM_python_other_macOsARM64_recommendedSteps_runApplication from '../Modules/APM/Python/md-docs/Others/MacOsARM64/Recommended/others-macosarm64-recommended-runApplication.md';
// ----------------------------------------------------------------------------
/// ////// Go Done
/// ///// ROR Start
// ROR-Kubernetes
import APM_rails_kubernetes_recommendedSteps_setupOtelCollector from '../Modules/APM/RubyOnRails/md-docs/Kubernetes/ror-kubernetes-installOtelCollector.md';
@@ -1546,4 +1577,36 @@ export const ApmDocFilePaths = {
APM_swift_macOsARM64_recommendedSteps_setupOtelCollector,
APM_swift_macOsARM64_recommendedSteps_instrumentApplication,
APM_swift_macOsARM64_recommendedSteps_runApplication,
APM_php_kubernetes_recommendedSteps_setupOtelCollector,
APM_php_kubernetes_recommendedSteps_instrumentApplication,
APM_php_kubernetes_recommendedSteps_runApplication,
APM_php_linuxAMD64_quickStart_instrumentApplication,
APM_php_linuxAMD64_quickStart_runApplication,
APM_php_linuxAMD64_recommendedSteps_setupOtelCollector,
APM_php_linuxAMD64_recommendedSteps_instrumentApplication,
APM_php_linuxAMD64_recommendedSteps_runApplication,
APM_php_linuxARM64_quickStart_instrumentApplication,
APM_php_linuxARM64_quickStart_runApplication,
APM_php_linuxARM64_recommendedSteps_setupOtelCollector,
APM_php_linuxARM64_recommendedSteps_instrumentApplication,
APM_php_linuxARM64_recommendedSteps_runApplication,
APM_php_macOsAMD64_quickStart_instrumentApplication,
APM_php_macOsAMD64_quickStart_runApplication,
APM_php_macOsAMD64_recommendedSteps_setupOtelCollector,
APM_php_macOsAMD64_recommendedSteps_instrumentApplication,
APM_php_macOsAMD64_recommendedSteps_runApplication,
APM_php_macOsARM64_quickStart_instrumentApplication,
APM_php_macOsARM64_quickStart_runApplication,
APM_php_macOsARM64_recommendedSteps_setupOtelCollector,
APM_php_macOsARM64_recommendedSteps_instrumentApplication,
APM_php_macOsARM64_recommendedSteps_runApplication,
};

View File

@@ -132,6 +132,11 @@ const supportedLanguages = [
id: 'swift',
imgURL: `/Logos/swift.png`,
},
{
name: 'php',
id: 'php',
imgURL: `/Logos/php.png`,
},
];
export const defaultLogsType = {
@@ -293,7 +298,8 @@ export const getSupportedFrameworks = ({
(moduleID === ModulesMap.APM && dataSourceName === '.NET') ||
(moduleID === ModulesMap.APM && dataSourceName === 'rust') ||
(moduleID === ModulesMap.APM && dataSourceName === 'elixir') ||
(moduleID === ModulesMap.APM && dataSourceName === 'swift')
(moduleID === ModulesMap.APM && dataSourceName === 'swift') ||
(moduleID === ModulesMap.APM && dataSourceName === 'php')
) {
return [];
}
@@ -322,7 +328,8 @@ export const hasFrameworks = ({
(moduleID === ModulesMap.APM && dataSourceName === '.NET') ||
(moduleID === ModulesMap.APM && dataSourceName === 'rust') ||
(moduleID === ModulesMap.APM && dataSourceName === 'elixir') ||
(moduleID === ModulesMap.APM && dataSourceName === 'swift')
(moduleID === ModulesMap.APM && dataSourceName === 'swift') ||
(moduleID === ModulesMap.APM && dataSourceName === 'php')
) {
return false;
}

View File

@@ -1,3 +1,5 @@
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { OptionsQuery } from './types';
export const URL_OPTIONS = 'options';
@@ -7,3 +9,46 @@ export const defaultOptionsQuery: OptionsQuery = {
maxLines: 2,
format: 'list',
};
export const defaultTraceSelectedColumns = [
{
key: 'serviceName',
dataType: DataTypes.String,
type: 'tag',
isColumn: true,
isJSON: false,
id: 'serviceName--string--tag--true',
},
{
key: 'name',
dataType: DataTypes.String,
type: 'tag',
isColumn: true,
isJSON: false,
id: 'name--string--tag--true',
},
{
key: 'durationNano',
dataType: DataTypes.Float64,
type: 'tag',
isColumn: true,
isJSON: false,
id: 'durationNano--float64--tag--true',
},
{
key: 'httpMethod',
dataType: DataTypes.String,
type: 'tag',
isColumn: true,
isJSON: false,
id: 'httpMethod--string--tag--true',
},
{
key: 'responseStatusCode',
dataType: DataTypes.String,
type: 'tag',
isColumn: true,
isJSON: false,
id: 'responseStatusCode--string--tag--true',
},
];

Some files were not shown because too many files have changed in this diff.