Compare commits

..

8 Commits

Author SHA1 Message Date
Shivanshu Raj Shrivastava
aa24227a99 chore: tf testing
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-04-29 15:57:41 +05:30
Shivanshu Raj Shrivastava
bd3794b7d4 chore: tf testing
Signed-off-by: Shivanshu Raj Shrivastava <shivanshu1333@gmail.com>
2025-04-29 15:49:35 +05:30
Gabber235
ef4e3a30fb fix: black gap on on cloud in sidebar (#7383) (#7427)
Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-04-28 20:32:32 +00:00
Vikrant Gupta
39532d5da0 fix(zeus): build pipelines LD flags (#7754) 2025-04-28 19:40:22 +00:00
Vishal Sharma
4d216bae4d feat: init userpilot (#7579) 2025-04-28 18:42:14 +00:00
Vikrant Gupta
21563914c7 fix(ruler): telemetry for rules (#7751)
### Summary

- fix the telemetry for rules and notification channels 
- not adding bun as this needs to go away soon
2025-04-28 23:07:28 +05:30
Vibhu Pandey
accb77f227 chore(use-*): remove use-new-traces-schema and use-new-logs-schema flags (#7741)
### Summary

remove use-new-traces-schema and use-new-logs-schema flags
2025-04-28 21:01:35 +05:30
Vibhu Pandey
e73e1bd078 feat(zeus): add zeus package (#7745)
* feat(zeus): add zeus package

* feat(signoz): add DI for zeus

* feat(zeus): integrate with the codebase

* ci(make): change makefile

* ci: change workflows to point to the new zeus url

* Update ee/query-service/usage/manager.go

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* Update ee/query-service/license/manager.go

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* fix: fix nil retriable

* fix: fix zeus DI

* fix: fix path of ldflag

* feat(zeus): added zeus integration tests

* feat(zeus): added zeus integration tests

* feat(zeus): format the pytest

---------

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
Co-authored-by: vikrantgupta25 <vikrant.thomso@gmail.com>
Co-authored-by: Vikrant Gupta <vikrant@signoz.io>
2025-04-28 14:20:47 +00:00
77 changed files with 4278 additions and 315 deletions

View File

@@ -104,6 +104,8 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1'
GO_CGO_ENABLED: 1

View File

@@ -101,6 +101,8 @@ jobs:
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1'
GO_CGO_ENABLED: 1

View File

@@ -14,9 +14,9 @@ ARCHS ?= amd64 arm64
TARGET_DIR ?= $(shell pwd)/target
ZEUS_URL ?= https://api.signoz.cloud
GO_BUILD_LDFLAG_ZEUS_URL = -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=$(ZEUS_URL)
LICENSE_URL ?= https://license.signoz.io/api/v1
GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=$(LICENSE_URL)
GO_BUILD_LDFLAG_ZEUS_URL = -X github.com/SigNoz/signoz/ee/zeus.url=$(ZEUS_URL)
LICENSE_URL ?= https://license.signoz.io
GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=$(LICENSE_URL)
GO_BUILD_VERSION_LDFLAGS = -X github.com/SigNoz/signoz/pkg/version.version=$(VERSION) -X github.com/SigNoz/signoz/pkg/version.hash=$(COMMIT_SHORT_SHA) -X github.com/SigNoz/signoz/pkg/version.time=$(TIMESTAMP) -X github.com/SigNoz/signoz/pkg/version.branch=$(BRANCH_NAME)
GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS))

View File

@@ -35,6 +35,8 @@ builds:
- -X github.com/SigNoz/signoz/pkg/version.hash={{ .ShortCommit }}
- -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
- >-

View File

@@ -9,6 +9,8 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
"github.com/SigNoz/signoz/pkg/types/authtypes"
)
type DayWiseBreakdown struct {
@@ -90,8 +92,13 @@ func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request)
// this function is called by zeus when inserting licenses in the query-service
func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
var licenseKey ApplyLicenseRequest
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}
var licenseKey ApplyLicenseRequest
if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil {
RespondError(w, model.BadRequest(err), nil)
return
@@ -102,9 +109,10 @@ func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
return
}
_, apiError := ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
if apiError != nil {
RespondError(w, apiError, nil)
_, err = ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey)
if err != nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, map[string]interface{}{"err": err.Error()}, claims.Email, true, false)
render.Error(w, err)
return
}
@@ -112,10 +120,9 @@ func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) {
}
func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) {
apiError := ah.LM().RefreshLicense(r.Context())
if apiError != nil {
RespondError(w, apiError, nil)
err := ah.LM().RefreshLicense(r.Context())
if err != nil {
render.Error(w, err)
return
}
@@ -127,7 +134,6 @@ func getCheckoutPortalResponse(redirectURL string) *Redirect {
}
func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
checkoutRequest := &model.CheckoutRequest{}
if err := json.NewDecoder(r.Body).Decode(checkoutRequest); err != nil {
RespondError(w, model.BadRequest(err), nil)
@@ -140,9 +146,9 @@ func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
return
}
redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key)
redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key, ah.Signoz.Zeus)
if err != nil {
RespondError(w, err, nil)
render.Error(w, err)
return
}
@@ -230,7 +236,6 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) {
}
func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
portalRequest := &model.PortalRequest{}
if err := json.NewDecoder(r.Body).Decode(portalRequest); err != nil {
RespondError(w, model.BadRequest(err), nil)
@@ -243,9 +248,9 @@ func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
return
}
redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key)
redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key, ah.Signoz.Zeus)
if err != nil {
RespondError(w, err, nil)
render.Error(w, err)
return
}

View File

@@ -119,7 +119,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
req.UpdatedByUserID = claims.UserID
req.UpdatedAt = time.Now()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
zap.L().Info("Got UpdateSteps PAT request", zap.Any("pat", req))
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(r.Context(), claims.OrgID, req, id); apierr != nil {
RespondError(w, apierr, nil)

View File

@@ -112,7 +112,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// initiate license manager
lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore)
lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Zeus)
if err != nil {
return nil, err
}
@@ -195,7 +195,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
}
// start the usagemanager
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.Config.TelemetryStore.Clickhouse.DSN)
usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus)
if err != nil {
return nil, err
}
@@ -327,6 +327,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)
apiHandler.MetricExplorerRoutes(r, am)
apiHandler.RegisterTraceFunnelsRoutes(r, am)
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},

View File

@@ -1,16 +0,0 @@
package signozio
type status string
type ValidateLicenseResponse struct {
Status status `json:"status"`
Data map[string]interface{} `json:"data"`
}
type CheckoutSessionRedirect struct {
RedirectURL string `json:"url"`
}
type CheckoutResponse struct {
Status status `json:"status"`
Data CheckoutSessionRedirect `json:"data"`
}

View File

@@ -1,222 +1,67 @@
package signozio
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"time"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/pkg/errors"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
var C *Client
const (
POST = "POST"
APPLICATION_JSON = "application/json"
)
type Client struct {
Prefix string
GatewayUrl string
}
func New() *Client {
return &Client{
Prefix: constants.LicenseSignozIo,
GatewayUrl: constants.ZeusURL,
}
}
func init() {
C = New()
}
func ValidateLicenseV3(licenseKey string) (*model.LicenseV3, *model.ApiError) {
// Creating an HTTP client with a timeout for better control
client := &http.Client{
Timeout: 10 * time.Second,
}
req, err := http.NewRequest("GET", C.GatewayUrl+"/v2/licenses/me", nil)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to create request"))
}
// Setting the custom header
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
response, err := client.Do(req)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to make post request"))
}
body, err := io.ReadAll(response.Body)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read validation response from %v", C.GatewayUrl)))
}
defer response.Body.Close()
switch response.StatusCode {
case 200:
a := ValidateLicenseResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
}
license, err := model.NewLicenseV3(a.Data)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err, "failed to generate new license v3"))
}
return license, nil
case 400:
return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return nil, model.Unauthorized(errors.Wrap(fmt.Errorf(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
}
func NewPostRequestWithCtx(ctx context.Context, url string, contentType string, body io.Reader) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, POST, url, body)
func ValidateLicenseV3(ctx context.Context, licenseKey string, zeus zeus.Zeus) (*model.LicenseV3, error) {
data, err := zeus.GetLicense(ctx, licenseKey)
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", contentType)
return req, err
var m map[string]any
if err = json.Unmarshal(data, &m); err != nil {
return nil, err
}
license, err := model.NewLicenseV3(m)
if err != nil {
return nil, err
}
return license, nil
}
// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage model.UsagePayload) *model.ApiError {
reqString, _ := json.Marshal(usage)
req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString))
func SendUsage(ctx context.Context, usage model.UsagePayload, zeus zeus.Zeus) error {
body, err := json.Marshal(usage)
if err != nil {
return model.BadRequest(errors.Wrap(err, "unable to create http request"))
return err
}
res, err := http.DefaultClient.Do(req)
if err != nil {
return model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}
body, err := io.ReadAll(res.Body)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
}
defer res.Body.Close()
switch res.StatusCode {
case 200, 201:
return nil
case 400, 401:
return model.BadRequest(errors.Wrap(errors.New(string(body)),
"bad request error received from license.signoz.io"))
default:
return model.InternalError(errors.Wrap(errors.New(string(body)),
"internal error received from license.signoz.io"))
}
return zeus.PutMeters(ctx, usage.LicenseKey.String(), body)
}
func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string) (string, *model.ApiError) {
hClient := &http.Client{}
reqString, err := json.Marshal(checkoutRequest)
func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
body, err := json.Marshal(checkoutRequest)
if err != nil {
return "", model.BadRequest(err)
return "", err
}
req, err := http.NewRequestWithContext(ctx, "POST", C.GatewayUrl+"/v2/subscriptions/me/sessions/checkout", bytes.NewBuffer(reqString))
response, err := zeus.GetCheckoutURL(ctx, licenseKey, body)
if err != nil {
return "", model.BadRequest(err)
return "", err
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
response, err := hClient.Do(req)
if err != nil {
return "", model.BadRequest(err)
}
body, err := io.ReadAll(response.Body)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read checkout response from %v", C.GatewayUrl)))
}
defer response.Body.Close()
switch response.StatusCode {
case 201:
a := CheckoutResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, "failed to unmarshal zeus checkout response"))
}
return a.Data.RedirectURL, nil
case 400:
return "", model.BadRequest(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return "", model.Unauthorized(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return "", model.InternalError(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
return gjson.GetBytes(response, "url").String(), nil
}
func PortalSession(ctx context.Context, checkoutRequest *model.PortalRequest, licenseKey string) (string, *model.ApiError) {
hClient := &http.Client{}
reqString, err := json.Marshal(checkoutRequest)
func PortalSession(ctx context.Context, portalRequest *model.PortalRequest, licenseKey string, zeus zeus.Zeus) (string, error) {
body, err := json.Marshal(portalRequest)
if err != nil {
return "", model.BadRequest(err)
return "", err
}
req, err := http.NewRequestWithContext(ctx, "POST", C.GatewayUrl+"/v2/subscriptions/me/sessions/portal", bytes.NewBuffer(reqString))
response, err := zeus.GetPortalURL(ctx, licenseKey, body)
if err != nil {
return "", model.BadRequest(err)
return "", err
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
response, err := hClient.Do(req)
if err != nil {
return "", model.BadRequest(err)
}
body, err := io.ReadAll(response.Body)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, fmt.Sprintf("failed to read portal response from %v", C.GatewayUrl)))
}
defer response.Body.Close()
switch response.StatusCode {
case 201:
a := CheckoutResponse{}
err = json.Unmarshal(body, &a)
if err != nil {
return "", model.BadRequest(errors.Wrap(err, "failed to unmarshal zeus portal response"))
}
return a.Data.RedirectURL, nil
case 400:
return "", model.BadRequest(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("bad request error received from %v", C.GatewayUrl)))
case 401:
return "", model.Unauthorized(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("unauthorized request error received from %v", C.GatewayUrl)))
default:
return "", model.InternalError(errors.Wrap(errors.New(string(body)),
fmt.Sprintf("internal request error received from %v", C.GatewayUrl)))
}
return gjson.GetBytes(response, "url").String(), nil
}

View File

@@ -6,14 +6,13 @@ import (
"time"
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
"sync"
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/zeus"
validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/model"
@@ -29,6 +28,7 @@ var validationFrequency = 24 * 60 * time.Minute
type Manager struct {
repo *Repo
zeus zeus.Zeus
mutex sync.Mutex
validatorRunning bool
// end the license validation, this is important to gracefully
@@ -45,7 +45,7 @@ type Manager struct {
activeFeatures basemodel.FeatureSet
}
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Feature) (*Manager, error) {
func StartManager(db *sqlx.DB, store sqlstore.SQLStore, zeus zeus.Zeus, features ...basemodel.Feature) (*Manager, error) {
if LM != nil {
return LM, nil
}
@@ -53,6 +53,7 @@ func StartManager(db *sqlx.DB, store sqlstore.SQLStore, features ...basemodel.Fe
repo := NewLicenseRepo(db, store)
m := &Manager{
repo: &repo,
zeus: zeus,
}
if err := m.start(features...); err != nil {
return m, err
@@ -172,17 +173,15 @@ func (lm *Manager) ValidatorV3(ctx context.Context) {
}
}
func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
license, apiError := validate.ValidateLicenseV3(lm.activeLicenseV3.Key)
if apiError != nil {
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError
func (lm *Manager) RefreshLicense(ctx context.Context) error {
license, err := validate.ValidateLicenseV3(ctx, lm.activeLicenseV3.Key, lm.zeus)
if err != nil {
return err
}
err := lm.repo.UpdateLicenseV3(ctx, license)
err = lm.repo.UpdateLicenseV3(ctx, license)
if err != nil {
return model.BadRequest(errors.Wrap(err, "failed to update the new license"))
return err
}
lm.SetActiveV3(license)
@@ -190,7 +189,6 @@ func (lm *Manager) RefreshLicense(ctx context.Context) *model.ApiError {
}
func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
zap.L().Info("License validation started")
if lm.activeLicenseV3 == nil {
return nil
}
@@ -236,28 +234,17 @@ func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) {
return nil
}
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (licenseResponse *model.LicenseV3, errResponse *model.ApiError) {
defer func() {
if errResponse != nil {
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED,
map[string]interface{}{"err": errResponse.Err.Error()}, claims.Email, true, false)
}
}
}()
license, apiError := validate.ValidateLicenseV3(licenseKey)
if apiError != nil {
zap.L().Error("failed to get the license", zap.Error(apiError.Err))
return nil, apiError
func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (*model.LicenseV3, error) {
license, err := validate.ValidateLicenseV3(ctx, licenseKey, lm.zeus)
if err != nil {
return nil, err
}
// insert the new license to the sqlite db
err := lm.repo.InsertLicenseV3(ctx, license)
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, err
modelErr := lm.repo.InsertLicenseV3(ctx, license)
if modelErr != nil {
zap.L().Error("failed to activate license", zap.Error(modelErr))
return nil, modelErr
}
// license is valid, activate it

View File

@@ -8,6 +8,8 @@ import (
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
@@ -109,6 +111,8 @@ func main() {
signoz, err := signoz.New(
context.Background(),
config,
zeus.Config(),
httpzeus.NewProviderFactory(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
sqlStoreFactories,

View File

@@ -297,7 +297,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
// UpdateSteps the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
alert.Value = a.Value

View File

@@ -4,7 +4,6 @@ import (
"context"
"encoding/json"
"fmt"
"regexp"
"strings"
"sync/atomic"
"time"
@@ -16,10 +15,10 @@ import (
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/dao"
licenseserver "github.com/SigNoz/signoz/ee/query-service/integrations/signozio"
"github.com/SigNoz/signoz/ee/query-service/license"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
"github.com/SigNoz/signoz/pkg/zeus"
)
const (
@@ -42,26 +41,16 @@ type Manager struct {
modelDao dao.ModelDao
tenantID string
zeus zeus.Zeus
}
func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, chUrl string) (*Manager, error) {
hostNameRegex := regexp.MustCompile(`tcp://(?P<hostname>.*):`)
hostNameRegexMatches := hostNameRegex.FindStringSubmatch(chUrl)
tenantID := ""
if len(hostNameRegexMatches) == 2 {
tenantID = hostNameRegexMatches[1]
tenantID = strings.TrimSuffix(tenantID, "-clickhouse")
}
func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, zeus zeus.Zeus) (*Manager, error) {
m := &Manager{
// repository: repo,
clickhouseConn: clickhouseConn,
licenseRepo: licenseRepo,
scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC
modelDao: modelDao,
tenantID: tenantID,
zeus: zeus,
}
return m, nil
}
@@ -158,7 +147,7 @@ func (lm *Manager) UploadUsage() {
usageData.Type = usage.Type
usageData.Tenant = "default"
usageData.OrgName = "default"
usageData.TenantId = lm.tenantID
usageData.TenantId = "default"
usagesPayload = append(usagesPayload, usageData)
}
@@ -167,24 +156,18 @@ func (lm *Manager) UploadUsage() {
LicenseKey: key,
Usage: usagesPayload,
}
lm.UploadUsageWithExponentalBackOff(ctx, payload)
}
func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) {
for i := 1; i <= MaxRetries; i++ {
apiErr := licenseserver.SendUsage(ctx, payload)
if apiErr != nil && i == MaxRetries {
zap.L().Error("retries stopped : %v", zap.Error(apiErr))
// not returning error here since it is captured in the failed count
return
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break
}
body, errv2 := json.Marshal(payload)
if errv2 != nil {
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
return
}
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
if errv2 != nil {
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
// not returning error here since it is captured in the failed count
return
}
}

42
ee/zeus/config.go Normal file
View File

@@ -0,0 +1,42 @@
package zeus
import (
"fmt"
neturl "net/url"
"sync"
"github.com/SigNoz/signoz/pkg/zeus"
)
// This will be set via ldflags at build time.
var (
url string = "<unset>"
deprecatedURL string = "<unset>"
)
var (
config zeus.Config
once sync.Once
)
// initializes the Zeus configuration
func Config() zeus.Config {
once.Do(func() {
parsedURL, err := neturl.Parse(url)
if err != nil {
panic(fmt.Errorf("invalid zeus URL: %w", err))
}
deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
if err != nil {
panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
}
config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
if err := config.Validate(); err != nil {
panic(fmt.Errorf("invalid zeus config: %w", err))
}
})
return config
}

View File

@@ -0,0 +1,189 @@
package httpzeus
import (
"bytes"
"context"
"io"
"net/http"
"net/url"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
type Provider struct {
settings factory.ScopedProviderSettings
config zeus.Config
httpClient *client.Client
}
func NewProviderFactory() factory.ProviderFactory[zeus.Zeus, zeus.Config] {
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
return New(ctx, providerSettings, config)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/zeus/httpzeus")
httpClient, err := client.New(
settings.Logger(),
providerSettings.TracerProvider,
providerSettings.MeterProvider,
client.WithRequestResponseLog(true),
client.WithRetryCount(3),
)
if err != nil {
return nil, err
}
return &Provider{
settings: settings,
config: config,
httpClient: httpClient,
}, nil
}
func (provider *Provider) GetLicense(ctx context.Context, key string) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/licenses/me"),
http.MethodGet,
key,
nil,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) GetCheckoutURL(ctx context.Context, key string, body []byte) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/subscriptions/me/sessions/checkout"),
http.MethodPost,
key,
body,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) GetPortalURL(ctx context.Context, key string, body []byte) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/subscriptions/me/sessions/portal"),
http.MethodPost,
key,
body,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) GetDeployment(ctx context.Context, key string) ([]byte, error) {
response, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me"),
http.MethodGet,
key,
nil,
)
if err != nil {
return nil, err
}
return []byte(gjson.GetBytes(response, "data").String()), nil
}
func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte) error {
_, err := provider.do(
ctx,
provider.config.DeprecatedURL.JoinPath("/api/v1/usage"),
http.MethodPost,
key,
data,
)
return err
}
func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/profiles/me"),
http.MethodPut,
key,
body,
)
return err
}
func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
http.MethodPut,
key,
body,
)
return err
}
func (provider *Provider) do(ctx context.Context, url *url.URL, method string, key string, requestBody []byte) ([]byte, error) {
request, err := http.NewRequestWithContext(ctx, method, url.String(), bytes.NewBuffer(requestBody))
if err != nil {
return nil, err
}
request.Header.Set("X-Signoz-Cloud-Api-Key", key)
request.Header.Set("Content-Type", "application/json")
response, err := provider.httpClient.Do(request)
if err != nil {
return nil, err
}
defer func() {
_ = response.Body.Close()
}()
body, err := io.ReadAll(response.Body)
if err != nil {
return nil, err
}
if response.StatusCode/100 == 2 {
return body, nil
}
return nil, provider.errFromStatusCode(response.StatusCode)
}
// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int) error {
switch statusCode {
case http.StatusBadRequest:
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
case http.StatusUnauthorized:
return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
case http.StatusForbidden:
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
case http.StatusNotFound:
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
}
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
}

View File

@@ -132,6 +132,7 @@
"tsconfig-paths-webpack-plugin": "^3.5.1",
"typescript": "^4.0.5",
"uplot": "1.6.31",
"userpilot": "1.3.9",
"uuid": "^8.3.2",
"web-vitals": "^0.2.4",
"webpack": "5.94.0",

View File

@@ -26,6 +26,7 @@ import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { Userpilot } from 'userpilot';
import { extractDomain } from 'utils/app';
import { Home } from './pageComponents';
@@ -100,6 +101,18 @@ function App(): JSX.Element {
logEvent('Domain Identified', groupTraits, 'group');
}
Userpilot.identify(email, {
email,
name,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!trialInfo?.trialConvertedToSubscription,
});
posthog?.identify(email, {
email,
name,
@@ -276,6 +289,10 @@ function App(): JSX.Element {
});
}
if (process.env.USERPILOT_KEY) {
Userpilot.initialize(process.env.USERPILOT_KEY);
}
Sentry.init({
dsn: process.env.SENTRY_DSN,
tunnel: process.env.TUNNEL_URL,

View File

@@ -64,6 +64,10 @@ export const TraceDetail = Loadable(
),
);
export const UsageExplorerPage = Loadable(
() => import(/* webpackChunkName: "UsageExplorerPage" */ 'modules/Usage'),
);
export const SignupPage = Loadable(
() => import(/* webpackChunkName: "SignupPage" */ 'pages/SignUp'),
);

View File

@@ -57,6 +57,7 @@ import {
TracesFunnels,
TracesSaveViews,
UnAuthorized,
UsageExplorerPage,
WorkspaceAccessRestricted,
WorkspaceBlocked,
WorkspaceSuspended,
@@ -154,6 +155,13 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'SETTINGS',
},
{
path: ROUTES.USAGE_EXPLORER,
exact: true,
component: UsageExplorerPage,
isPrivate: true,
key: 'USAGE_EXPLORER',
},
{
path: ROUTES.ALL_DASHBOARD,
exact: true,

View File

@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/logs/getLogs';
/**
 * Fetches logs matching the given query props from the `/logs` endpoint.
 * Resolves to a SuccessResponse carrying `results`, or a normalized
 * ErrorResponse when the request fails.
 */
const GetLogs = async (
	props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
	try {
		const response = await axios.get(`/logs`, { params: props });

		return {
			statusCode: 200,
			error: null,
			message: '',
			payload: response.data.results,
		};
	} catch (error) {
		return ErrorResponseHandler(error as AxiosError);
	}
};

export default GetLogs;

View File

@@ -0,0 +1,19 @@
import apiV1 from 'api/apiV1';
import getLocalStorageKey from 'api/browser/localstorage/get';
import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { EventSourcePolyfill } from 'event-source-polyfill';
// EventSource heartbeat timeout: 10 minutes, in milliseconds.
const TIMEOUT_IN_MS = 10 * 60 * 1000;

/**
 * Opens a server-sent-events stream tailing logs that match `queryParams`.
 * The stored auth token is sent via the Authorization header (the native
 * EventSource cannot set headers, hence the polyfill).
 */
export const LiveTail = (queryParams: string): EventSourcePolyfill => {
	const url = `${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`;

	return new EventSourcePolyfill(url, {
		headers: {
			Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`,
		},
		heartbeatTimeout: TIMEOUT_IN_MS,
	});
};

View File

@@ -17,6 +17,7 @@ const ROUTES = {
'/get-started/infrastructure-monitoring',
GET_STARTED_AWS_MONITORING: '/get-started/aws-monitoring',
GET_STARTED_AZURE_MONITORING: '/get-started/azure-monitoring',
USAGE_EXPLORER: '/usage-explorer',
APPLICATION: '/services',
ALL_DASHBOARD: '/dashboard',
DASHBOARD: '/dashboard/:dashboardId',

View File

@@ -133,3 +133,231 @@ const ServicesListTable = memo(
),
);
ServicesListTable.displayName = 'ServicesListTable';
// Home-page "Services" card backed by the metrics (span-metrics) pipeline:
// fetches top-level operations, builds query-range requests from them, and
// shows the top five services sorted by p99. The card keeps its own time
// range, independent of the global time picker.
function ServiceMetrics({
	onUpdateChecklistDoneItem,
	loadingUserPreferences,
}: {
	onUpdateChecklistDoneItem: (itemKey: string) => void;
	loadingUserPreferences: boolean;
}): JSX.Element {
	const { selectedTime: globalSelectedInterval } = useSelector<
		AppState,
		GlobalReducer
	>((state) => state.globalTime);
	const { user, activeLicenseV3 } = useAppContext();
	// Card-local window, initialised to the default home interval ending now.
	const [timeRange, setTimeRange] = useState(() => {
		const now = new Date().getTime();
		return {
			startTime: now - homeInterval,
			endTime: now,
			selectedInterval: homeInterval,
		};
	});
	const { queries } = useResourceAttribute();
	const { safeNavigate } = useSafeNavigate();
	const selectedTags = useMemo(
		() => (convertRawQueriesToTraceSelectedTags(queries) as Tags[]) || [],
		[queries],
	);
	const [isError, setIsError] = useState(false);
	const queryKey: QueryKey = useMemo(
		() => [
			timeRange.startTime,
			timeRange.endTime,
			selectedTags,
			globalSelectedInterval,
		],
		[
			timeRange.startTime,
			timeRange.endTime,
			selectedTags,
			globalSelectedInterval,
		],
	);
	// Times are converted from ms to ns (* 1e6) for the backend.
	const {
		data,
		isLoading: isLoadingTopLevelOperations,
		isError: isErrorTopLevelOperations,
	} = useGetTopLevelOperations(queryKey, {
		start: timeRange.startTime * 1e6,
		end: timeRange.endTime * 1e6,
	});
	const handleTimeIntervalChange = useCallback((value: number): void => {
		const timeInterval = TIME_PICKER_OPTIONS.find(
			(option) => option.value === value,
		);
		logEvent('Homepage: Services time interval updated', {
			updatedTimeInterval: timeInterval?.label,
		});
		const now = new Date();
		setTimeRange({
			startTime: now.getTime() - value,
			endTime: now.getTime(),
			selectedInterval: value,
		});
	}, []);
	const topLevelOperations = useMemo(() => Object.entries(data || {}), [data]);
	const queryRangeRequestData = useMemo(
		() =>
			getQueryRangeRequestData({
				topLevelOperations,
				minTime: timeRange.startTime * 1e6,
				maxTime: timeRange.endTime * 1e6,
				globalSelectedInterval,
			}),
		[
			globalSelectedInterval,
			timeRange.endTime,
			timeRange.startTime,
			topLevelOperations,
		],
	);
	// One query-range request per top-level operation set.
	const dataQueries = useGetQueriesRange(
		queryRangeRequestData,
		ENTITY_VERSION_V4,
		{
			queryKey: useMemo(
				() => [
					`GetMetricsQueryRange-home-${globalSelectedInterval}`,
					timeRange.endTime,
					timeRange.startTime,
					globalSelectedInterval,
				],
				[globalSelectedInterval, timeRange.endTime, timeRange.startTime],
			),
			keepPreviousData: true,
			enabled: true,
			refetchOnMount: false,
			onError: () => {
				setIsError(true);
			},
		},
	);
	const isLoading = useMemo(() => dataQueries.some((query) => query.isLoading), [
		dataQueries,
	]);
	const services: ServicesList[] = useMemo(
		() =>
			getServiceListFromQuery({
				queries: dataQueries,
				topLevelOperations,
				isLoading,
			}),
		[dataQueries, topLevelOperations, isLoading],
	);
	// Sort descending by p99. NOTE(review): p99 is run through new Date()
	// before comparing — confirm p99 is actually a date-like value here.
	const sortedServices = useMemo(
		() =>
			services?.sort((a, b) => {
				const aUpdateAt = new Date(a.p99).getTime();
				const bUpdateAt = new Date(b.p99).getTime();
				return bUpdateAt - aUpdateAt;
			}) || [],
		[services],
	);
	const servicesExist = sortedServices.length > 0;
	const top5Services = useMemo(() => sortedServices.slice(0, 5), [
		sortedServices,
	]);
	// Mark the onboarding checklist item done once services are observed.
	useEffect(() => {
		if (!loadingUserPreferences && servicesExist) {
			onUpdateChecklistDoneItem('SETUP_SERVICES');
		}
	}, [onUpdateChecklistDoneItem, loadingUserPreferences, servicesExist]);
	const handleRowClick = useCallback(
		(record: ServicesList) => {
			logEvent('Homepage: Service clicked', {
				serviceName: record.serviceName,
			});
			safeNavigate(`${ROUTES.APPLICATION}/${record.serviceName}`);
		},
		[safeNavigate],
	);
	if (isLoadingTopLevelOperations || isLoading) {
		return (
			<Card className="services-list-card home-data-card loading-card">
				<Card.Content>
					<Skeleton active />
				</Card.Content>
			</Card>
		);
	}
	// NOTE(review): the error branch also renders a Skeleton (only the CSS
	// class differs) — confirm whether a real error state was intended.
	if (isErrorTopLevelOperations || isError) {
		return (
			<Card className="services-list-card home-data-card error-card">
				<Card.Content>
					<Skeleton active />
				</Card.Content>
			</Card>
		);
	}
	return (
		<Card className="services-list-card home-data-card">
			{servicesExist && (
				<Card.Header>
					<div className="services-header home-data-card-header">
						{' '}
						Services
						<div className="services-header-actions">
							<Select
								value={timeRange.selectedInterval}
								onChange={handleTimeIntervalChange}
								options={TIME_PICKER_OPTIONS}
								className="services-header-select"
							/>
						</div>
					</div>
				</Card.Header>
			)}
			<Card.Content>
				{servicesExist ? (
					<ServicesListTable services={top5Services} onRowClick={handleRowClick} />
				) : (
					<EmptyState user={user} activeLicenseV3={activeLicenseV3} />
				)}
			</Card.Content>
			{servicesExist && (
				<Card.Footer>
					<div className="services-footer home-data-card-footer">
						<Link to="/services">
							<Button
								type="link"
								className="periscope-btn link learn-more-link"
								onClick={(): void => {
									logEvent('Homepage: All Services clicked', {});
								}}
							>
								All Services <ArrowRight size={12} />
							</Button>
						</Link>
					</div>
				</Card.Footer>
			)}
		</Card>
	);
}
export default memo(ServiceMetrics);

View File

@@ -21,10 +21,17 @@ function Services({
return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="home-services-container">
<ServiceTraces
{isSpanMetricEnabled ? (
<ServiceMetrics
onUpdateChecklistDoneItem={onUpdateChecklistDoneItem}
loadingUserPreferences={loadingUserPreferences}
/>
) : (
<ServiceTraces
onUpdateChecklistDoneItem={onUpdateChecklistDoneItem}
loadingUserPreferences={loadingUserPreferences}
/>
)}
</div>
</Sentry.ErrorBoundary>
);

View File

@@ -481,6 +481,7 @@ export const apDexMetricsQueryBuilderQueries = ({
export const operationPerSec = ({
servicename,
tagFilterItems,
topLevelOperations,
}: OperationPerSecProps): QueryBuilderData => {
const autocompleteData: BaseAutocompleteData[] = [
{

View File

@@ -1,4 +1,7 @@
import logEvent from 'api/common/logEvent';
import getTopLevelOperations, {
ServiceDataProps,
} from 'api/metrics/getTopLevelOperations';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
@@ -107,6 +110,21 @@ function Application(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const {
data: topLevelOperations,
error: topLevelOperationsError,
isLoading: topLevelOperationsIsLoading,
isError: topLevelOperationsIsError,
} = useQuery<ServiceDataProps>({
queryKey: [servicename, minTime, maxTime],
queryFn: (): Promise<ServiceDataProps> =>
getTopLevelOperations({
service: servicename || '',
start: minTime,
end: maxTime,
}),
});
const selectedTraceTags: string = JSON.stringify(
convertRawQueriesToTraceSelectedTags(queries) || [],
);
@@ -119,6 +137,14 @@ function Application(): JSX.Element {
[queries],
);
const topLevelOperationsRoute = useMemo(
() =>
topLevelOperations
? defaultTo(topLevelOperations[servicename || ''], [])
: [],
[servicename, topLevelOperations],
);
const operationPerSecWidget = useMemo(
() =>
getWidgetQueryBuilder({

View File

@@ -110,9 +110,16 @@
}
.nav-wrapper {
height: calc(100% - 52px);
display: flex;
flex-direction: column;
justify-content: space-between;
.primary-nav-items {
max-height: 65%;
display: flex;
flex-direction: column;
flex: 1;
min-height: 0;
max-height: 100%;
overflow-y: auto;
overflow-x: hidden;
@@ -121,15 +128,14 @@
}
}
.secondary-nav-items {
max-height: 35%;
display: flex;
flex-direction: column;
flex-shrink: 0;
overflow-y: auto;
overflow-x: hidden;
border-top: 1px solid var(--bg-slate-400);
padding: 8px 0;
max-width: 100%;
position: fixed;
bottom: 0;
left: 0;
width: 64px;
transition: all 0.2s, background 0s, border 0s;

View File

@@ -0,0 +1,224 @@
/* eslint-disable */
//@ts-nocheck
import { Select, Space, Typography } from 'antd';
import Graph from 'components/Graph';
import { useEffect, useState } from 'react';
import { connect, useSelector } from 'react-redux';
import { withRouter } from 'react-router-dom';
import { GetService, getUsageData, UsageDataItem } from 'store/actions';
import { AppState } from 'store/reducers';
import { GlobalTime } from 'types/actions/globalTime';
import { GlobalReducer } from 'types/reducer/globalTime';
import MetricReducer from 'types/reducer/metrics';
import { isOnboardingSkipped } from 'utils/app';
import { Card } from './styles';
const { Option } = Select;

// Props for the legacy usage explorer; most are injected by connect().
interface UsageExplorerProps {
	usageData: UsageDataItem[];
	getUsageData: (
		minTime: number,
		maxTime: number,
		selectedInterval: number,
		selectedService: string,
	) => void;
	getServicesList: ({
		selectedTimeInterval,
	}: {
		selectedTimeInterval: GlobalReducer['selectedTime'];
	}) => void;
	globalTime: GlobalTime;
	// NOTE(review): `servicesListItem` is not imported in this file — it only
	// compiles because of @ts-nocheck; confirm the intended type.
	servicesList: servicesListItem[];
	totalCount: number;
}

// Look-back window choices, expressed in days.
const timeDaysOptions = [
	{ value: 30, label: 'Last 30 Days' },
	{ value: 7, label: 'Last week' },
	{ value: 1, label: 'Last day' },
];

// Bucket sizes in seconds; `applicableOn` restricts which windows each bucket
// may be paired with. NOTE(review): chartDivideMultiplier is not used in this
// chunk — confirm its consumer before removing.
const interval = [
	{
		value: 604800,
		chartDivideMultiplier: 1,
		label: 'Weekly',
		applicableOn: [timeDaysOptions[0]],
	},
	{
		value: 86400,
		chartDivideMultiplier: 30,
		label: 'Daily',
		applicableOn: [timeDaysOptions[0], timeDaysOptions[1]],
	},
	{
		value: 3600,
		chartDivideMultiplier: 10,
		label: 'Hours',
		applicableOn: [timeDaysOptions[2], timeDaysOptions[1]],
	},
];
// Legacy redux-connected usage explorer: the user picks a look-back window,
// a bucket interval, and a service, and span counts are charted over time.
function _UsageExplorer(props: UsageExplorerProps): JSX.Element {
	const [selectedTime, setSelectedTime] = useState(timeDaysOptions[1]);
	const [selectedInterval, setSelectedInterval] = useState(interval[2]);
	// Empty string means "All Services".
	const [selectedService, setSelectedService] = useState<string>('');
	const { selectedTime: globalSelectedTime } = useSelector<
		AppState,
		GlobalReducer
	>((state) => state.globalTime);
	const {
		getServicesList,
		getUsageData,
		globalTime,
		totalCount,
		usageData,
	} = props;
	const { services } = useSelector<AppState, MetricReducer>(
		(state) => state.metrics,
	);
	// Refetch usage whenever window, bucket, or service changes.
	// Both bounds are epoch nanoseconds (ms * 1e6).
	useEffect(() => {
		if (selectedTime && selectedInterval) {
			const maxTime = new Date().getTime() * 1000000;
			const minTime = maxTime - selectedTime.value * 24 * 3600000 * 1000000;
			getUsageData(minTime, maxTime, selectedInterval.value, selectedService);
		}
	}, [selectedTime, selectedInterval, selectedService, getUsageData]);
	useEffect(() => {
		getServicesList({
			selectedTimeInterval: globalSelectedTime,
		});
	}, [globalTime, getServicesList, globalSelectedTime]);
	// Chart dataset: usage timestamps arrive in ns and are converted to Date.
	const data = {
		labels: usageData.map((s) => new Date(s.timestamp / 1000000)),
		datasets: [
			{
				label: 'Span Count',
				data: usageData.map((s) => s.count),
				backgroundColor: 'rgba(255, 99, 132, 0.2)',
				borderColor: 'rgba(255, 99, 132, 1)',
				borderWidth: 2,
			},
		],
	};
	return (
		<>
			<Space style={{ marginTop: 40, marginLeft: 20 }}>
				{/* Look-back window selector */}
				<Space>
					<Select
						onSelect={(value): void => {
							setSelectedTime(
								timeDaysOptions.filter((item) => item.value == parseInt(value))[0],
							);
						}}
						value={selectedTime.label}
					>
						{timeDaysOptions.map(({ value, label }) => (
							<Option key={value} value={value}>
								{label}
							</Option>
						))}
					</Select>
				</Space>
				{/* Bucket-interval selector, limited to intervals valid for the window */}
				<Space>
					<Select
						onSelect={(value): void => {
							setSelectedInterval(
								interval.filter((item) => item.value === parseInt(value))[0],
							);
						}}
						value={selectedInterval.label}
					>
						{interval
							.filter((interval) => interval.applicableOn.includes(selectedTime))
							.map((item) => (
								<Option key={item.label} value={item.value}>
									{item.label}
								</Option>
							))}
					</Select>
				</Space>
				{/* Service selector; the empty value represents all services */}
				<Space>
					<Select
						onSelect={(value): void => {
							setSelectedService(value);
						}}
						value={selectedService || 'All Services'}
					>
						<Option value="">All Services</Option>
						{services?.map((service) => (
							<Option key={service.serviceName} value={service.serviceName}>
								{service.serviceName}
							</Option>
						))}
					</Select>
				</Space>
				{isOnboardingSkipped() && totalCount === 0 ? (
					<Space
						style={{
							width: '100%',
							margin: '40px 0',
							marginLeft: 20,
							justifyContent: 'center',
						}}
					>
						<Typography>
							No spans found. Please add instrumentation (follow this
							<a
								href="https://signoz.io/docs/instrumentation/overview"
								target="_blank"
								style={{ marginLeft: 3 }}
								rel="noreferrer"
							>
								guide
							</a>
							)
						</Typography>
					</Space>
				) : (
					<Space style={{ display: 'block', marginLeft: 20, width: 200 }}>
						<Typography>{`Total count is ${totalCount}`}</Typography>
					</Space>
				)}
			</Space>
			<Card>
				<Graph name="usage" data={data} type="bar" />
			</Card>
		</>
	);
}
// Derives the usage page's props from the redux store: the raw usage series,
// the total span count across all buckets, and the global time selection.
const mapStateToProps = (
	state: AppState,
): {
	totalCount: number;
	globalTime: GlobalTime;
	usageData: UsageDataItem[];
} => ({
	totalCount: state.usageDate.reduce((sum, item) => sum + item.count, 0),
	usageData: state.usageDate,
	globalTime: state.globalTime,
});

export const UsageExplorer = withRouter(
	connect(mapStateToProps, {
		getUsageData,
		getServicesList: GetService,
	})(_UsageExplorer),
);

View File

@@ -0,0 +1,7 @@
import { UsageExplorer } from './UsageExplorer';
// Thin route-level wrapper so the usage explorer can be lazy-loaded as a page.
function UsageExplorerContainer(): JSX.Element {
	return <UsageExplorer />;
}
export default UsageExplorerContainer;

View File

@@ -0,0 +1,13 @@
import { Card as CardComponent } from 'antd';
import styled from 'styled-components';
// Chart container card: near-full-width with a fixed-height body so the
// usage bar chart has a stable drawing area.
export const Card = styled(CardComponent)`
	&&& {
		width: 90%;
		margin-top: 2rem;
	}
	.ant-card-body {
		height: 70vh;
	}
`;

View File

@@ -2,3 +2,4 @@ export * from './global';
export * from './metrics';
export * from './serviceMap';
export * from './types';
export * from './usage';

View File

@@ -0,0 +1,34 @@
import GetLogs from 'api/logs/GetLogs';
import { Dispatch } from 'redux';
import AppActions from 'types/actions';
import { SET_LOADING, SET_LOGS } from 'types/actions/logs';
import { Props } from 'types/api/logs/getLogs';
/**
 * Thunk that loads logs for the given query props, toggling the loading flag
 * around the request and storing either the fetched results or an empty list.
 */
export const getLogs = (
	props: Props,
): ((dispatch: Dispatch<AppActions>) => void) => async (
	dispatch,
): Promise<void> => {
	dispatch({ type: SET_LOADING, payload: true });

	const response = await GetLogs(props);

	// A missing payload (error path) stores an empty list instead.
	dispatch({
		type: SET_LOGS,
		payload: response.payload ? response.payload : [],
	});

	dispatch({ type: SET_LOADING, payload: false });
};

View File

@@ -1,14 +1,17 @@
import { ServiceMapItemAction, ServiceMapLoading } from './serviceMap';
import { GetUsageDataAction } from './usage';
// Legacy (pre-RTK) redux action registry. The string values are the action
// types reducers switch on. NOTE(review): getUsageData maps to
// 'GET_USAGE_DATE' — looks like a typo of GET_USAGE_DATA, but reducers match
// on the enum member, so renaming the value is safe only after checking for
// string-literal consumers.
export enum ActionTypes {
	updateTimeInterval = 'UPDATE_TIME_INTERVAL',
	getServiceMapItems = 'GET_SERVICE_MAP_ITEMS',
	getServices = 'GET_SERVICES',
	getUsageData = 'GET_USAGE_DATE',
	fetchTraces = 'FETCH_TRACES',
	fetchTraceItem = 'FETCH_TRACE_ITEM',
	serviceMapLoading = 'UPDATE_SERVICE_MAP_LOADING',
}

// Union of all actions the legacy reducers handle.
export type Action =
	| GetUsageDataAction
	| ServiceMapItemAction
	| ServiceMapLoading;

View File

@@ -0,0 +1,34 @@
import api from 'api';
import { Dispatch } from 'redux';
import { toUTCEpoch } from 'utils/timeUtils';
import { ActionTypes } from './types';
// One bucket of span-usage data: a timestamp (ns, per the consuming chart)
// and the span count in that bucket.
export interface UsageDataItem {
	timestamp: number;
	count: number;
}

// Action carrying a freshly fetched usage series.
export interface GetUsageDataAction {
	type: ActionTypes.getUsageData;
	payload: UsageDataItem[];
}

/**
 * Fetches span-usage counts between minTime and maxTime (converted to UTC
 * epoch before sending), bucketed by `step`, optionally filtered to one
 * service, and dispatches the series to the usage reducer.
 */
export const getUsageData = (
	minTime: number,
	maxTime: number,
	step: number,
	service: string,
) => async (dispatch: Dispatch): Promise<void> => {
	// Renamed from the misspelled `requesString`.
	const requestString = `/usage?start=${toUTCEpoch(minTime)}&end=${toUTCEpoch(
		maxTime,
	)}&step=${step}&service=${service || ''}`;
	// Step can only be multiple of 3600
	const response = await api.get<UsageDataItem[]>(requestString);

	dispatch<GetUsageDataAction>({
		type: ActionTypes.getUsageData,
		payload: response.data,
		// PNOTE - response.data in the axios response has the actual API response
	});
};

View File

@@ -6,9 +6,11 @@ import { LogsReducer } from './logs';
import metricsReducers from './metric';
import { ServiceMapReducer } from './serviceMap';
import traceReducer from './trace';
import { usageDataReducer } from './usage';
const reducers = combineReducers({
traces: traceReducer,
usageDate: usageDataReducer,
globalTime: globalTimeReducer,
serviceMap: ServiceMapReducer,
app: appReducer,

View File

@@ -0,0 +1,14 @@
/* eslint-disable sonarjs/no-small-switch */
import { Action, ActionTypes, UsageDataItem } from 'store/actions';
// Reducer for the span-usage series. Seeds state with a single zero bucket so
// consumers can render before the first fetch resolves; replaces the whole
// series on each getUsageData action.
export const usageDataReducer = (
	state: UsageDataItem[] = [{ timestamp: 0, count: 0 }],
	action: Action,
): UsageDataItem[] => {
	if (action.type === ActionTypes.getUsageData) {
		return action.payload;
	}
	return state;
};

View File

@@ -24,6 +24,7 @@ const plugins = [
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,
USERPILOT_KEY: process.env.USERPILOT_KEY,
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
SENTRY_ORG: process.env.SENTRY_ORG,
SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,
@@ -43,6 +44,7 @@ const plugins = [
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,
USERPILOT_KEY: process.env.USERPILOT_KEY,
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
SENTRY_ORG: process.env.SENTRY_ORG,
SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,

View File

@@ -29,6 +29,7 @@ const plugins = [
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,
USERPILOT_KEY: process.env.USERPILOT_KEY,
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
SENTRY_ORG: process.env.SENTRY_ORG,
SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,
@@ -53,6 +54,7 @@ const plugins = [
CUSTOMERIO_SITE_ID: process.env.CUSTOMERIO_SITE_ID,
CUSTOMERIO_ID: process.env.CUSTOMERIO_ID,
POSTHOG_KEY: process.env.POSTHOG_KEY,
USERPILOT_KEY: process.env.USERPILOT_KEY,
SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
SENTRY_ORG: process.env.SENTRY_ORG,
SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,

View File

@@ -3135,6 +3135,30 @@
strict-event-emitter "^0.2.4"
web-encoding "^1.1.5"
"@ndhoule/each@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@ndhoule/each/-/each-2.0.1.tgz#bbed372a603e0713a3193c706a73ddebc5b426a9"
integrity sha512-wHuJw6x+rF6Q9Skgra++KccjBozCr9ymtna0FhxmV/8xT/hZ2ExGYR8SV8prg8x4AH/7mzDYErNGIVHuzHeybw==
dependencies:
"@ndhoule/keys" "^2.0.0"
"@ndhoule/includes@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@ndhoule/includes/-/includes-2.0.1.tgz#051ff5eb042c8fa17e7158f0a8a70172e1affaa5"
integrity sha512-Q8zN6f3yIhxgBwZ5ldLozHqJlc/fRQ5+hFFsPMFeC9SJvz0nq8vG9hoRXL1c1iaNFQd7yAZIy2igQpERoFqxqg==
dependencies:
"@ndhoule/each" "^2.0.1"
"@ndhoule/keys@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@ndhoule/keys/-/keys-2.0.0.tgz#3d64ae677c65a261747bf3a457c62eb292a4e0ce"
integrity sha512-vtCqKBC1Av6dsBA8xpAO+cgk051nfaI+PnmTZep2Px0vYrDvpUmLxv7z40COlWH5yCpu3gzNhepk+02yiQiZNw==
"@ndhoule/pick@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@ndhoule/pick/-/pick-2.0.0.tgz#e1eb1a6ca3243eef56daa095c3a1612c74a52156"
integrity sha512-xkYtpf1pRd8egwvl5tJcdGu+GBd6ZZH3S/zoIQ9txEI+pHF9oTIlxMC9G4CB3sRugAeLgu8qYJGl3tnxWq74Qw==
"@nodelib/fs.scandir@2.1.5":
version "2.1.5"
resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz"
@@ -6713,6 +6737,11 @@ compare-func@^2.0.0:
array-ify "^1.0.0"
dot-prop "^5.1.0"
component-indexof@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/component-indexof/-/component-indexof-0.0.3.tgz#11d091312239eb8f32c8f25ae9cb002ffe8d3c24"
integrity sha512-puDQKvx/64HZXb4hBwIcvQLaLgux8o1CbWl39s41hrIIZDl1lJiD5jc22gj3RBeGK0ovxALDYpIbyjqDUUl0rw==
compressible@~2.0.16:
version "2.0.18"
resolved "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz"
@@ -10742,6 +10771,11 @@ is-wsl@^2.2.0:
dependencies:
is-docker "^2.0.0"
is@^3.1.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/is/-/is-3.3.0.tgz#61cff6dd3c4193db94a3d62582072b44e5645d79"
integrity sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==
isarray@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
@@ -13130,6 +13164,11 @@ nwsapi@^2.2.0:
resolved "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.4.tgz"
integrity sha512-NHj4rzRo0tQdijE9ZqAx6kYDcoRwYwSYzCA8MY3JzfxlrvEU0jhnhJT9BhqhJs7I/dKcrDm6TyulaRqZPIhN5g==
obj-case@^0.2.0:
version "0.2.1"
resolved "https://registry.yarnpkg.com/obj-case/-/obj-case-0.2.1.tgz#13a554d04e5ca32dfd9d566451fd2b0e11007f1a"
integrity sha512-PquYBBTy+Y6Ob/O2574XHhDtHJlV1cJHMCgW+rDRc9J5hhmRelJB3k5dTK/3cVmFVtzvAKuENeuLpoyTzMzkOg==
object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz"
@@ -17466,6 +17505,17 @@ use-sync-external-store@^1.0.0:
resolved "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz"
integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==
userpilot@1.3.9:
version "1.3.9"
resolved "https://registry.yarnpkg.com/userpilot/-/userpilot-1.3.9.tgz#6374083f3e84cbf1fc825133588b5b499054271b"
integrity sha512-V0QIuIlAJPB8s3j+qtv7BW7NKSXthlZWuowIu+IZOMGLgUbqQTaSW5m1Ct4wJviPKUNOi8kbhCXN4c4b3zcJzg==
dependencies:
"@ndhoule/includes" "^2.0.1"
"@ndhoule/pick" "^2.0.0"
component-indexof "0.0.3"
is "^3.1.0"
obj-case "^0.2.0"
util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"

View File

@@ -6,6 +6,7 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/http/client/plugin"
"github.com/gojek/heimdall/v7"
"github.com/gojek/heimdall/v7/httpclient"
"go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
"go.opentelemetry.io/otel/metric"
@@ -33,6 +34,15 @@ func New(logger *slog.Logger, tracerProvider trace.TracerProvider, meterProvider
Transport: otelhttp.NewTransport(http.DefaultTransport, otelhttp.WithTracerProvider(tracerProvider), otelhttp.WithMeterProvider(meterProvider)),
}
if clientOpts.retriable == nil {
clientOpts.retriable = heimdall.NewRetrier(
heimdall.NewConstantBackoff(
2*time.Second,
100*time.Millisecond,
),
)
}
c := httpclient.NewClient(
httpclient.WithHTTPClient(netc),
httpclient.WithRetrier(clientOpts.retriable),

View File

@@ -63,7 +63,7 @@ func (plugin *reqResLog) OnRequestEnd(request *http.Request, response *http.Resp
func (plugin *reqResLog) OnError(request *http.Request, err error) {
host, port, _ := net.SplitHostPort(request.Host)
fields := []any{
err,
"error", err,
string(semconv.HTTPRequestMethodKey), request.Method,
string(semconv.URLPathKey), request.URL.Path,
string(semconv.URLSchemeKey), request.URL.Scheme,

View File

@@ -0,0 +1,533 @@
package impltracefunnel
import (
"encoding/json"
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/types/authtypes"
tf "github.com/SigNoz/signoz/pkg/types/tracefunnel"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
// handler implements tracefunnel.Handler on top of a tracefunnel.Module,
// translating HTTP requests into module calls and rendering JSON responses.
type handler struct {
	module tracefunnel.Module
}

// NewHandler wraps the given module in an HTTP handler.
func NewHandler(module tracefunnel.Module) tracefunnel.Handler {
	return &handler{module: module}
}
// New creates a funnel for the authenticated user's organization.
// The request is rejected when another funnel in the organization already
// has the requested name; on success it responds with the new funnel's
// identity and creation metadata.
func (handler *handler) New(rw http.ResponseWriter, r *http.Request) {
	var req tf.FunnelRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		render.Error(rw, err)
		return
	}

	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(rw, err)
		return
	}
	userID := claims.UserID
	orgID := claims.OrgID

	// Enforce per-organization name uniqueness before creating.
	funnels, err := handler.module.List(r.Context(), orgID)
	if err != nil {
		render.Error(rw, err)
		return
	}
	for _, f := range funnels {
		if f.Name == req.Name {
			render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "a funnel with name '%s' already exists in this organization", req.Name))
			return
		}
	}

	funnel, err := handler.module.Create(r.Context(), req.Timestamp, req.Name, userID, orgID)
	if err != nil {
		// Include the underlying cause instead of swallowing it, matching the
		// error style of the other handlers in this file.
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to create funnel: %v", err))
		return
	}

	response := tf.FunnelResponse{
		FunnelID:   funnel.ID.String(),
		FunnelName: funnel.Name,
		CreatedAt:  req.Timestamp,
		UserEmail:  claims.Email,
		OrgID:      orgID,
	}
	render.Success(rw, http.StatusOK, response)
}
// UpdateSteps replaces a funnel's step list (and optionally its name and
// description) with the contents of the request body, then responds with the
// funnel re-read from storage. The funnel is addressed by req.FunnelID in the
// body, unlike UpdateFunnel which reads it from the URL path.
func (handler *handler) UpdateSteps(rw http.ResponseWriter, r *http.Request) {
	var req tf.FunnelRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		render.Error(rw, err)
		return
	}
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(rw, err)
		return
	}
	userID := claims.UserID
	orgID := claims.OrgID
	if err := tracefunnel.ValidateTimestamp(req.Timestamp, "timestamp"); err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "timestamp is invalid: %v", err))
		return
	}
	funnel, err := handler.module.Get(r.Context(), req.FunnelID.String())
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
		return
	}
	// Check if name is being updated and if it already exists
	// (uniqueness is only enforced when the name actually changes).
	if req.Name != "" && req.Name != funnel.Name {
		funnels, err := handler.module.List(r.Context(), orgID)
		if err != nil {
			render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to list funnels: %v", err))
			return
		}
		for _, f := range funnels {
			if f.Name == req.Name {
				render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "a funnel with name '%s' already exists in this organization", req.Name))
				return
			}
		}
	}
	// Process each step in the request: backfill sequential order values and
	// assign IDs to steps that arrived without one.
	for i := range req.Steps {
		if req.Steps[i].Order < 1 {
			req.Steps[i].Order = int64(i + 1) // Default to sequential ordering if not specified
		}
		// Generate a new UUID for the step if it doesn't have one
		if req.Steps[i].Id.IsZero() {
			newUUID := valuer.GenerateUUID()
			req.Steps[i].Id = newUUID
		}
	}
	if err := tracefunnel.ValidateFunnelSteps(req.Steps); err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid funnel steps: %v", err))
		return
	}
	// Normalize step orders
	req.Steps = tracefunnel.NormalizeFunnelSteps(req.Steps)
	// UpdateSteps the funnel with new steps
	funnel.Steps = req.Steps
	funnel.UpdatedAt = time.Unix(0, req.Timestamp*1000000) // Convert to nanoseconds
	funnel.UpdatedBy = userID
	if req.Name != "" {
		funnel.Name = req.Name
	}
	if req.Description != "" {
		funnel.Description = req.Description
	}
	// UpdateSteps funnel in database
	err = handler.module.Update(r.Context(), funnel, userID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update funnel in database: %v", err))
		return
	}
	// NOTE(review): dead code retained from a previous revision — metadata is
	// now written via module.Update above; consider deleting this block.
	//// UpdateSteps name and description if provided
	//if req.Name != "" || req.Description != "" {
	//	name := req.Name
	//
	//	description := req.Description
	//
	//	err = handler.module.UpdateMetadata(r.Context(), funnel.ID, name, description, userID)
	//	if err != nil {
	//		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update funnel metadata: %v", err))
	//		return
	//	}
	//}
	// Get the updated funnel to return in response
	updatedFunnel, err := handler.module.Get(r.Context(), funnel.ID.String())
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to get updated funnel: %v", err))
		return
	}
	// Timestamps in the response are epoch milliseconds.
	response := tf.FunnelResponse{
		FunnelName:  updatedFunnel.Name,
		FunnelID:    updatedFunnel.ID.String(),
		Steps:       updatedFunnel.Steps,
		CreatedAt:   updatedFunnel.CreatedAt.UnixNano() / 1000000,
		CreatedBy:   updatedFunnel.CreatedBy,
		OrgID:       updatedFunnel.OrgID.String(),
		UpdatedBy:   userID,
		UpdatedAt:   updatedFunnel.UpdatedAt.UnixNano() / 1000000,
		Description: updatedFunnel.Description,
	}
	render.Success(rw, http.StatusOK, response)
}
// UpdateFunnel updates a funnel's metadata (name, description, update
// timestamp) for the funnel identified by the funnel_id path variable, and
// responds with the funnel as re-read from storage.
func (handler *handler) UpdateFunnel(rw http.ResponseWriter, r *http.Request) {
	var payload tf.FunnelRequest
	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
		render.Error(rw, err)
		return
	}

	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(rw, err)
		return
	}

	if err := tracefunnel.ValidateTimestamp(payload.Timestamp, "timestamp"); err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "timestamp is invalid: %v", err))
		return
	}

	funnelID := mux.Vars(r)["funnel_id"]
	current, err := handler.module.Get(r.Context(), funnelID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
		return
	}

	// A rename must not collide with another funnel in the same organization.
	if payload.Name != "" && payload.Name != current.Name {
		siblings, err := handler.module.List(r.Context(), claims.OrgID)
		if err != nil {
			render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to list funnels: %v", err))
			return
		}
		for _, sibling := range siblings {
			if sibling.Name == payload.Name {
				render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "a funnel with name '%s' already exists in this organization", payload.Name))
				return
			}
		}
	}

	// Apply metadata changes; empty fields leave the stored value untouched.
	current.UpdatedAt = time.Unix(0, payload.Timestamp*1000000) // Convert to nanoseconds
	current.UpdatedBy = claims.UserID
	if payload.Name != "" {
		current.Name = payload.Name
	}
	if payload.Description != "" {
		current.Description = payload.Description
	}

	if err = handler.module.Update(r.Context(), current, claims.UserID); err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update funnel in database: %v", err))
		return
	}

	// Re-read the persisted state so the response reflects what was stored.
	refreshed, err := handler.module.Get(r.Context(), current.ID.String())
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to get updated funnel: %v", err))
		return
	}

	// Timestamps in the response are epoch milliseconds.
	render.Success(rw, http.StatusOK, tf.FunnelResponse{
		FunnelName:  refreshed.Name,
		FunnelID:    refreshed.ID.String(),
		Steps:       refreshed.Steps,
		CreatedAt:   refreshed.CreatedAt.UnixNano() / 1000000,
		CreatedBy:   refreshed.CreatedBy,
		OrgID:       refreshed.OrgID.String(),
		UpdatedBy:   claims.UserID,
		UpdatedAt:   refreshed.UpdatedAt.UnixNano() / 1000000,
		Description: refreshed.Description,
	})
}
// List returns every funnel belonging to the caller's organization, with
// timestamps expressed as epoch milliseconds.
func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		// Propagate the auth error as-is instead of mislabelling it as invalid
		// input named "unauthenticated"; render maps error types to statuses.
		render.Error(rw, err)
		return
	}
	orgID := claims.OrgID

	funnels, err := handler.module.List(r.Context(), orgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to list funnels: %v", err))
		return
	}

	// Kept as a nil-initialized slice on purpose: an org with no funnels
	// serializes the same way it did before this change.
	var response []tf.FunnelResponse
	for _, f := range funnels {
		funnelResp := tf.FunnelResponse{
			FunnelName:  f.Name,
			FunnelID:    f.ID.String(),
			CreatedAt:   f.CreatedAt.UnixNano() / 1000000,
			CreatedBy:   f.CreatedBy,
			OrgID:       f.OrgID.String(),
			UpdatedAt:   f.UpdatedAt.UnixNano() / 1000000,
			UpdatedBy:   f.UpdatedBy,
			Description: f.Description,
		}
		// Get user email if available
		if f.CreatedByUser != nil {
			funnelResp.UserEmail = f.CreatedByUser.Email
		}
		response = append(response, funnelResp)
	}
	render.Success(rw, http.StatusOK, response)
}
// Get looks up a single funnel by the funnel_id path variable and returns
// its full details, including step definitions.
func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
	funnelID := mux.Vars(r)["funnel_id"]

	funnel, err := handler.module.Get(r.Context(), funnelID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
		return
	}

	// Assemble the response; timestamps are reported in milliseconds.
	response := tf.FunnelResponse{
		FunnelID:    funnel.ID.String(),
		FunnelName:  funnel.Name,
		Description: funnel.Description,
		CreatedAt:   funnel.CreatedAt.UnixMilli(),
		UpdatedAt:   funnel.UpdatedAt.UnixMilli(),
		CreatedBy:   funnel.CreatedBy,
		UpdatedBy:   funnel.UpdatedBy,
		OrgID:       funnel.OrgID.String(),
		Steps:       funnel.Steps,
	}
	// Attach the creator's email when the relation was loaded.
	if funnel.CreatedByUser != nil {
		response.UserEmail = funnel.CreatedByUser.Email
	}
	render.Success(rw, http.StatusOK, response)
}
// Delete removes the funnel identified by the funnel_id path variable.
func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
	funnelID := mux.Vars(r)["funnel_id"]
	if err := handler.module.Delete(r.Context(), funnelID); err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to delete funnel: %v", err))
		return
	}
	render.Success(rw, http.StatusOK, nil)
}
// Save applies a metadata update (description, updated-at, optionally
// updated-by) to an existing funnel, persists it, then re-reads the
// stored metadata so the response reflects what was actually written.
func (handler *handler) Save(rw http.ResponseWriter, r *http.Request) {
	var req tf.FunnelRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request: %v", err))
		return
	}
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unauthenticated"))
		return
	}
	orgID := claims.OrgID
	usrID := claims.UserID
	// The funnel must already exist; Save never creates one.
	funnel, err := handler.module.Get(r.Context(), req.FunnelID.String())
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
		return
	}
	// Default the update timestamp to "now"; otherwise it must be a
	// milliseconds (13-digit) value.
	updateTimestamp := req.Timestamp
	if updateTimestamp == 0 {
		updateTimestamp = time.Now().UnixMilli()
	} else if !tracefunnel.ValidateTimestampIsMilliseconds(updateTimestamp) {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "timestamp must be in milliseconds format (13 digits)"))
		return
	}
	funnel.UpdatedAt = time.Unix(0, updateTimestamp*1000000) // Convert to nanoseconds
	// NOTE(review): UpdatedBy is taken from the authenticated claims, but
	// only when the request body carries a non-empty UserID — confirm this
	// gating is intentional rather than `if usrID != ""`.
	if req.UserID != "" {
		funnel.UpdatedBy = usrID
	}
	funnel.Description = req.Description
	if err := handler.module.Save(r.Context(), funnel, funnel.UpdatedBy, orgID); err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to save funnel: %v", err))
		return
	}
	// Re-read metadata from the DB so the response echoes persisted values
	// (createdAt/updatedAt are milliseconds; the third value is the stored
	// description, per GetFunnelMetadata).
	createdAt, updatedAt, extraDataFromDB, err := handler.module.GetFunnelMetadata(r.Context(), funnel.ID.String())
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to get funnel metadata: %v", err))
		return
	}
	resp := tf.FunnelResponse{
		FunnelName:  funnel.Name,
		CreatedAt:   createdAt,
		UpdatedAt:   updatedAt,
		CreatedBy:   funnel.CreatedBy,
		UpdatedBy:   funnel.UpdatedBy,
		OrgID:       funnel.OrgID.String(),
		Description: extraDataFromDB,
	}
	render.Success(rw, http.StatusOK, resp)
}
//func (handler *handler) ValidateTraces(rw http.ResponseWriter, r *http.Request) {
// vars := mux.Vars(r)
// funnelID := vars["funnel_id"]
//
// funnel, err := handler.module.Get(r.Context(), funnelID)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
// return
// }
//
// var timeRange tf.TimeRange
// if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error decoding time range: %v", err))
// return
// }
//
// response, err := handler.module.ValidateTraces(r.Context(), funnel, timeRange)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error validating traces: %v", err))
// return
// }
//
// render.Success(rw, http.StatusOK, response)
//}
//
//func (handler *handler) FunnelAnalytics(rw http.ResponseWriter, r *http.Request) {
// vars := mux.Vars(r)
// funnelID := vars["funnel_id"]
//
// funnel, err := handler.module.Get(r.Context(), funnelID)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
// return
// }
//
// var timeRange tf.TimeRange
// if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error decoding time range: %v", err))
// return
// }
//
// response, err := handler.module.GetFunnelAnalytics(r.Context(), funnel, timeRange)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error getting funnel analytics: %v", err))
// return
// }
//
// render.Success(rw, http.StatusOK, response)
//}
//
//func (handler *handler) StepAnalytics(rw http.ResponseWriter, r *http.Request) {
// vars := mux.Vars(r)
// funnelID := vars["funnel_id"]
//
// funnel, err := handler.module.Get(r.Context(), funnelID)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "funnel not found: %v", err))
// return
// }
//
// var timeRange tf.TimeRange
// if err := json.NewDecoder(r.Body).Decode(&timeRange); err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error decoding time range: %v", err))
// return
// }
//
// response, err := handler.module.GetStepAnalytics(r.Context(), funnel, timeRange)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error getting step analytics: %v", err))
// return
// }
//
// render.Success(rw, http.StatusOK, response)
//}
//
//func (handler *handler) SlowestTraces(rw http.ResponseWriter, r *http.Request) {
// handler.handleTracesWithLatency(rw, r, false)
//}
//
//func (handler *handler) ErrorTraces(rw http.ResponseWriter, r *http.Request) {
// handler.handleTracesWithLatency(rw, r, true)
//}
//
//// handleTracesWithLatency handles both slow and error traces with common logic
//func (handler *handler) handleTracesWithLatency(rw http.ResponseWriter, r *http.Request, isError bool) {
// funnel, req, err := handler.validateTracesRequest(r)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "%v", err))
// return
// }
//
// if err := tracefunnel.ValidateSteps(funnel, req.StepAOrder, req.StepBOrder); err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "%v", err))
// return
// }
//
// response, err := handler.module.GetSlowestTraces(r.Context(), funnel, req.StepAOrder, req.StepBOrder, req.TimeRange, isError)
// if err != nil {
// render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "error getting traces: %v", err))
// return
// }
//
// render.Success(rw, http.StatusOK, response)
//}
//
//// validateTracesRequest validates and extracts the request parameters
//func (handler *handler) validateTracesRequest(r *http.Request) (*tf.Funnel, *tf.StepTransitionRequest, error) {
// vars := mux.Vars(r)
// funnelID := vars["funnel_id"]
//
// funnel, err := handler.module.Get(r.Context(), funnelID)
// if err != nil {
// return nil, nil, fmt.Errorf("funnel not found: %v", err)
// }
//
// var req tf.StepTransitionRequest
// if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
// return nil, nil, fmt.Errorf("invalid request body: %v", err)
// }
//
// return funnel, &req, nil
//}

View File

@@ -0,0 +1,220 @@
package impltracefunnel
import (
"context"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/types"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
"github.com/SigNoz/signoz/pkg/valuer"
)
// module implements tracefunnel.Module on top of a TraceFunnelStore.
type module struct {
	store traceFunnels.TraceFunnelStore
}

// NewModule wires a TraceFunnelStore into a tracefunnel.Module.
func NewModule(store traceFunnels.TraceFunnelStore) tracefunnel.Module {
	return &module{
		store: store,
	}
}
// Create builds a new funnel named name, owned by userID in orgID, and
// persists it through the store. timestamp is milliseconds since the Unix
// epoch (the handler converts it the same way elsewhere).
func (module *module) Create(ctx context.Context, timestamp int64, name string, userID string, orgID string) (*traceFunnels.Funnel, error) {
	orgUUID, err := valuer.NewUUID(orgID)
	if err != nil {
		return nil, fmt.Errorf("invalid org ID: %v", err)
	}

	funnel := &traceFunnels.Funnel{
		BaseMetadata: traceFunnels.BaseMetadata{
			Name:  name,
			OrgID: orgUUID,
		},
	}
	// time.UnixMilli is the idiomatic, overflow-safe equivalent of
	// time.Unix(0, timestamp*1000000).
	funnel.CreatedAt = time.UnixMilli(timestamp)
	funnel.CreatedBy = userID

	// Attach the user relationship so the store can persist created_by.
	funnel.CreatedByUser = &types.User{
		ID: userID,
	}

	if err := module.store.Create(ctx, funnel); err != nil {
		return nil, fmt.Errorf("failed to create funnel: %v", err)
	}
	return funnel, nil
}
// Get loads a funnel by its string ID, validating it parses as a UUID
// before hitting the store.
func (module *module) Get(ctx context.Context, funnelID string) (*traceFunnels.Funnel, error) {
	id, err := valuer.NewUUID(funnelID)
	if err != nil {
		return nil, fmt.Errorf("invalid funnel ID: %v", err)
	}
	return module.store.Get(ctx, id)
}
// Update stamps the funnel with the updating user and persists it via the
// store (which refreshes updated_at itself).
func (module *module) Update(ctx context.Context, funnel *traceFunnels.Funnel, userID string) error {
	funnel.UpdatedBy = userID
	return module.store.Update(ctx, funnel)
}
// List returns every funnel that belongs to the given organization.
// The store returns funnels across all orgs, so filtering happens here.
func (module *module) List(ctx context.Context, orgID string) ([]*traceFunnels.Funnel, error) {
	orgUUID, err := valuer.NewUUID(orgID)
	if err != nil {
		return nil, fmt.Errorf("invalid org ID: %v", err)
	}

	all, err := module.store.List(ctx)
	if err != nil {
		return nil, err
	}

	// Keep only funnels owned by this organization.
	var matched []*traceFunnels.Funnel
	for _, funnel := range all {
		if funnel.OrgID == orgUUID {
			matched = append(matched, funnel)
		}
	}
	return matched, nil
}
// Delete removes a funnel by its string ID, validating it parses as a
// UUID before hitting the store.
func (module *module) Delete(ctx context.Context, funnelID string) error {
	id, err := valuer.NewUUID(funnelID)
	if err != nil {
		return fmt.Errorf("invalid funnel ID: %v", err)
	}
	return module.store.Delete(ctx, id)
}
// Save stamps ownership metadata (org and updating user) onto the funnel
// and writes it through the store's Update path.
func (module *module) Save(ctx context.Context, funnel *traceFunnels.Funnel, userID string, orgID string) error {
	orgUUID, err := valuer.NewUUID(orgID)
	if err != nil {
		return fmt.Errorf("invalid org ID: %v", err)
	}

	funnel.OrgID = orgUUID
	funnel.UpdatedBy = userID
	return module.store.Update(ctx, funnel)
}
// GetFunnelMetadata returns the creation time, last-update time (both in
// milliseconds since the Unix epoch) and description of the funnel
// identified by funnelID.
func (module *module) GetFunnelMetadata(ctx context.Context, funnelID string) (int64, int64, string, error) {
	id, err := valuer.NewUUID(funnelID)
	if err != nil {
		return 0, 0, "", fmt.Errorf("invalid funnel ID: %v", err)
	}
	funnel, err := module.store.Get(ctx, id)
	if err != nil {
		return 0, 0, "", err
	}
	// UnixMilli is equivalent to UnixNano()/1000000 but self-documenting.
	return funnel.CreatedAt.UnixMilli(), funnel.UpdatedAt.UnixMilli(), funnel.Description, nil
}
// ValidateTraces validates traces in a funnel
//func (module *module) ValidateTraces(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) ([]*v3.Row, error) {
// chq, err := tracefunnel.ValidateTraces(funnel, timeRange)
// if err != nil {
// RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", err)}, nil)
// return
// }
//
// results, err := aH.reader. GetListResultV3(r.Context(), chq.Query)
// if err != nil {
// RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", err)}, nil)
// return
// }
//
//}
// GetFunnelAnalytics gets analytics for a funnel
//func (module *module) GetFunnelAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error) {
// if err := tracefunnel.ValidateFunnel(funnel); err != nil {
// return nil, fmt.Errorf("invalid funnel: %v", err)
// }
//
// if err := tracefunnel.ValidateTimeRange(timeRange); err != nil {
// return nil, fmt.Errorf("invalid time range: %v", err)
// }
//
// _, err := tracefunnel.ValidateTracesWithLatency(funnel, timeRange)
// if err != nil {
// return nil, fmt.Errorf("error building clickhouse query: %v", err)
// }
//
// // TODO: Execute query and return results
// // For now, return empty analytics
// return &traceFunnels.FunnelAnalytics{
// TotalStart: 0,
// TotalComplete: 0,
// ErrorCount: 0,
// AvgDurationMs: 0,
// P99LatencyMs: 0,
// ConversionRate: 0,
// }, nil
//}
// GetStepAnalytics gets analytics for each step
//func (module *module) GetStepAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error) {
// if err := tracefunnel.ValidateFunnel(funnel); err != nil {
// return nil, fmt.Errorf("invalid funnel: %v", err)
// }
//
// if err := tracefunnel.ValidateTimeRange(timeRange); err != nil {
// return nil, fmt.Errorf("invalid time range: %v", err)
// }
//
// _, err := tracefunnel.GetStepAnalytics(funnel, timeRange)
// if err != nil {
// return nil, fmt.Errorf("error building clickhouse query: %v", err)
// }
//
// // TODO: Execute query and return results
// // For now, return empty analytics
// return &traceFunnels.FunnelAnalytics{
// TotalStart: 0,
// TotalComplete: 0,
// ErrorCount: 0,
// AvgDurationMs: 0,
// P99LatencyMs: 0,
// ConversionRate: 0,
// }, nil
//}
// GetSlowestTraces gets the slowest traces between two steps
//func (module *module) GetSlowestTraces(ctx context.Context, funnel *traceFunnels.Funnel, stepAOrder, stepBOrder int64, timeRange traceFunnels.TimeRange, isError bool) (*traceFunnels.ValidTracesResponse, error) {
// if err := tracefunnel.ValidateFunnel(funnel); err != nil {
// return nil, fmt.Errorf("invalid funnel: %v", err)
// }
//
// if err := tracefunnel.ValidateTimeRange(timeRange); err != nil {
// return nil, fmt.Errorf("invalid time range: %v", err)
// }
//
// _, err := tracefunnel.GetSlowestTraces(funnel, stepAOrder, stepBOrder, timeRange, isError)
// if err != nil {
// return nil, fmt.Errorf("error building clickhouse query: %v", err)
// }
//
// // TODO: Execute query and return results
// // For now, return empty response
// return &traceFunnels.ValidTracesResponse{
// TraceIDs: []string{},
// }, nil
//}
//UpdateMetadata updates the metadata of a funnel
//func (module *module) UpdateMetadata(ctx context.Context, funnelID valuer.UUID, name, description string, userID string) error {
// return module.store.UpdateMetadata(ctx, funnelID, name, description, userID)
//}

View File

@@ -0,0 +1,220 @@
package impltracefunnel
import (
"context"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/sqlstore"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
"github.com/SigNoz/signoz/pkg/valuer"
)
// store implements traceFunnels.TraceFunnelStore backed by the SQL
// database exposed through sqlstore's bun handle.
type store struct {
	sqlstore sqlstore.SQLStore
}

// NewStore wires a SQLStore into a TraceFunnelStore.
func NewStore(sqlstore sqlstore.SQLStore) traceFunnels.TraceFunnelStore {
	return &store{sqlstore: sqlstore}
}
// Create inserts a new funnel row, generating an ID and timestamps for
// any zero-valued fields, then backfills the created_by column from the
// attached creator relation when one is present.
func (store *store) Create(ctx context.Context, funnel *traceFunnels.Funnel) error {
	// Fill identity/timestamps only when the caller left them zero.
	if funnel.ID.IsZero() {
		funnel.ID = valuer.GenerateUUID()
	}
	if funnel.CreatedAt.IsZero() {
		funnel.CreatedAt = time.Now()
	}
	if funnel.UpdatedAt.IsZero() {
		funnel.UpdatedAt = time.Now()
	}
	_, err := store.
		sqlstore.
		BunDB().
		NewInsert().
		Model(funnel).
		Exec(ctx)
	if err != nil {
		return fmt.Errorf("failed to create funnel: %v", err)
	}
	// NOTE(review): this second statement rewrites created_by after the
	// insert — presumably because the relation field is not persisted by
	// the insert itself; confirm whether a single INSERT carrying the
	// column would suffice.
	if funnel.CreatedByUser != nil {
		_, err = store.sqlstore.BunDB().NewUpdate().
			Model(funnel).
			Set("created_by = ?", funnel.CreatedByUser.ID).
			Where("id = ?", funnel.ID).
			Exec(ctx)
		if err != nil {
			return fmt.Errorf("failed to update funnel user relationship: %v", err)
		}
	}
	return nil
}
// Get fetches a single funnel (with its creator relation) by primary key.
func (store *store) Get(ctx context.Context, uuid valuer.UUID) (*traceFunnels.Funnel, error) {
	funnel := new(traceFunnels.Funnel)
	if err := store.sqlstore.BunDB().
		NewSelect().
		Model(funnel).
		Relation("CreatedByUser").
		Where("?TableAlias.id = ?", uuid).
		Scan(ctx); err != nil {
		return nil, fmt.Errorf("failed to get funnel: %v", err)
	}
	return funnel, nil
}
// Update persists the funnel row identified by its primary key.
func (store *store) Update(ctx context.Context, funnel *traceFunnels.Funnel) error {
	// Refresh the updated_at timestamp before writing.
	funnel.UpdatedAt = time.Now()
	_, err := store.
		sqlstore.
		BunDB().
		NewUpdate().
		Model(funnel).
		WherePK().
		Exec(ctx)
	if err != nil {
		return fmt.Errorf("failed to update funnel: %v", err)
	}
	return nil
}
// List fetches every funnel row (across all organizations), with the
// creator relation loaded.
func (store *store) List(ctx context.Context) ([]*traceFunnels.Funnel, error) {
	var funnels []*traceFunnels.Funnel
	if err := store.sqlstore.BunDB().
		NewSelect().
		Model(&funnels).
		Relation("CreatedByUser").
		Scan(ctx); err != nil {
		return nil, fmt.Errorf("failed to list funnels: %v", err)
	}
	return funnels, nil
}
// Delete removes the funnel row with the given primary key.
func (store *store) Delete(ctx context.Context, uuid valuer.UUID) error {
	if _, err := store.sqlstore.BunDB().
		NewDelete().
		Model((*traceFunnels.Funnel)(nil)).
		Where("id = ?", uuid).
		Exec(ctx); err != nil {
		return fmt.Errorf("failed to delete funnel: %v", err)
	}
	return nil
}
// ListByOrg retrieves all funnels for a specific organization
//func (store *store) ListByOrg(ctx context.Context, orgID valuer.UUID) ([]*traceFunnels.Funnel, error) {
// var funnels []*traceFunnels.Funnel
// err := store.
// sqlstore.
// BunDB().
// NewSelect().
// Model(&funnels).
// Relation("CreatedByUser").
// Where("org_id = ?", orgID).
// Scan(ctx)
// if err != nil {
// return nil, fmt.Errorf("failed to list funnels by org: %v", err)
// }
// return funnels, nil
//}
// GetByIDAndOrg retrieves a funnel by ID and organization ID
//func (store *store) GetByIDAndOrg(ctx context.Context, id, orgID valuer.UUID) (*traceFunnels.Funnel, error) {
// funnel := &traceFunnels.Funnel{}
// err := store.
// sqlstore.
// BunDB().
// NewSelect().
// Model(funnel).
// Relation("CreatedByUser").
// Where("?TableAlias.id = ? AND ?TableAlias.org_id = ?", id, orgID).
// Scan(ctx)
// if err != nil {
// return nil, fmt.Errorf("failed to get funnel by ID and org: %v", err)
// }
// return funnel, nil
//}
// UpdateSteps updates the steps of a funnel
//func (store *store) UpdateSteps(ctx context.Context, funnelID valuer.UUID, steps []traceFunnels.FunnelStep) error {
// _, err := store.
// sqlstore.
// BunDB().
// NewUpdate().
// Model((*traceFunnels.Funnel)(nil)).
// Set("steps = ?", steps).
// Where("id = ?", funnelID).
// Exec(ctx)
// if err != nil {
// return fmt.Errorf("failed to update funnel steps: %v", err)
// }
// return nil
//}
// UpdateMetadata updates the metadata of a funnel
//func (store *store) UpdateMetadata(ctx context.Context, funnelID valuer.UUID, name, description string, userID string) error {
//
// // First get the current funnel to preserve other fields
// funnel := &traceFunnels.Funnel{}
// err := store.
// sqlstore.
// BunDB().
// NewSelect().
// Model(funnel).
// Where("id = ?", funnelID).
// Scan(ctx)
// if err != nil {
// return fmt.Errorf("failed to get funnel: %v", err)
// }
//
// // UpdateSteps the fields
// funnel.Name = name
// funnel.Description = description
// funnel.UpdatedAt = time.Now()
// funnel.UpdatedBy = userID
//
// // Save the updated funnel
// _, err = store.
// sqlstore.
// BunDB().
// NewUpdate().
// Model(funnel).
// WherePK().
// Exec(ctx)
// if err != nil {
// return fmt.Errorf("failed to update funnel metadata: %v", err)
// }
//
// // Verify the update
// updatedFunnel := &traceFunnels.Funnel{}
// err = store.
// sqlstore.
// BunDB().
// NewSelect().
// Model(updatedFunnel).
// Where("id = ?", funnelID).
// Scan(ctx)
// if err != nil {
// return fmt.Errorf("failed to verify update: %v", err)
// }
//
// return nil
//}

View File

@@ -0,0 +1,442 @@
package tracefunnel
import (
"fmt"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
tracefunnel "github.com/SigNoz/signoz/pkg/types/tracefunnel"
"strings"
)
// GetSlowestTraces builds a ClickHouse query returning the 5 slowest
// traces that pass through both the stepAOrder and stepBOrder steps of
// the funnel within timeRange. When withErrors is true, only traces with
// an error on either step are considered.
func GetSlowestTraces(funnel *tracefunnel.Funnel, stepAOrder, stepBOrder int64, timeRange tracefunnel.TimeRange, withErrors bool) (*v3.ClickHouseQuery, error) {
	// Find steps by order.
	var stepA, stepB *tracefunnel.FunnelStep
	for i := range funnel.Steps {
		if funnel.Steps[i].Order == stepAOrder {
			stepA = &funnel.Steps[i]
		}
		if funnel.Steps[i].Order == stepBOrder {
			stepB = &funnel.Steps[i]
		}
	}
	if stepA == nil || stepB == nil {
		return nil, fmt.Errorf("step not found")
	}
	// Build having clause based on withErrors flag: restricts the inner
	// aggregation to traces where either step saw an error.
	havingClause := ""
	if withErrors {
		havingClause = "HAVING has_error = 1"
	}
	// Build filter strings for each step. Per-step custom filters are not
	// implemented yet; placeholders keep the template slots occupied.
	stepAFilters := ""
	if stepA.Filters != nil && len(stepA.Filters.Items) > 0 {
		// ToDO: need to implement where clause filtering with minimal code duplication
		stepAFilters = "/* Custom filters for step A would be applied here */"
	}
	stepBFilters := ""
	if stepB.Filters != nil && len(stepB.Filters.Items) > 0 {
		// ToDO: need to implement where clause filtering with minimal code duplication
		stepBFilters = "/* Custom filters for step B would be applied here */"
	}
	// The inner subquery computes per-trace duration across the two steps;
	// the outer query joins back to count spans and sorts by duration.
	// NOTE(review): timestamps are compared via toString(...) — assumes
	// string ordering matches numeric ordering for this column; confirm
	// against the traces schema.
	query := fmt.Sprintf(`
WITH
    toUInt64(%d) AS start_time,
    toUInt64(%d) AS end_time,
    toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart,
    toString(intDiv(end_time, 1000000000)) AS tsBucketEnd
SELECT
    trace_id,
    concat(toString((max_end_time_ns - min_start_time_ns) / 1e6), ' ms') AS duration_ms,
    COUNT(*) AS span_count
FROM (
    SELECT
        s1.trace_id,
        MIN(toUnixTimestamp64Nano(s1.timestamp)) AS min_start_time_ns,
        MAX(toUnixTimestamp64Nano(s2.timestamp) + s2.duration_nano) AS max_end_time_ns,
        MAX(s1.has_error OR s2.has_error) AS has_error
    FROM %s AS s1
    JOIN %s AS s2
        ON s1.trace_id = s2.trace_id
    WHERE s1.resource_string_service$$name = '%s'
        AND s1.name = '%s'
        AND s2.resource_string_service$$name = '%s'
        AND s2.name = '%s'
        AND s1.timestamp BETWEEN toString(start_time) AND toString(end_time)
        AND s1.ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
        AND s2.timestamp BETWEEN toString(start_time) AND toString(end_time)
        AND s2.ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
        %s
        %s
    GROUP BY s1.trace_id
    %s
) AS trace_durations
JOIN %s AS spans
    ON spans.trace_id = trace_durations.trace_id
WHERE spans.timestamp BETWEEN toString(start_time) AND toString(end_time)
    AND spans.ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
GROUP BY trace_id, duration_ms
ORDER BY CAST(replaceRegexpAll(duration_ms, ' ms$', '') AS Float64) DESC
LIMIT 5`,
		timeRange.StartTime,
		timeRange.EndTime,
		TracesTable,
		TracesTable,
		escapeString(stepA.ServiceName),
		escapeString(stepA.SpanName),
		escapeString(stepB.ServiceName),
		escapeString(stepB.SpanName),
		stepAFilters,
		stepBFilters,
		havingClause,
		TracesTable,
	)
	return &v3.ClickHouseQuery{
		Query: query,
	}, nil
}
// GetStepAnalytics builds a ClickHouse query that, for traces passing
// through ALL funnel steps (via INTERSECT of per-step trace-ID CTEs),
// counts total and errored spans per step within timeRange.
func GetStepAnalytics(funnel *tracefunnel.Funnel, timeRange tracefunnel.TimeRange) (*v3.ClickHouseQuery, error) {
	if len(funnel.Steps) == 0 {
		return nil, fmt.Errorf("funnel has no steps")
	}
	// Build funnel steps array of (service, span) tuples; values are
	// quote-escaped before interpolation.
	var steps []string
	for _, step := range funnel.Steps {
		steps = append(steps, fmt.Sprintf("('%s', '%s')",
			escapeString(step.ServiceName), escapeString(step.SpanName)))
	}
	stepsArray := fmt.Sprintf("array(%s)", strings.Join(steps, ","))
	// Build one CTE per step selecting the distinct trace IDs that matched
	// that step's service/span within the time window.
	var stepCTEs []string
	for i, step := range funnel.Steps {
		filterStr := ""
		if step.Filters != nil && len(step.Filters.Items) > 0 {
			// ToDO: need to implement where clause filtering with minimal code duplication
			filterStr = "/* Custom filters would be applied here */"
		}
		cte := fmt.Sprintf(`
step%d_traces AS (
    SELECT DISTINCT trace_id
    FROM %s
    WHERE resource_string_service$$name = '%s'
        AND name = '%s'
        AND timestamp BETWEEN toString(start_time) AND toString(end_time)
        AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
        %s
)`,
			i+1,
			TracesTable,
			escapeString(step.ServiceName),
			escapeString(step.SpanName),
			filterStr,
		)
		stepCTEs = append(stepCTEs, cte)
	}
	// Intersect all per-step CTEs: a trace qualifies only if it appears in
	// every step.
	var intersections []string
	for i := 1; i <= len(funnel.Steps); i++ {
		intersections = append(intersections, fmt.Sprintf("SELECT trace_id FROM step%d_traces", i))
	}
	intersectingTracesCTE := fmt.Sprintf(`
intersecting_traces AS (
    %s
)`,
		strings.Join(intersections, "\nINTERSECT\n"),
	)
	// Per step, emit two conditional counts: total spans and errored spans
	// matching that step's (service, span) pair.
	var caseExpressions []string
	for i, step := range funnel.Steps {
		totalSpansExpr := fmt.Sprintf(`
    COUNT(CASE WHEN resource_string_service$$name = '%s'
        AND name = '%s'
        THEN trace_id END) AS total_s%d_spans`,
			escapeString(step.ServiceName), escapeString(step.SpanName), i+1)
		erroredSpansExpr := fmt.Sprintf(`
    COUNT(CASE WHEN resource_string_service$$name = '%s'
        AND name = '%s'
        AND has_error = true
        THEN trace_id END) AS total_s%d_errored_spans`,
			escapeString(step.ServiceName), escapeString(step.SpanName), i+1)
		caseExpressions = append(caseExpressions, totalSpansExpr, erroredSpansExpr)
	}
	query := fmt.Sprintf(`
WITH
    toUInt64(%d) AS start_time,
    toUInt64(%d) AS end_time,
    toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart,
    toString(intDiv(end_time, 1000000000)) AS tsBucketEnd,
    %s AS funnel_steps,
    %s,
    %s
SELECT
    %s
FROM %s
WHERE trace_id IN (SELECT trace_id FROM intersecting_traces)
    AND timestamp BETWEEN toString(start_time) AND toString(end_time)
    AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd`,
		timeRange.StartTime,
		timeRange.EndTime,
		stepsArray,
		strings.Join(stepCTEs, ",\n"),
		intersectingTracesCTE,
		strings.Join(caseExpressions, ",\n    "),
		TracesTable,
	)
	return &v3.ClickHouseQuery{
		Query: query,
	}, nil
}
// ValidateTracesWithLatency builds the ClickHouse query that validates a
// funnel's traces while also computing latency metrics over timeRange.
func ValidateTracesWithLatency(funnel *tracefunnel.Funnel, timeRange tracefunnel.TimeRange) (*v3.ClickHouseQuery, error) {
	filters, err := buildFunnelFiltersWithLatency(funnel)
	if err != nil {
		return nil, fmt.Errorf("error building funnel filters with latency: %w", err)
	}
	return &v3.ClickHouseQuery{
		Query: generateFunnelSQLWithLatency(timeRange.StartTime, timeRange.EndTime, filters),
	}, nil
}
// generateFunnelSQLWithLatency assembles a ClickHouse query that, per
// trace, detects which funnel steps were hit (cumulatively, in order) and
// aggregates step counts, error count, average/p99 trace duration, rate
// and conversion. start/end feed toUInt64 time bounds; the intDiv by 1e9
// below implies they are nanoseconds — confirm with callers.
func generateFunnelSQLWithLatency(start, end int64, filters []tracefunnel.FunnelStepFilter) string {
	var expressions []string
	// Time-bound expressions shared by the whole query.
	startTime := fmt.Sprintf("toUInt64(%d)", start)
	endTime := fmt.Sprintf("toUInt64(%d)", end)
	expressions = append(expressions, fmt.Sprintf("%s AS start_time", startTime))
	expressions = append(expressions, fmt.Sprintf("%s AS end_time", endTime))
	expressions = append(expressions, "toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart")
	expressions = append(expressions, "toString(intDiv(end_time, 1000000000)) AS tsBucketEnd")
	expressions = append(expressions, "(end_time - start_time) / 1e9 AS total_time_seconds")
	// One (service, span) tuple alias per step: s1_config, s2_config, ...
	for _, f := range filters {
		expressions = append(expressions, fmt.Sprintf("('%s', '%s') AS s%d_config",
			escapeString(f.ServiceName),
			escapeString(f.SpanName),
			f.StepNumber))
	}
	withClause := "WITH \n" + strings.Join(expressions, ",\n") + "\n"
	// Build step raw expressions and cumulative logic.
	var stepRaws []string
	var cumulativeLogic []string
	var filterConditions []string
	stepCount := len(filters)
	// Raw detection: has_sN_raw = 1 if any span in the trace matched step N.
	for i := 1; i <= stepCount; i++ {
		stepRaws = append(stepRaws, fmt.Sprintf(
			"MAX(CASE WHEN (resource_string_service$$name, name) = s%d_config THEN 1 ELSE 0 END) AS has_s%d_raw", i, i))
		filterConditions = append(filterConditions, fmt.Sprintf("s%d_config", i))
	}
	// Cumulative IFs: has_sN requires steps 1..N all present (nested IFs
	// chain each prior step's condition into the next).
	for i := 1; i <= stepCount; i++ {
		if i == 1 {
			cumulativeLogic = append(cumulativeLogic, fmt.Sprintf(`
IF(MAX(CASE WHEN (resource_string_service$$name, name) = s1_config THEN 1 ELSE 0 END) = 1, 1, 0) AS has_s1`))
		} else {
			innerIf := "IF(MAX(CASE WHEN (resource_string_service$$name, name) = s1_config THEN 1 ELSE 0 END) = 1, 1, 0)"
			for j := 2; j < i; j++ {
				innerIf = fmt.Sprintf(`IF(%s = 1 AND MAX(CASE WHEN (resource_string_service$$name, name) = s%d_config THEN 1 ELSE 0 END) = 1, 1, 0)`, innerIf, j)
			}
			cumulativeLogic = append(cumulativeLogic, fmt.Sprintf(`
IF(
    %s = 1 AND MAX(CASE WHEN (resource_string_service$$name, name) = s%d_config THEN 1 ELSE 0 END) = 1,
    1, 0
) AS has_s%d`, innerIf, i, i))
		}
	}
	// Final SELECT counts using FILTER clauses.
	var stepCounts []string
	for i := 1; i <= stepCount; i++ {
		stepCounts = append(stepCounts, fmt.Sprintf("COUNT(DISTINCT trace_id) FILTER (WHERE has_s%d = 1) AS step%d_count", i, i))
	}
	// Final query assembly; lastStep drives rate/error/conversion metrics.
	lastStep := fmt.Sprint(stepCount)
	query := withClause + `
SELECT
    ` + strings.Join(stepCounts, ",\n    ") + `,
    IF(total_time_seconds = 0 OR COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1) = 0, 0,
        COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1) / total_time_seconds
    ) AS avg_rate,
    COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1 AND has_error = true) AS errors,
    IF(COUNT(*) = 0, 0, avg(trace_duration)) AS avg_duration,
    IF(COUNT(*) = 0, 0, quantile(0.99)(trace_duration)) AS p99_latency,
    IF(COUNT(DISTINCT trace_id) FILTER (WHERE has_s1 = 1) = 0, 0,
        100.0 * COUNT(DISTINCT trace_id) FILTER (WHERE has_s` + lastStep + ` = 1) /
        COUNT(DISTINCT trace_id) FILTER (WHERE has_s1 = 1)
    ) AS conversion_rate
FROM (
    SELECT
        trace_id,
        MAX(has_error) AS has_error,
        ` + strings.Join(stepRaws, ",\n        ") + `,
        MAX(toUnixTimestamp64Nano(timestamp) + duration_nano) - MIN(toUnixTimestamp64Nano(timestamp)) AS trace_duration,
        ` + strings.Join(cumulativeLogic, ",\n        ") + `
    FROM ` + TracesTable + `
    WHERE
        timestamp BETWEEN toString(start_time) AND toString(end_time)
        AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
        AND (resource_string_service$$name, name) IN (` + strings.Join(filterConditions, ", ") + `)
    GROUP BY trace_id
) AS funnel_data;`
	return query
}
// buildFunnelFiltersWithLatency converts a funnel's steps into step
// filters (1-indexed), defaulting each step's latency pointer to "start"
// when it was left empty.
func buildFunnelFiltersWithLatency(funnel *tracefunnel.Funnel) ([]tracefunnel.FunnelStepFilter, error) {
	switch {
	case funnel == nil:
		return nil, fmt.Errorf("funnel cannot be nil")
	case len(funnel.Steps) == 0:
		return nil, fmt.Errorf("funnel must have at least one step")
	}

	filters := make([]tracefunnel.FunnelStepFilter, len(funnel.Steps))
	for i, step := range funnel.Steps {
		// Fall back to "start" when the step does not specify a pointer.
		pointer := step.LatencyPointer
		if pointer == "" {
			pointer = "start"
		}
		filters[i] = tracefunnel.FunnelStepFilter{
			StepNumber:     i + 1,
			ServiceName:    step.ServiceName,
			SpanName:       step.SpanName,
			LatencyPointer: pointer,
			CustomFilters:  step.Filters,
		}
	}
	return filters, nil
}
// buildFunnelFilters converts a funnel's steps into step filters
// (1-indexed), without any latency-pointer handling.
func buildFunnelFilters(funnel *tracefunnel.Funnel) ([]tracefunnel.FunnelStepFilter, error) {
	switch {
	case funnel == nil:
		return nil, fmt.Errorf("funnel cannot be nil")
	case len(funnel.Steps) == 0:
		return nil, fmt.Errorf("funnel must have at least one step")
	}

	filters := make([]tracefunnel.FunnelStepFilter, len(funnel.Steps))
	for i, step := range funnel.Steps {
		filters[i] = tracefunnel.FunnelStepFilter{
			StepNumber:    i + 1,
			ServiceName:   step.ServiceName,
			SpanName:      step.SpanName,
			CustomFilters: step.Filters,
		}
	}
	return filters, nil
}
// escapeString makes s safe for embedding inside a single-quoted
// ClickHouse string literal. Backslashes are doubled first — ClickHouse
// treats '\' as an escape character inside literals, so a lone trailing
// backslash could otherwise neutralize the closing quote — then single
// quotes are doubled per SQL convention.
func escapeString(s string) string {
	s = strings.ReplaceAll(s, `\`, `\\`)
	return strings.ReplaceAll(s, "'", "''")
}

// TracesTable is the fully-qualified ClickHouse table queried by the
// funnel query builders in this package.
const TracesTable = "signoz_traces.signoz_index_v3"
// generateFunnelSQL assembles a WITH-CTE ClickHouse query returning up to
// 5 trace IDs that appear in every funnel step within [start, end]. The
// intDiv by 1e9 below implies start/end are nanoseconds — confirm with
// callers.
func generateFunnelSQL(start, end int64, filters []tracefunnel.FunnelStepFilter) string {
	var expressions []string
	// Basic time expressions.
	expressions = append(expressions, fmt.Sprintf("toUInt64(%d) AS start_time", start))
	expressions = append(expressions, fmt.Sprintf("toUInt64(%d) AS end_time", end))
	expressions = append(expressions, "toString(intDiv(start_time, 1000000000) - 1800) AS tsBucketStart")
	expressions = append(expressions, "toString(intDiv(end_time, 1000000000)) AS tsBucketEnd")
	// Add service and span alias definitions from each filter.
	for _, f := range filters {
		expressions = append(expressions, fmt.Sprintf("'%s' AS service_%d", escapeString(f.ServiceName), f.StepNumber))
		expressions = append(expressions, fmt.Sprintf("'%s' AS span_%d", escapeString(f.SpanName), f.StepNumber))
	}
	// Add the CTE for each step: distinct trace IDs matching that step.
	// NOTE(review): this filters on serviceName while the other builders in
	// this file use resource_string_service$$name — confirm which column is
	// correct for this table.
	for _, f := range filters {
		cte := fmt.Sprintf(`step%d_traces AS (
    SELECT DISTINCT trace_id
    FROM %s
    WHERE serviceName = service_%d
        AND name = span_%d
        AND timestamp BETWEEN toString(start_time) AND toString(end_time)
        AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
)`, f.StepNumber, TracesTable, f.StepNumber, f.StepNumber)
		expressions = append(expressions, cte)
	}
	withClause := "WITH \n" + strings.Join(expressions, ",\n") + "\n"
	// Build the intersect clause: a trace qualifies only if present in
	// every step's CTE.
	var intersectQueries []string
	for _, f := range filters {
		intersectQueries = append(intersectQueries, fmt.Sprintf("SELECT trace_id FROM step%d_traces", f.StepNumber))
	}
	intersectClause := strings.Join(intersectQueries, "\nINTERSECT\n")
	query := withClause + `
SELECT trace_id
FROM ` + TracesTable + `
WHERE trace_id IN (
    ` + intersectClause + `
)
    AND timestamp BETWEEN toString(start_time) AND toString(end_time)
    AND ts_bucket_start BETWEEN tsBucketStart AND tsBucketEnd
GROUP BY trace_id
LIMIT 5
`
	return query
}
// ValidateTraces builds a ClickHouse query to validate traces in a funnel.
// It converts the funnel's steps into filters and renders them into SQL over
// the given time range.
func ValidateTraces(funnel *tracefunnel.Funnel, timeRange tracefunnel.TimeRange) (*v3.ClickHouseQuery, error) {
	stepFilters, err := buildFunnelFilters(funnel)
	if err != nil {
		return nil, fmt.Errorf("error building funnel filters: %w", err)
	}
	return &v3.ClickHouseQuery{
		Query: generateFunnelSQL(timeRange.StartTime, timeRange.EndTime, stepFilters),
	}, nil
}

View File

@@ -0,0 +1,67 @@
package tracefunnel
import (
"context"
"net/http"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
)
// Module defines the interface for trace funnel operations.
type Module interface {
	// operations on funnel

	// Create makes a new funnel with the given name for userID within orgID,
	// stamped with the caller-supplied timestamp.
	Create(ctx context.Context, timestamp int64, name string, userID string, orgID string) (*traceFunnels.Funnel, error)
	// Get fetches a single funnel by its ID.
	Get(ctx context.Context, funnelID string) (*traceFunnels.Funnel, error)
	// Update persists changes to an existing funnel on behalf of userID.
	Update(ctx context.Context, funnel *traceFunnels.Funnel, userID string) error
	// List returns all funnels belonging to orgID.
	List(ctx context.Context, orgID string) ([]*traceFunnels.Funnel, error)
	// Delete removes a funnel by its ID.
	Delete(ctx context.Context, funnelID string) error
	// Save stores the funnel for userID within orgID.
	Save(ctx context.Context, funnel *traceFunnels.Funnel, userID string, orgID string) error
	// GetFunnelMetadata returns funnel metadata; from the signature this is
	// two int64 values and a string — presumably timestamps plus a
	// description; TODO confirm against the implementation.
	GetFunnelMetadata(ctx context.Context, funnelID string) (int64, int64, string, error)
	//
	//GetFunnelAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error)
	//
	//GetStepAnalytics(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) (*traceFunnels.FunnelAnalytics, error)
	//
	//GetSlowestTraces(ctx context.Context, funnel *traceFunnels.Funnel, stepAOrder, stepBOrder int64, timeRange traceFunnels.TimeRange, isError bool) (*traceFunnels.ValidTracesResponse, error)
	// updates funnel metadata
	//UpdateMetadata(ctx context.Context, funnelID valuer.UUID, name, description string, userID string) error
	// validates funnel
	//ValidateTraces(ctx context.Context, funnel *traceFunnels.Funnel, timeRange traceFunnels.TimeRange) ([]*v3.Row, error)
}
// Handler defines the HTTP handlers exposed for trace funnel endpoints.
type Handler interface {
	// CRUD on funnel

	// New handles funnel creation requests.
	New(http.ResponseWriter, *http.Request)
	// UpdateSteps handles updates to a funnel's steps.
	UpdateSteps(http.ResponseWriter, *http.Request)
	// UpdateFunnel handles updates to a funnel itself.
	UpdateFunnel(http.ResponseWriter, *http.Request)
	// List handles listing funnels.
	List(http.ResponseWriter, *http.Request)
	// Get handles fetching a single funnel.
	Get(http.ResponseWriter, *http.Request)
	// Delete handles funnel deletion.
	Delete(http.ResponseWriter, *http.Request)
	// Save handles persisting a funnel.
	Save(http.ResponseWriter, *http.Request)
	// validator handlers
	//ValidateTraces(http.ResponseWriter, *http.Request)
	//
	//// Analytics handlers
	//FunnelAnalytics(http.ResponseWriter, *http.Request)
	//
	//StepAnalytics(http.ResponseWriter, *http.Request)
	//
	//SlowestTraces(http.ResponseWriter, *http.Request)
	//
	//ErrorTraces(http.ResponseWriter, *http.Request)
}

View File

@@ -0,0 +1,171 @@
package tracefunnel
import (
"fmt"
tracefunnel "github.com/SigNoz/signoz/pkg/types/tracefunnel"
"sort"
)
// ValidateTimestamp checks that a timestamp is present (non-zero) and
// non-negative; fieldName is interpolated into the error message.
func ValidateTimestamp(timestamp int64, fieldName string) error {
	switch {
	case timestamp == 0:
		return fmt.Errorf("%s is required", fieldName)
	case timestamp < 0:
		return fmt.Errorf("%s must be positive", fieldName)
	default:
		return nil
	}
}
// ValidateTimestampIsMilliseconds reports whether timestamp looks like a
// millisecond epoch value, i.e. has exactly 13 decimal digits.
func ValidateTimestampIsMilliseconds(timestamp int64) bool {
	const (
		minMillis int64 = 1000000000000 // smallest 13-digit value
		maxMillis int64 = 9999999999999 // largest 13-digit value
	)
	return timestamp >= minMillis && timestamp <= maxMillis
}
// ValidateFunnelSteps checks that a funnel has at least two steps and that
// every step carries a service name, a span name, and a non-negative order.
// Error messages use 1-based step numbering.
func ValidateFunnelSteps(steps []tracefunnel.FunnelStep) error {
	if len(steps) < 2 {
		return fmt.Errorf("funnel must have at least 2 steps")
	}
	for idx := range steps {
		stepNum := idx + 1
		switch {
		case steps[idx].ServiceName == "":
			return fmt.Errorf("step %d: service name is required", stepNum)
		case steps[idx].SpanName == "":
			return fmt.Errorf("step %d: span name is required", stepNum)
		case steps[idx].Order < 0:
			return fmt.Errorf("step %d: order must be non-negative", stepNum)
		}
	}
	return nil
}
// NormalizeFunnelSteps sorts steps by their declared order and rewrites the
// orders to the dense sequence 1..n. The slice is modified in place and also
// returned for convenience.
func NormalizeFunnelSteps(steps []tracefunnel.FunnelStep) []tracefunnel.FunnelStep {
	// Arrange the steps by their declared position.
	sort.Slice(steps, func(a, b int) bool { return steps[a].Order < steps[b].Order })
	// Rewrite orders sequentially so downstream code can rely on 1..n.
	for idx := range steps {
		steps[idx].Order = int64(idx + 1)
	}
	return steps
}
//// ValidateSteps checks if the requested steps exist in the funnel
//func ValidateSteps(funnel *tracefunnel.Funnel, stepAOrder, stepBOrder int64) error {
// stepAExists, stepBExists := false, false
// for _, step := range funnel.Steps {
// if step.Order == stepAOrder {
// stepAExists = true
// }
// if step.Order == stepBOrder {
// stepBExists = true
// }
// }
//
// if !stepAExists || !stepBExists {
// return fmt.Errorf("one or both steps not found. Step A Order: %d, Step B Order: %d", stepAOrder, stepBOrder)
// }
//
// return nil
//}
//// ValidateFunnel validates a funnel's data
//func ValidateFunnel(funnel *tracefunnel.Funnel) error {
// if funnel == nil {
// return fmt.Errorf("funnel cannot be nil")
// }
//
// if len(funnel.Steps) < 2 {
// return fmt.Errorf("funnel must have at least 2 steps")
// }
//
// // Validate each step
// for i, step := range funnel.Steps {
// if err := ValidateStep(step, i+1); err != nil {
// return err
// }
// }
//
// return nil
//}
// ValidateStep validates a single funnel step
//func ValidateStep(step tracefunnel.FunnelStep, stepNum int) error {
// if step.ServiceName == "" {
// return fmt.Errorf("step %d: service name is required", stepNum)
// }
//
// if step.SpanName == "" {
// return fmt.Errorf("step %d: span name is required", stepNum)
// }
//
// if step.Order < 0 {
// return fmt.Errorf("step %d: order must be non-negative", stepNum)
// }
//
// return nil
//}
//
//// ValidateTimeRange validates a time range
//func ValidateTimeRange(timeRange tracefunnel.TimeRange) error {
// if timeRange.StartTime <= 0 {
// return fmt.Errorf("start time must be positive")
// }
//
// if timeRange.EndTime <= 0 {
// return fmt.Errorf("end time must be positive")
// }
//
// if timeRange.EndTime < timeRange.StartTime {
// return fmt.Errorf("end time must be after start time")
// }
//
// // Check if the time range is not too far in the future
// now := time.Now().UnixNano() / 1000000 // Convert to milliseconds
// if timeRange.EndTime > now {
// return fmt.Errorf("end time cannot be in the future")
// }
//
// // Check if the time range is not too old (e.g., more than 30 days)
// maxAge := int64(30 * 24 * 60 * 60 * 1000) // 30 days in milliseconds
// if now-timeRange.StartTime > maxAge {
// return fmt.Errorf("time range cannot be older than 30 days")
// }
//
// return nil
//}
//
//// ValidateStepOrder validates that step orders are sequential
//func ValidateStepOrder(steps []tracefunnel.FunnelStep) error {
// if len(steps) < 2 {
// return nil
// }
//
// // Create a map to track used orders
// usedOrders := make(map[int64]bool)
//
// for i, step := range steps {
// if usedOrders[step.Order] {
// return fmt.Errorf("duplicate step order %d at step %d", step.Order, i+1)
// }
// usedOrders[step.Order] = true
// }
//
// // Check if orders are sequential
// for i := 0; i < len(steps)-1; i++ {
// if steps[i+1].Order != steps[i].Order+1 {
// return fmt.Errorf("step orders must be sequential")
// }
// }
//
// return nil
//}

View File

@@ -17,6 +17,8 @@ const (
const (
defaultTraceDB string = "signoz_traces"
defaultOperationsTable string = "distributed_signoz_operations"
defaultIndexTable string = "distributed_signoz_index_v2"
defaultLocalIndexTable string = "signoz_index_v2"
defaultErrorTable string = "distributed_signoz_error_index_v2"
defaultDurationTable string = "distributed_durationSort"
@@ -57,10 +59,19 @@ type namespaceConfig struct {
Enabled bool
Datasource string
TraceDB string
ErrorTable string
OperationsTable string
IndexTable string
LocalIndexTable string
DurationTable string
UsageExplorerTable string
SpansTable string
ErrorTable string
SpanAttributeTableV2 string
SpanAttributeKeysTable string
DependencyGraphTable string
TopLevelOperationsTable string
LogsDB string
LogsTable string
LogsLocalTable string
LogsAttributeKeysTable string
LogsResourceKeysTable string
@@ -71,7 +82,6 @@ type namespaceConfig struct {
Encoding Encoding
Connector Connector
LogsDB string
LogsLocalTableV2 string
LogsTableV2 string
LogsResourceLocalTableV2 string

View File

@@ -12,6 +12,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/prometheus"
@@ -36,7 +37,9 @@ import (
"go.uber.org/zap"
queryprogress "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader/query_progress"
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
"github.com/SigNoz/signoz/pkg/query-service/app/resource"
"github.com/SigNoz/signoz/pkg/query-service/app/services"
"github.com/SigNoz/signoz/pkg/query-service/app/traces/smart"
"github.com/SigNoz/signoz/pkg/query-service/app/traces/tracedetail"
"github.com/SigNoz/signoz/pkg/query-service/common"
@@ -115,15 +118,24 @@ type ClickHouseReader struct {
prometheus prometheus.Prometheus
sqlDB sqlstore.SQLStore
TraceDB string
operationsTable string
durationTable string
indexTable string
errorTable string
usageExplorerTable string
SpansTable string
spanAttributeTableV2 string
spanAttributesKeysTable string
dependencyGraphTable string
topLevelOperationsTable string
logsDB string
logsTable string
logsLocalTable string
logsAttributeKeys string
logsResourceKeys string
logsTagAttributeTableV2 string
queryProgressTracker queryprogress.QueryProgressTracker
logsDB string
logsTableV2 string
logsLocalTableV2 string
logsResourceTableV2 string
@@ -178,10 +190,19 @@ func NewReaderFromClickhouseConnection(
prometheus: prometheus,
sqlDB: sqlDB,
TraceDB: options.primary.TraceDB,
operationsTable: options.primary.OperationsTable,
indexTable: options.primary.IndexTable,
errorTable: options.primary.ErrorTable,
usageExplorerTable: options.primary.UsageExplorerTable,
durationTable: options.primary.DurationTable,
SpansTable: options.primary.SpansTable,
spanAttributeTableV2: options.primary.SpanAttributeTableV2,
spanAttributesKeysTable: options.primary.SpanAttributeKeysTable,
dependencyGraphTable: options.primary.DependencyGraphTable,
topLevelOperationsTable: options.primary.TopLevelOperationsTable,
logsDB: options.primary.LogsDB,
logsTable: options.primary.LogsTable,
logsLocalTable: options.primary.LogsLocalTable,
logsAttributeKeys: options.primary.LogsAttributeKeysTable,
logsResourceKeys: options.primary.LogsResourceKeysTable,
logsTagAttributeTableV2: options.primary.LogsTagAttributeTableV2,
@@ -262,6 +283,41 @@ func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, erro
return &services, nil
}
// GetTopLevelOperations returns, per service, the names of top level
// operations whose latest recorded `time` is >= start, optionally restricted
// to the given services. Each service's slice is seeded with the sentinel
// entry "overflow_operation". The `end` parameter is accepted but not used in
// the query (see comment below).
func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, start, end time.Time, services []string) (*map[string][]string, *model.ApiError) {
	start = start.In(time.UTC)
	// The `top_level_operations` that have `time` >= start
	operations := map[string][]string{}
	// We can't use the `end` because the `top_level_operations` table has the most recent instances of the operations
	// We can only use the `start` time to filter the operations
	query := fmt.Sprintf(`SELECT name, serviceName, max(time) as ts FROM %s.%s WHERE time >= @start`, r.TraceDB, r.topLevelOperationsTable)
	if len(services) > 0 {
		query += ` AND serviceName IN @services`
	}
	query += ` GROUP BY name, serviceName ORDER BY ts DESC LIMIT 5000`
	rows, err := r.db.Query(ctx, query, clickhouse.Named("start", start), clickhouse.Named("services", services))
	if err != nil {
		zap.L().Error("Error in processing sql query", zap.Error(err))
		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
	}
	defer rows.Close()
	for rows.Next() {
		var name, serviceName string
		var t time.Time
		if err := rows.Scan(&name, &serviceName, &t); err != nil {
			return nil, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error in reading data")}
		}
		// The first row seen for a service seeds its slice with the sentinel
		// "overflow_operation" entry before real operation names are appended.
		if _, ok := operations[serviceName]; !ok {
			operations[serviceName] = []string{"overflow_operation"}
		}
		operations[serviceName] = append(operations[serviceName], name)
	}
	return &operations, nil
}
func (r *ClickHouseReader) buildResourceSubQuery(tags []model.TagQueryParam, svc string, start, end time.Time) (string, error) {
// assuming all will be resource attributes.
// and resource attributes are string for traces
@@ -321,6 +377,131 @@ func (r *ClickHouseReader) buildResourceSubQuery(tags []model.TagQueryParam, svc
return resourceSubQuery, nil
}
// GetServices computes per-service metrics (p99, average duration, call
// count/rate, error count/rate) for every service with top level operations
// in the requested window. Per-service queries run concurrently, bounded by a
// semaphore of 10 so the ClickHouse server is not overloaded.
//
// BUGFIX: the query error from ScanStruct is now checked before the
// NumCalls == 0 early return; previously a failed query left NumCalls at its
// zero value and the error was silently swallowed.
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) {
	if r.indexTable == "" {
		return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
	}

	topLevelOps, apiErr := r.GetTopLevelOperations(ctx, *queryParams.Start, *queryParams.End, nil)
	if apiErr != nil {
		return nil, apiErr
	}

	serviceItems := []model.ServiceItem{}
	var wg sync.WaitGroup
	// limit the number of concurrent queries to not overload the clickhouse server
	sem := make(chan struct{}, 10)
	var mtx sync.RWMutex

	for svc, ops := range *topLevelOps {
		sem <- struct{}{}
		wg.Add(1)
		go func(svc string, ops []string) {
			defer wg.Done()
			defer func() { <-sem }()
			var serviceItem model.ServiceItem
			var numErrors uint64

			// Even if the total number of operations within the time range is less and the all
			// the top level operations are high, we want to warn to let user know the issue
			// with the instrumentation
			serviceItem.DataWarning = model.DataWarning{
				TopLevelOps: (*topLevelOps)[svc],
			}

			// default max_query_size = 262144
			// Let's assume the average size of the item in `ops` is 50 bytes
			// We can have 262144/50 = 5242 items in the `ops` array
			// Although we have make it as big as 5k, We cap the number of items
			// in the `ops` array to 1500
			ops = ops[:int(math.Min(1500, float64(len(ops))))]

			query := fmt.Sprintf(
				`SELECT
					quantile(0.99)(durationNano) as p99,
					avg(durationNano) as avgDuration,
					count(*) as numCalls
				FROM %s.%s
				WHERE serviceName = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end`,
				r.TraceDB, r.traceTableName,
			)
			errorQuery := fmt.Sprintf(
				`SELECT
					count(*) as numErrors
				FROM %s.%s
				WHERE serviceName = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
				r.TraceDB, r.traceTableName,
			)

			args := []interface{}{}
			args = append(args,
				clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
				clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
				clickhouse.Named("serviceName", svc),
				clickhouse.Named("names", ops),
			)
			// Restrict the query to spans whose resource fingerprint matches
			// the requested tags.
			resourceSubQuery, err := r.buildResourceSubQuery(queryParams.Tags, svc, *queryParams.Start, *queryParams.End)
			if err != nil {
				zap.L().Error("Error in processing sql query", zap.Error(err))
				return
			}
			query += `
				AND (
					resource_fingerprint GLOBAL IN ` +
				resourceSubQuery +
				`) AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket`
			args = append(args,
				// start bucket widened by 1800s to cover the bucketing granularity
				clickhouse.Named("start_bucket", strconv.FormatInt(queryParams.Start.Unix()-1800, 10)),
				clickhouse.Named("end_bucket", strconv.FormatInt(queryParams.End.Unix(), 10)),
			)

			err = r.db.QueryRow(
				ctx,
				query,
				args...,
			).ScanStruct(&serviceItem)
			// Check the query error before inspecting the scanned struct.
			if err != nil {
				zap.L().Error("Error in processing sql query", zap.Error(err))
				return
			}
			// Services with no calls in the window are omitted from the result.
			if serviceItem.NumCalls == 0 {
				return
			}

			errorQuery += `
				AND (
					resource_fingerprint GLOBAL IN ` +
				resourceSubQuery +
				`) AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket`
			err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors)
			if err != nil {
				zap.L().Error("Error in processing sql query", zap.Error(err))
				return
			}

			serviceItem.ServiceName = svc
			serviceItem.NumErrors = numErrors
			mtx.Lock()
			serviceItems = append(serviceItems, serviceItem)
			mtx.Unlock()
		}(svc, ops)
	}
	wg.Wait()

	// Derive rates; NumCalls is guaranteed non-zero for every collected item,
	// so the ErrorRate division is safe.
	for idx := range serviceItems {
		serviceItems[idx].CallRate = float64(serviceItems[idx].NumCalls) / float64(queryParams.Period)
		serviceItems[idx].ErrorRate = float64(serviceItems[idx].NumErrors) * 100 / float64(serviceItems[idx].NumCalls)
	}
	return &serviceItems, nil
}
func getStatusFilters(query string, statusParams []string, excludeMap map[string]struct{}) string {
// status can only be two and if both are selected than they are equivalent to none selected
if _, ok := excludeMap["status"]; ok {
@@ -499,6 +680,7 @@ func addExistsOperator(item model.TagQuery, tagMapType string, not bool) (string
}
func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError) {
namedArgs := []interface{}{
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
@@ -552,6 +734,42 @@ func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *mo
return &topOperationsItems, nil
}
// GetUsage returns span counts from the usage explorer table bucketed by
// StepHour-sized intervals, optionally restricted to a single service.
func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetUsageParams) (*[]model.UsageItem, error) {
	namedArgs := []interface{}{
		clickhouse.Named("interval", queryParams.StepHour),
		clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
		clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
	}

	var query string
	if len(queryParams.ServiceName) != 0 {
		namedArgs = append(namedArgs, clickhouse.Named("serviceName", queryParams.ServiceName))
		query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE service_name=@serviceName AND timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.TraceDB, r.usageExplorerTable)
	} else {
		query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL @interval HOUR) as time, sum(count) as count FROM %s.%s WHERE timestamp>=@start AND timestamp<=@end GROUP BY time ORDER BY time ASC", r.TraceDB, r.usageExplorerTable)
	}

	var usageItems []model.UsageItem
	err := r.db.Select(ctx, &usageItems, query, namedArgs...)
	zap.L().Info(query)
	if err != nil {
		zap.L().Error("Error in processing sql query", zap.Error(err))
		return nil, fmt.Errorf("error in processing sql query")
	}

	// Expose each bucket's start as epoch nanoseconds alongside the time value.
	for idx := range usageItems {
		usageItems[idx].Timestamp = uint64(usageItems[idx].Time.UnixNano())
	}

	// Never hand a nil slice back to callers.
	if usageItems == nil {
		usageItems = []model.UsageItem{}
	}
	return &usageItems, nil
}
func (r *ClickHouseReader) GetSpansForTrace(ctx context.Context, traceID string, traceDetailsQuery string) ([]model.SpanItemV2, *model.ApiError) {
var traceSummary model.TraceSummary
summaryQuery := fmt.Sprintf("SELECT * from %s.%s WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
@@ -934,6 +1152,54 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, trace
return trace, nil
}
// GetDependencyGraph returns the service dependency edges (parent -> child)
// for the requested window, with latency quantiles (p50..p99), call
// count/rate, and error rate per edge, aggregated from the dependency graph
// table.
func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
	response := []model.ServiceMapDependencyResponseItem{}
	args := []interface{}{}
	args = append(args,
		clickhouse.Named("start", uint64(queryParams.Start.Unix())),
		clickhouse.Named("end", uint64(queryParams.End.Unix())),
		// window length in seconds; used to turn the call count into a rate
		clickhouse.Named("duration", uint64(queryParams.End.Unix()-queryParams.Start.Unix())),
	)
	query := fmt.Sprintf(`
	WITH
		quantilesMergeState(0.5, 0.75, 0.9, 0.95, 0.99)(duration_quantiles_state) AS duration_quantiles_state,
		finalizeAggregation(duration_quantiles_state) AS result
	SELECT
		src as parent,
		dest as child,
		result[1] AS p50,
		result[2] AS p75,
		result[3] AS p90,
		result[4] AS p95,
		result[5] AS p99,
		sum(total_count) as callCount,
		sum(total_count)/ @duration AS callRate,
		sum(error_count)/sum(total_count) * 100 as errorRate
	FROM %s.%s
	WHERE toUInt64(toDateTime(timestamp)) >= @start AND toUInt64(toDateTime(timestamp)) <= @end`,
		r.TraceDB, r.dependencyGraphTable,
	)
	// Append tag-based filtering (if any) before the final GROUP BY.
	tags := createTagQueryFromTagQueryParams(queryParams.Tags)
	filterQuery, filterArgs := services.BuildServiceMapQuery(tags)
	query += filterQuery + " GROUP BY src, dest;"
	args = append(args, filterArgs...)
	zap.L().Debug("GetDependencyGraph query", zap.String("query", query), zap.Any("args", args))
	err := r.db.Select(ctx, &response, query, args...)
	if err != nil {
		zap.L().Error("Error in processing sql query", zap.Error(err))
		return nil, fmt.Errorf("error in processing sql query %w", err)
	}
	return &response, nil
}
func getLocalTableName(tableName string) string {
tableNameSplit := strings.Split(tableName, ".")
@@ -1089,6 +1355,9 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
tableNames := []string{
r.TraceDB + "." + r.traceTableName,
r.TraceDB + "." + r.traceResourceTableV3,
r.TraceDB + "." + signozErrorIndexTable,
r.TraceDB + "." + signozUsageExplorerTable,
r.TraceDB + "." + defaultDependencyGraphTable,
r.TraceDB + "." + r.traceSummaryTable,
}
@@ -2439,6 +2708,218 @@ func (r *ClickHouseReader) UpdateTraceField(ctx context.Context, field *model.Up
return nil
}
// GetLogs fetches logs from the logs table that match the given filter
// params, ordered and limited per the request. When the request paginates
// backwards, the order is flipped for the query and the rows are reversed
// again before returning so callers always see a consistent direction.
func (r *ClickHouseReader) GetLogs(ctx context.Context, params *model.LogsFilterParams) (*[]model.SignozLog, *model.ApiError) {
	response := []model.SignozLog{}
	fields, apiErr := r.GetLogFields(ctx)
	if apiErr != nil {
		return nil, apiErr
	}
	// May mutate params' ordering for a previous-page fetch; the flag tells us
	// to reverse the rows below.
	isPaginatePrev := logs.CheckIfPrevousPaginateAndModifyOrder(params)
	filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, params)
	if err != nil {
		return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
	}
	data := map[string]interface{}{
		"lenFilters": lenFilters,
	}
	// Emit a telemetry event only when the request actually used filters.
	if lenFilters != 0 {
		claims, errv2 := authtypes.ClaimsFromContext(ctx)
		if errv2 == nil {
			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, claims.Email, true, false)
		}
	}
	query := fmt.Sprintf("%s from %s.%s", constants.LogsSQLSelect, r.logsDB, r.logsTable)
	if filterSql != "" {
		query = fmt.Sprintf("%s where %s", query, filterSql)
	}
	// NOTE(review): OrderBy/Order/Limit are interpolated directly into the SQL
	// string; safe only if they are validated/whitelisted upstream — confirm.
	query = fmt.Sprintf("%s order by %s %s limit %d", query, params.OrderBy, params.Order, params.Limit)
	err = r.db.Select(ctx, &response, query)
	if err != nil {
		return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal}
	}
	if isPaginatePrev {
		// reverse the results from db
		for i, j := 0, len(response)-1; i < j; i, j = i+1, j-1 {
			response[i], response[j] = response[j], response[i]
		}
	}
	return &response, nil
}
// TailLogs live-tails logs matching client.Filter, pushing batches of up to
// 100 rows to client.Logs every liveTailRefreshSeconds until ctx is
// cancelled. Errors are sent on client.Error; a cancellation signal is sent
// on client.Done.
//
// BUGFIX: the error from GenerateSQLWhere is now checked immediately after
// the call; previously the telemetry block below executed (using lenFilters)
// even when filter generation had failed.
func (r *ClickHouseReader) TailLogs(ctx context.Context, client *model.LogsTailClient) {
	fields, apiErr := r.GetLogFields(ctx)
	if apiErr != nil {
		client.Error <- apiErr.Err
		return
	}
	filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
		Query: client.Filter.Query,
	})
	if err != nil {
		client.Error <- err
		return
	}
	data := map[string]interface{}{
		"lenFilters": lenFilters,
	}
	// Emit a telemetry event only when the tail actually uses filters.
	if lenFilters != 0 {
		claims, errv2 := authtypes.ClaimsFromContext(ctx)
		if errv2 == nil {
			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, claims.Email, true, false)
		}
	}
	query := fmt.Sprintf("%s from %s.%s", constants.LogsSQLSelect, r.logsDB, r.logsTable)
	// Start tailing from now unless the client supplied a starting timestamp.
	tsStart := uint64(time.Now().UnixNano())
	if client.Filter.TimestampStart != 0 {
		tsStart = client.Filter.TimestampStart
	}
	var idStart string
	if client.Filter.IdGt != "" {
		idStart = client.Filter.IdGt
	}
	ticker := time.NewTicker(time.Duration(r.liveTailRefreshSeconds) * time.Second)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			done := true
			client.Done <- &done
			zap.L().Debug("closing go routine : " + client.Name)
			return
		case <-ticker.C:
			// get the new 100 logs as anything more older won't make sense
			tmpQuery := fmt.Sprintf("%s where timestamp >='%d'", query, tsStart)
			if filterSql != "" {
				tmpQuery = fmt.Sprintf("%s and %s", tmpQuery, filterSql)
			}
			if idStart != "" {
				tmpQuery = fmt.Sprintf("%s and id > '%s'", tmpQuery, idStart)
			}
			tmpQuery = fmt.Sprintf("%s order by timestamp desc, id desc limit 100", tmpQuery)
			response := []model.SignozLog{}
			err := r.db.Select(ctx, &response, tmpQuery)
			if err != nil {
				zap.L().Error("Error while getting logs", zap.Error(err))
				client.Error <- err
				return
			}
			// Rows come back newest-first; send oldest-first and remember the
			// newest row (i == 0) as the cursor for the next tick.
			for i := len(response) - 1; i >= 0; i-- {
				select {
				case <-ctx.Done():
					done := true
					client.Done <- &done
					zap.L().Debug("closing go routine while sending logs : " + client.Name)
					return
				default:
					client.Logs <- &response[i]
					if i == 0 {
						tsStart = response[i].Timestamp
						idStart = response[i].ID
					}
				}
			}
		}
	}
}
// AggregateLogs computes time-bucketed aggregates over logs matching
// params.Query. Buckets are StepSeconds wide (expressed as minutes in the
// query); the aggregate defaults to a row count unless params.Function is
// set, and results may additionally be grouped by params.GroupBy.
func (r *ClickHouseReader) AggregateLogs(ctx context.Context, params *model.LogsAggregateParams) (*model.GetLogsAggregatesResponse, *model.ApiError) {
	logAggregatesDBResponseItems := []model.LogsAggregatesDBResponseItem{}
	// Default aggregate is a row count; params.Function may override it.
	function := "toFloat64(count()) as value"
	if params.Function != "" {
		function = fmt.Sprintf("toFloat64(%s) as value", params.Function)
	}
	fields, apiErr := r.GetLogFields(ctx)
	if apiErr != nil {
		return nil, apiErr
	}
	filterSql, lenFilters, err := logs.GenerateSQLWhere(fields, &model.LogsFilterParams{
		Query: params.Query,
	})
	if err != nil {
		return nil, &model.ApiError{Err: err, Typ: model.ErrorBadData}
	}
	data := map[string]interface{}{
		"lenFilters": lenFilters,
	}
	// Emit a telemetry event only when the request actually used filters.
	if lenFilters != 0 {
		claims, errv2 := authtypes.ClaimsFromContext(ctx)
		if errv2 == nil {
			telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_LOGS_FILTERS, data, claims.Email, true, false)
		}
	}
	// Build the bucketed SELECT; the grouped variant adds a groupBy column.
	// NOTE(review): params.Function/GroupBy and the timestamps are
	// interpolated directly into the SQL string; safe only if validated
	// upstream — confirm.
	query := ""
	if params.GroupBy != "" {
		query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, toString(%s) as groupBy, "+
			"%s "+
			"FROM %s.%s WHERE (timestamp >= '%d' AND timestamp <= '%d' )",
			params.StepSeconds/60, params.GroupBy, function, r.logsDB, r.logsTable, params.TimestampStart, params.TimestampEnd)
	} else {
		query = fmt.Sprintf("SELECT toInt64(toUnixTimestamp(toStartOfInterval(toDateTime(timestamp/1000000000), INTERVAL %d minute))*1000000000) as ts_start_interval, "+
			"%s "+
			"FROM %s.%s WHERE (timestamp >= '%d' AND timestamp <= '%d' )",
			params.StepSeconds/60, function, r.logsDB, r.logsTable, params.TimestampStart, params.TimestampEnd)
	}
	if filterSql != "" {
		query = fmt.Sprintf("%s AND ( %s ) ", query, filterSql)
	}
	if params.GroupBy != "" {
		query = fmt.Sprintf("%s GROUP BY ts_start_interval, toString(%s) as groupBy ORDER BY ts_start_interval", query, params.GroupBy)
	} else {
		query = fmt.Sprintf("%s GROUP BY ts_start_interval ORDER BY ts_start_interval", query)
	}
	err = r.db.Select(ctx, &logAggregatesDBResponseItems, query)
	if err != nil {
		return nil, &model.ApiError{Err: err, Typ: model.ErrorInternal}
	}
	aggregateResponse := model.GetLogsAggregatesResponse{
		Items: make(map[int64]model.LogsAggregatesResponseItem),
	}
	// Fold the flat DB rows into a map keyed by bucket timestamp; grouped
	// values for the same bucket are merged into one item's GroupBy map.
	// NOTE(review): the lookup casts Timestamp to int64 while the inserts
	// below do not — presumably Timestamp is already int64, making the cast
	// redundant; confirm against the model definition.
	for i := range logAggregatesDBResponseItems {
		if elem, ok := aggregateResponse.Items[int64(logAggregatesDBResponseItems[i].Timestamp)]; ok {
			if params.GroupBy != "" && logAggregatesDBResponseItems[i].GroupBy != "" {
				elem.GroupBy[logAggregatesDBResponseItems[i].GroupBy] = logAggregatesDBResponseItems[i].Value
			}
			aggregateResponse.Items[logAggregatesDBResponseItems[i].Timestamp] = elem
		} else {
			if params.GroupBy != "" && logAggregatesDBResponseItems[i].GroupBy != "" {
				aggregateResponse.Items[logAggregatesDBResponseItems[i].Timestamp] = model.LogsAggregatesResponseItem{
					Timestamp: logAggregatesDBResponseItems[i].Timestamp,
					GroupBy:   map[string]interface{}{logAggregatesDBResponseItems[i].GroupBy: logAggregatesDBResponseItems[i].Value},
				}
			} else if params.GroupBy == "" {
				aggregateResponse.Items[logAggregatesDBResponseItems[i].Timestamp] = model.LogsAggregatesResponseItem{
					Timestamp: logAggregatesDBResponseItems[i].Timestamp,
					Value:     logAggregatesDBResponseItems[i].Value,
				}
			}
		}
	}
	return &aggregateResponse, nil
}
func (r *ClickHouseReader) QueryDashboardVars(ctx context.Context, query string) (*model.DashboardVar, error) {
var result = model.DashboardVar{VariableValues: make([]interface{}, 0)}
rows, err := r.db.Query(ctx, query)
@@ -4427,7 +4908,34 @@ func (r *ClickHouseReader) GetTriggersByInterval(ctx context.Context, ruleID str
return result[0], nil
}
func (r *ClickHouseReader) ReportQueryStartForProgressTracking(queryId string) (func(), *model.ApiError) {
// GetMinAndMaxTimestampForTraceID returns the smallest and largest span
// timestamps (epoch nanoseconds) across the given trace IDs. When none of
// the IDs are found it returns the current time for both bounds.
//
// BUGFIX: the not-found path now captures time.Now() once so both returned
// bounds are identical; previously two separate calls could differ.
func (r *ClickHouseReader) GetMinAndMaxTimestampForTraceID(ctx context.Context, traceID []string) (int64, int64, error) {
	var minTime, maxTime time.Time
	// NOTE(review): trace IDs are spliced directly into the SQL string; safe
	// only if callers guarantee well-formed IDs — consider parameterizing.
	query := fmt.Sprintf("SELECT min(timestamp), max(timestamp) FROM %s.%s WHERE traceID IN ('%s')",
		r.TraceDB, r.SpansTable, strings.Join(traceID, "','"))

	zap.L().Debug("GetMinAndMaxTimestampForTraceID", zap.String("query", query))

	err := r.db.QueryRow(ctx, query).Scan(&minTime, &maxTime)
	if err != nil {
		zap.L().Error("Error while executing query", zap.Error(err))
		return 0, 0, err
	}

	// return current time if traceID not found
	if minTime.IsZero() || maxTime.IsZero() {
		zap.L().Debug("minTime or maxTime is zero, traceID not found")
		now := time.Now().UnixNano()
		return now, now, nil
	}

	zap.L().Debug("GetMinAndMaxTimestampForTraceID", zap.Any("minTime", minTime), zap.Any("maxTime", maxTime))
	return minTime.UnixNano(), maxTime.UnixNano(), nil
}
// ReportQueryStartForProgressTracking registers queryId with the query
// progress tracker and returns a completion callback for the caller to
// invoke when the query finishes.
func (r *ClickHouseReader) ReportQueryStartForProgressTracking(queryId string) (func(), *model.ApiError) {
	return r.queryProgressTracker.ReportQueryStarted(queryId)
}

View File

@@ -23,9 +23,11 @@ import (
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
tracefunnels "github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
"github.com/SigNoz/signoz/pkg/signoz"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/promql"
@@ -49,6 +51,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
tracesV3 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v3"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/query-service/auth"
"github.com/SigNoz/signoz/pkg/query-service/cache"
@@ -528,8 +531,12 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// router.HandleFunc("/api/v1/get_percentiles", aH.getApplicationPercentiles).Methods(http.MethodGet)
router.HandleFunc("/api/v1/services", am.ViewAccess(aH.getServices)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/services/list", am.ViewAccess(aH.getServicesList)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/service/top_operations", am.ViewAccess(aH.getTopOperations)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/service/top_level_operations", am.ViewAccess(aH.getServicesTopLevelOps)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(aH.SearchTraces)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/usage", am.ViewAccess(aH.getUsage)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/dependency_graph", am.ViewAccess(aH.dependencyGraph)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/settings/ttl", am.AdminAccess(aH.setTTL)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.setApdexSettings)).Methods(http.MethodPost)
@@ -1598,13 +1605,122 @@ func (aH *APIHandler) getTopOperations(w http.ResponseWriter, r *http.Request) {
}
// getUsage responds with span-usage data for the time range, step and
// optional service given in the request's query string.
func (aH *APIHandler) getUsage(w http.ResponseWriter, r *http.Request) {
	params, err := parseGetUsageRequest(r)
	if aH.HandleError(w, err, http.StatusBadRequest) {
		return
	}

	usage, err := aH.reader.GetUsage(r.Context(), params)
	if aH.HandleError(w, err, http.StatusBadRequest) {
		return
	}

	aH.WriteJSON(w, r, usage)
}
// getServicesTopLevelOps returns, per service, the list of top-level
// (entry-point) operation names.
//
// The POST body may optionally narrow the result:
//   - "service": restrict to a single service (empty means all services)
//   - "start"/"end": epoch timestamps in nanoseconds, as decimal strings
func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Request) {
	var start, end time.Time
	var services []string

	// Expected JSON shape of the request body.
	type topLevelOpsParams struct {
		Service string `json:"service"`
		Start string `json:"start"`
		End string `json:"end"`
	}
	var params topLevelOpsParams
	err := json.NewDecoder(r.Body).Decode(&params)
	if err != nil {
		// NOTE(review): a decode failure is only logged and the handler
		// proceeds with zero values (all services, zero time range) —
		// presumably to tolerate an empty request body. Confirm intended.
		zap.L().Error("Error in getting req body for get top operations API", zap.Error(err))
	}

	if params.Service != "" {
		services = []string{params.Service}
	}

	startEpoch := params.Start
	if startEpoch != "" {
		startEpochInt, err := strconv.ParseInt(startEpoch, 10, 64)
		if err != nil {
			RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading start time")
			return
		}
		// Value is interpreted as nanoseconds since the Unix epoch.
		start = time.Unix(0, startEpochInt)
	}

	endEpoch := params.End
	if endEpoch != "" {
		endEpochInt, err := strconv.ParseInt(endEpoch, 10, 64)
		if err != nil {
			RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading end time")
			return
		}
		end = time.Unix(0, endEpochInt)
	}

	result, apiErr := aH.reader.GetTopLevelOperations(r.Context(), start, end, services)
	if apiErr != nil {
		RespondError(w, apiErr, nil)
		return
	}

	aH.WriteJSON(w, r, result)
}
// getServices returns the list of services (with aggregate metrics) for the
// time range given in the request body, and records telemetry about the
// number of services observed.
func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
	query, err := parseGetServicesRequest(r)
	if aH.HandleError(w, err, http.StatusBadRequest) {
		return
	}

	result, apiErr := aH.reader.GetServices(r.Context(), query)
	if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
		return
	}

	data := map[string]interface{}{
		"number": len(*result),
	}

	// Only emit the telemetry event when the caller's claims resolved
	// successfully. The previous condition was inverted (errv2 != nil) and
	// read claims.Email on the error path.
	claims, errv2 := authtypes.ClaimsFromContext(r.Context())
	if errv2 == nil {
		telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_NUMBER_OF_SERVICES, data, claims.Email, true, false)
	}

	if (data["number"] != 0) && (data["number"] != telemetry.DEFAULT_NUMBER_OF_SERVICES) {
		telemetry.GetInstance().AddActiveTracesUser()
	}

	aH.WriteJSON(w, r, result)
}
// dependencyGraph responds with the service dependency map for the
// requested time range.
func (aH *APIHandler) dependencyGraph(w http.ResponseWriter, r *http.Request) {
	params, err := parseGetServicesRequest(r)
	if aH.HandleError(w, err, http.StatusBadRequest) {
		return
	}

	graph, err := aH.reader.GetDependencyGraph(r.Context(), params)
	if aH.HandleError(w, err, http.StatusBadRequest) {
		return
	}

	aH.WriteJSON(w, r, graph)
}
// getServicesList responds with the names of all known services.
func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {
	services, err := aH.reader.GetServicesList(r.Context())
	if aH.HandleError(w, err, http.StatusBadRequest) {
		return
	}
	aH.WriteJSON(w, r, services)
}
func (aH *APIHandler) SearchTraces(w http.ResponseWriter, r *http.Request) {
@@ -4104,8 +4220,11 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
// logs
func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1/logs").Subrouter()
subRouter.HandleFunc("", am.ViewAccess(aH.getLogs)).Methods(http.MethodGet)
subRouter.HandleFunc("/tail", am.ViewAccess(aH.tailLogs)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields", am.ViewAccess(aH.logFields)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields", am.EditAccess(aH.logFieldUpdate)).Methods(http.MethodPost)
subRouter.HandleFunc("/aggregate", am.ViewAccess(aH.logAggregate)).Methods(http.MethodGet)
// log pipelines
subRouter.HandleFunc("/pipelines/preview", am.ViewAccess(aH.PreviewLogsPipelinesHandler)).Methods(http.MethodPost)
@@ -4145,6 +4264,81 @@ func (aH *APIHandler) logFieldUpdate(w http.ResponseWriter, r *http.Request) {
aH.WriteJSON(w, r, field)
}
// getLogs fetches logs matching the filter parameters supplied in the
// request's query string and returns them under a "results" key.
func (aH *APIHandler) getLogs(w http.ResponseWriter, r *http.Request) {
	filterParams, err := logs.ParseLogFilterParams(r)
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Incorrect params")
		return
	}

	logItems, apiErr := aH.reader.GetLogs(r.Context(), filterParams)
	if apiErr != nil {
		RespondError(w, apiErr, "Failed to fetch logs from the DB")
		return
	}

	aH.WriteJSON(w, r, map[string]interface{}{"results": logItems})
}
// tailLogs streams live log lines to the client as Server-Sent Events
// (SSE). The connection stays open until the reader signals completion or
// an error.
func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
	params, err := logs.ParseLogFilterParams(r)
	if err != nil {
		apiErr := &model.ApiError{Typ: model.ErrorBadData, Err: err}
		RespondError(w, apiErr, "Incorrect params")
		return
	}

	// create the client; the buffered Logs channel decouples the reader
	// goroutine from the write loop below.
	client := &model.LogsTailClient{Name: r.RemoteAddr, Logs: make(chan *model.SignozLog, 1000), Done: make(chan *bool), Error: make(chan error), Filter: *params}
	go aH.reader.TailLogs(r.Context(), client)

	// SSE response headers; these must be set before the first event is
	// written.
	w.Header().Set("Connection", "keep-alive")
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Access-Control-Allow-Origin", "*")
	w.WriteHeader(200)

	// Streaming requires a flushable response writer; bail out otherwise.
	flusher, ok := w.(http.Flusher)
	if !ok {
		err := model.ApiError{Typ: model.ErrorStreamingNotSupported, Err: nil}
		RespondError(w, &err, "streaming is not supported")
		return
	}
	// flush the headers
	flusher.Flush()

	// Forward each log as a single "data:" frame until the reader reports
	// completion (Done) or failure (Error).
	for {
		select {
		case log := <-client.Logs:
			var buf bytes.Buffer
			enc := json.NewEncoder(&buf)
			enc.Encode(log)
			fmt.Fprintf(w, "data: %v\n\n", buf.String())
			flusher.Flush()
		case <-client.Done:
			zap.L().Debug("done!")
			return
		case err := <-client.Error:
			zap.L().Error("error occured", zap.Error(err))
			return
		}
	}
}
// logAggregate returns aggregated log counts for the parameters supplied in
// the request's query string.
func (aH *APIHandler) logAggregate(w http.ResponseWriter, r *http.Request) {
	aggParams, err := logs.ParseLogAggregateParams(r)
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Incorrect params")
		return
	}

	aggResult, apiErr := aH.reader.AggregateLogs(r.Context(), aggParams)
	if apiErr != nil {
		RespondError(w, apiErr, "Failed to fetch logs aggregate from the DB")
		return
	}

	aH.WriteJSON(w, r, aggResult)
}
const logPipelines = "log_pipelines"
func parseAgentConfigVersion(r *http.Request) (int, *model.ApiError) {
@@ -4630,6 +4824,22 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
// WARN: Only works for AND operator in traces query
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
if isUsed && len(traceIDs) > 0 {
zap.L().Debug("traceID used as filter in traces query")
// query signoz_spans table with traceID to get min and max timestamp
min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
if err == nil {
// add timestamp filter to queryRange params
tracesV3.AddTimestampFilters(min, max, queryRangeParams)
zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
}
}
}
// Hook up query progress tracking if requested
queryIdHeader := r.Header.Get("X-SIGNOZ-QUERY-ID")
if len(queryIdHeader) > 0 {
@@ -5013,6 +5223,22 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
tracesV4.Enrich(queryRangeParams, spanKeys)
}
// WARN: Only works for AND operator in traces query
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
if isUsed && len(traceIDs) > 0 {
zap.L().Debug("traceID used as filter in traces query")
// query signoz_spans table with traceID to get min and max timestamp
min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
if err == nil {
// add timestamp filter to queryRange params
tracesV3.AddTimestampFilters(min, max, queryRangeParams)
zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
}
}
}
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, queryRangeParams)
if err != nil {
@@ -5201,3 +5427,210 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
}
aH.Respond(w, resp)
}
// RegisterTraceFunnelsRoutes adds trace funnels routes
func (aH *APIHandler) RegisterTraceFunnelsRoutes(router *mux.Router, am *middleware.AuthZ) {
	// Main trace funnels router
	traceFunnelsRouter := router.PathPrefix("/api/v1/trace-funnels").Subrouter()

	// API endpoints
	traceFunnelsRouter.HandleFunc("/new",
		am.ViewAccess(aH.Signoz.Handlers.TraceFunnel.New)).
		Methods(http.MethodPost)

	traceFunnelsRouter.HandleFunc("/list",
		am.ViewAccess(aH.Signoz.Handlers.TraceFunnel.List)).
		Methods(http.MethodGet)

	traceFunnelsRouter.HandleFunc("/steps/update",
		am.ViewAccess(aH.Signoz.Handlers.TraceFunnel.UpdateSteps)).
		Methods(http.MethodPut)

	traceFunnelsRouter.HandleFunc("/{funnel_id}",
		am.ViewAccess(aH.Signoz.Handlers.TraceFunnel.Get)).
		Methods(http.MethodGet)

	traceFunnelsRouter.HandleFunc("/{funnel_id}",
		am.ViewAccess(aH.Signoz.Handlers.TraceFunnel.Delete)).
		Methods(http.MethodDelete)

	traceFunnelsRouter.HandleFunc("/{funnel_id}",
		am.ViewAccess(aH.Signoz.Handlers.TraceFunnel.UpdateFunnel)).
		Methods(http.MethodPut)

	traceFunnelsRouter.HandleFunc("/save",
		am.ViewAccess(aH.Signoz.Handlers.TraceFunnel.Save)).
		Methods(http.MethodPost)

	// Analytics endpoints. These were previously registered without any
	// AuthZ wrapper (anyone could hit them) and with the raw "POST"
	// literal; wrap them in ViewAccess and use http.MethodPost for
	// consistency with every other route in this file.
	traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/validate", am.ViewAccess(aH.handleValidateTraces)).Methods(http.MethodPost)
	traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/overview", am.ViewAccess(aH.handleFunnelAnalytics)).Methods(http.MethodPost)
	traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/steps", am.ViewAccess(aH.handleStepAnalytics)).Methods(http.MethodPost)
	traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/slow-traces", am.ViewAccess(aH.handleFunnelSlowTraces)).Methods(http.MethodPost)
	traceFunnelsRouter.HandleFunc("/{funnel_id}/analytics/error-traces", am.ViewAccess(aH.handleFunnelErrorTraces)).Methods(http.MethodPost)
}
// handleValidateTraces checks that the funnel identified by {funnel_id} has
// queryable trace data for the time range in the request body and responds
// with the matching traces.
func (aH *APIHandler) handleValidateTraces(w http.ResponseWriter, r *http.Request) {
	funnelID := mux.Vars(r)["funnel_id"]

	funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), funnelID)
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
		return
	}

	var timeRange traceFunnels.TimeRange
	if decodeErr := json.NewDecoder(r.Body).Decode(&timeRange); decodeErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding time range: %v", decodeErr)}, nil)
		return
	}

	// A funnel transition needs at least two steps to be meaningful.
	if len(funnel.Steps) < 2 {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("funnel must have at least 2 steps")}, nil)
		return
	}

	clickhouseQuery, buildErr := tracefunnels.ValidateTraces(funnel, timeRange)
	if buildErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", buildErr)}, nil)
		return
	}

	listResult, queryErr := aH.reader.GetListResultV3(r.Context(), clickhouseQuery.Query)
	if queryErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", queryErr)}, nil)
		return
	}

	aH.Respond(w, listResult)
}
// handleFunnelAnalytics responds with the funnel-overview analytics
// (validated traces including latency) for the funnel identified by
// {funnel_id} over the time range in the request body.
func (aH *APIHandler) handleFunnelAnalytics(w http.ResponseWriter, r *http.Request) {
	funnelID := mux.Vars(r)["funnel_id"]

	funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), funnelID)
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
		return
	}

	var timeRange traceFunnels.TimeRange
	if decodeErr := json.NewDecoder(r.Body).Decode(&timeRange); decodeErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding time range: %v", decodeErr)}, nil)
		return
	}

	clickhouseQuery, buildErr := tracefunnels.ValidateTracesWithLatency(funnel, timeRange)
	if buildErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", buildErr)}, nil)
		return
	}

	listResult, queryErr := aH.reader.GetListResultV3(r.Context(), clickhouseQuery.Query)
	if queryErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", queryErr)}, nil)
		return
	}

	aH.Respond(w, listResult)
}
// handleStepAnalytics responds with per-step analytics for the funnel
// identified by {funnel_id} over the time range in the request body.
func (aH *APIHandler) handleStepAnalytics(w http.ResponseWriter, r *http.Request) {
	funnelID := mux.Vars(r)["funnel_id"]

	funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), funnelID)
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("funnel not found: %v", err)}, nil)
		return
	}

	var timeRange traceFunnels.TimeRange
	if decodeErr := json.NewDecoder(r.Body).Decode(&timeRange); decodeErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error decoding time range: %v", decodeErr)}, nil)
		return
	}

	clickhouseQuery, buildErr := tracefunnels.GetStepAnalytics(funnel, timeRange)
	if buildErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", buildErr)}, nil)
		return
	}

	listResult, queryErr := aH.reader.GetListResultV3(r.Context(), clickhouseQuery.Query)
	if queryErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", queryErr)}, nil)
		return
	}

	aH.Respond(w, listResult)
}
// handleFunnelSlowTraces handles requests for slow traces in a funnel;
// it delegates to handleTracesWithLatency with isError=false.
func (aH *APIHandler) handleFunnelSlowTraces(w http.ResponseWriter, r *http.Request) {
	aH.handleTracesWithLatency(w, r, false)
}
// handleFunnelErrorTraces handles requests for error traces in a funnel;
// it delegates to handleTracesWithLatency with isError=true.
func (aH *APIHandler) handleFunnelErrorTraces(w http.ResponseWriter, r *http.Request) {
	aH.handleTracesWithLatency(w, r, true)
}
// handleTracesWithLatency handles both slow and error traces with common
// logic: it validates the request and the referenced steps, builds the
// ClickHouse query, and responds with the resulting trace list.
func (aH *APIHandler) handleTracesWithLatency(w http.ResponseWriter, r *http.Request, isError bool) {
	funnel, req, err := aH.validateTracesRequest(r)
	if err != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
		return
	}

	if stepErr := aH.validateSteps(funnel, req.StepAOrder, req.StepBOrder); stepErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: stepErr}, nil)
		return
	}

	clickhouseQuery, buildErr := tracefunnels.GetSlowestTraces(funnel, req.StepAOrder, req.StepBOrder, req.TimeRange, isError)
	if buildErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error building clickhouse query: %v", buildErr)}, nil)
		return
	}

	listResult, queryErr := aH.reader.GetListResultV3(r.Context(), clickhouseQuery.Query)
	if queryErr != nil {
		RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error converting clickhouse results to list: %v", queryErr)}, nil)
		return
	}

	aH.Respond(w, listResult)
}
// validateTracesRequest validates and extracts the request parameters: it
// resolves the funnel from the {funnel_id} path variable and decodes the
// step-transition request from the body.
func (aH *APIHandler) validateTracesRequest(r *http.Request) (*traceFunnels.Funnel, *traceFunnels.StepTransitionRequest, error) {
	funnelID := mux.Vars(r)["funnel_id"]

	funnel, err := aH.Signoz.Modules.TraceFunnel.Get(r.Context(), funnelID)
	if err != nil {
		return nil, nil, fmt.Errorf("funnel not found: %v", err)
	}

	req := &traceFunnels.StepTransitionRequest{}
	if decodeErr := json.NewDecoder(r.Body).Decode(req); decodeErr != nil {
		return nil, nil, fmt.Errorf("invalid request body: %v", decodeErr)
	}

	return funnel, req, nil
}
// validateSteps checks if the requested steps exist in the funnel.
func (aH *APIHandler) validateSteps(funnel *traceFunnels.Funnel, stepAOrder, stepBOrder int64) error {
	foundA, foundB := false, false
	for _, step := range funnel.Steps {
		if step.Order == stepAOrder {
			foundA = true
		}
		if step.Order == stepBOrder {
			foundB = true
		}
		// Both located; no need to scan the rest.
		if foundA && foundB {
			break
		}
	}

	if !(foundA && foundB) {
		return fmt.Errorf("one or both steps not found. Step A Order: %d, Step B Order: %d", stepAOrder, stepBOrder)
	}
	return nil
}

View File

@@ -104,7 +104,7 @@ func (agent *Agent) updateAgentDescription(newStatus *protobufs.AgentToServer) (
agent.Status = newStatus
agentDescrChanged = true
} else {
// Not a new Agent. Update the Status.
// Not a new Agent. Update the Status.
agent.Status.SequenceNum = newStatus.SequenceNum
// Check what's changed in the AgentDescription.
@@ -127,7 +127,7 @@ func (agent *Agent) updateAgentDescription(newStatus *protobufs.AgentToServer) (
agentDescrChanged = false
}
// Update remote config status if it is included and is different from what we have.
// Update remote config status if it is included and is different from what we have.
if newStatus.RemoteConfigStatus != nil &&
!proto.Equal(agent.Status.RemoteConfigStatus, newStatus.RemoteConfigStatus) {
agent.Status.RemoteConfigStatus = newStatus.RemoteConfigStatus
@@ -164,7 +164,7 @@ func (agent *Agent) updateHealth(newStatus *protobufs.AgentToServer) {
}
func (agent *Agent) updateRemoteConfigStatus(newStatus *protobufs.AgentToServer) {
// Update remote config status if it is included and is different from what we have.
// Update remote config status if it is included and is different from what we have.
if newStatus.RemoteConfigStatus != nil {
agent.Status.RemoteConfigStatus = newStatus.RemoteConfigStatus
}
@@ -184,7 +184,7 @@ func (agent *Agent) updateStatusField(newStatus *protobufs.AgentToServer) (agent
}
func (agent *Agent) updateEffectiveConfig(newStatus *protobufs.AgentToServer, response *protobufs.ServerToAgent) {
// Update effective config if provided.
// Update effective config if provided.
if newStatus.EffectiveConfig != nil {
if newStatus.EffectiveConfig.ConfigMap != nil {
agent.Status.EffectiveConfig = newStatus.EffectiveConfig

View File

@@ -171,6 +171,42 @@ func parseQueryRangeRequest(r *http.Request) (*model.QueryRangeParams, *model.Ap
return &queryRangeParams, nil
}
// parseGetUsageRequest extracts usage-query parameters from the URL query
// string. "start" and "end" are parsed by parseTime; "step" (seconds, as an
// integer) is required; "service" is optional (empty means all services).
func parseGetUsageRequest(r *http.Request) (*model.GetUsageParams, error) {
	startTime, err := parseTime("start", r)
	if err != nil {
		return nil, err
	}
	endTime, err := parseTime("end", r)
	if err != nil {
		return nil, err
	}

	stepStr := r.URL.Query().Get("step")
	if len(stepStr) == 0 {
		return nil, errors.New("step param missing in query")
	}
	stepInt, err := strconv.Atoi(stepStr)
	if err != nil {
		return nil, errors.New("step param is not in correct format")
	}

	serviceName := r.URL.Query().Get("service")
	// Integer division: a step below 3600s truncates to 0 hours and yields
	// a "PT0H" period. NOTE(review): confirm sub-hour steps are not
	// expected here.
	stepHour := stepInt / 3600

	getUsageParams := model.GetUsageParams{
		StartTime: startTime.Format(time.RFC3339Nano),
		EndTime: endTime.Format(time.RFC3339Nano),
		Start: startTime,
		End: endTime,
		ServiceName: serviceName,
		// ISO-8601 duration string derived from the hour-truncated step.
		Period: fmt.Sprintf("PT%dH", stepHour),
		StepHour: stepHour,
	}

	return &getUsageParams, nil
}
func parseGetServicesRequest(r *http.Request) (*model.GetServicesParams, error) {
var postData *model.GetServicesParams

View File

@@ -1382,7 +1382,7 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
},
),
}
// Update query parameters
// Update query parameters
params.Start = tc.queryParams.start
params.End = tc.queryParams.end
params.CompositeQuery.BuilderQueries["A"].Limit = tc.queryParams.limit

View File

@@ -1436,7 +1436,7 @@ func Test_querier_runWindowBasedListQuery(t *testing.T) {
},
),
}
// Update query parameters
// Update query parameters
params.Start = tc.queryParams.start
params.End = tc.queryParams.end
params.CompositeQuery.BuilderQueries["A"].Limit = tc.queryParams.limit

View File

@@ -140,7 +140,7 @@ func funcEWMA(result *v3.Result, alpha float64) *v3.Result {
}
if !math.IsNaN(point.Value) {
// Update EWMA with the current value
// Update EWMA with the current value
ewma = alpha*point.Value + (1-alpha)*ewma
}
// Set the EWMA value for the current point

View File

@@ -281,6 +281,7 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
api.RegisterMessagingQueuesRoutes(r, am)
api.RegisterThirdPartyApiRoutes(r, am)
api.MetricExplorerRoutes(r, am)
api.RegisterTraceFunnelsRoutes(r, am)
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},

View File

@@ -153,7 +153,7 @@ func InviteUsers(ctx context.Context, req *model.BulkInviteRequest) (*model.Bulk
}
}
// Update the status based on the results
// Update the status based on the results
if response.Summary.FailedInvites == response.Summary.TotalInvites {
response.Status = "failure"
} else if response.Summary.FailedInvites > 0 {

View File

@@ -15,8 +15,12 @@ import (
type Reader interface {
GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
GetTopLevelOperations(ctx context.Context, start, end time.Time, services []string) (*map[string][]string, *model.ApiError)
GetServices(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError)
GetTopOperations(ctx context.Context, query *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError)
GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error)
GetServicesList(ctx context.Context) (*[]string, error)
GetDependencyGraph(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error)
GetTTL(ctx context.Context, orgID string, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError)
@@ -70,6 +74,9 @@ type Reader interface {
// Logs
GetLogFields(ctx context.Context) (*model.GetFieldsResponse, *model.ApiError)
UpdateLogField(ctx context.Context, field *model.UpdateField) *model.ApiError
GetLogs(ctx context.Context, params *model.LogsFilterParams) (*[]model.SignozLog, *model.ApiError)
TailLogs(ctx context.Context, client *model.LogsTailClient)
AggregateLogs(ctx context.Context, params *model.LogsAggregateParams) (*model.GetLogsAggregatesResponse, *model.ApiError)
GetLogAttributeKeys(ctx context.Context, req *v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error)
GetLogAttributeValues(ctx context.Context, req *v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error)
GetLogAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest) (*v3.AggregateAttributeResponse, error)
@@ -93,6 +100,8 @@ type Reader interface {
ReadRuleStateHistoryTopContributorsByRuleID(ctx context.Context, ruleID string, params *model.QueryRuleStateHistory) ([]model.RuleStateHistoryContributor, error)
GetLastSavedRuleStateHistory(ctx context.Context, ruleID string) ([]model.RuleStateHistory, error)
GetMinAndMaxTimestampForTraceID(ctx context.Context, traceID []string) (int64, int64, error)
// Query Progress tracking helpers.
ReportQueryStartForProgressTracking(queryId string) (reportQueryFinished func(), err *model.ApiError)
SubscribeToQueryProgress(queryId string) (<-chan model.QueryProgress, func(), *model.ApiError)

View File

@@ -14,6 +14,8 @@ import (
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/SigNoz/signoz/pkg/zeus/noopzeus"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
@@ -99,6 +101,8 @@ func main() {
signoz, err := signoz.New(
context.Background(),
config,
zeus.Config{},
noopzeus.NewProviderFactory(),
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
signoz.NewSQLStoreProviderFactories(),

View File

@@ -70,6 +70,16 @@ type RegisterEventParams struct {
RateLimited bool `json:"rateLimited"`
}
// GetUsageParams carries the parameters of a span-usage query. It is
// populated from the request's query string by parseGetUsageRequest.
type GetUsageParams struct {
	StartTime string // RFC3339Nano rendering of Start
	EndTime string // RFC3339Nano rendering of End
	ServiceName string // optional service filter; empty means all services
	Period string // ISO-8601 duration ("PT<n>H") derived from StepHour
	StepHour int // query step truncated to whole hours
	Start *time.Time
	End *time.Time
}
type GetServicesParams struct {
StartTime string `json:"start"`
EndTime string `json:"end"`

View File

@@ -50,7 +50,7 @@ func PostProcessResult(result []*v3.Result, queryRangeParams *v3.QueryRangeParam
for _, query := range queryRangeParams.CompositeQuery.BuilderQueries {
// The way we distinguish between a formula and a query is by checking if the expression
// is the same as the query name
// TODO(srikanthccv): Update the UI to send a flag to distinguish between a formula and a query
// TODO(srikanthccv): Update the UI to send a flag to distinguish between a formula and a query
if query.Expression != query.QueryName {
expression, err := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, EvalFuncs())
// This shouldn't happen here, because it should have been caught earlier in validation

View File

@@ -115,7 +115,7 @@ func (q *queryCache) FindMissingTimeRangesV2(start, end int64, step int64, cache
missingRanges = append(missingRanges, MissInterval{Start: currentTime, End: min(data.Start, end)})
}
// Update currentTime, but don't go past the end time
// Update currentTime, but don't go past the end time
currentTime = max(currentTime, min(data.End, end))
}
@@ -205,7 +205,7 @@ func (q *queryCache) FindMissingTimeRanges(start, end, step int64, cacheKey stri
missingRanges = append(missingRanges, MissInterval{Start: currentTime, End: min(data.Start, end)})
}
// Update currentTime, but don't go past the end time
// Update currentTime, but don't go past the end time
currentTime = max(currentTime, min(data.End, end))
}

View File

@@ -209,7 +209,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
// Update the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
alert.Value = a.Value
alert.Annotations = a.Annotations

View File

@@ -471,7 +471,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
// Update the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
alert.Value = a.Value

View File

@@ -146,7 +146,7 @@ func (r *rule) ListOrgs(ctx context.Context) ([]string, error) {
func (r *rule) getChannels() (*[]model.ChannelItem, *model.ApiError) {
channels := []model.ChannelItem{}
query := "SELECT id, created_at, updated_at, name, type, data FROM notification_channels"
query := "SELECT id, created_at, updated_at, name, type, data FROM notification_channel"
err := r.Select(&channels, query)
@@ -163,7 +163,7 @@ func (r *rule) getChannels() (*[]model.ChannelItem, *model.ApiError) {
func (r *rule) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
alertsInfo := model.AlertsInfo{}
// fetch alerts from rules db
query := "SELECT data FROM rules"
query := "SELECT data FROM rule"
var alertsData []string
var alertNames []string
err := r.Select(&alertsData, query)

View File

@@ -5,16 +5,20 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
)
type Handlers struct {
Organization organization.Handler
Preference preference.Handler
TraceFunnel tracefunnel.Handler
}
// NewHandlers wires an HTTP handler around each of the given modules.
func NewHandlers(modules Modules) Handlers {
	return Handlers{
		Organization: implorganization.NewHandler(modules.Organization),
		Preference: implpreference.NewHandler(modules.Preference),
		TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
	}
}

View File

@@ -5,6 +5,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
)
@@ -12,11 +14,13 @@ import (
type Modules struct {
Organization organization.Module
Preference preference.Module
TraceFunnel tracefunnel.Module
}
// NewModules constructs every module with a store backed by the given
// SQL store.
func NewModules(sqlstore sqlstore.SQLStore) Modules {
	return Modules{
		Organization: implorganization.NewModule(implorganization.NewStore(sqlstore)),
		Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewDefaultPreferenceMap()),
		TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
	}
}

View File

@@ -74,6 +74,7 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
sqlmigration.NewUpdateIntegrationsFactory(sqlstore),
sqlmigration.NewUpdateOrganizationsFactory(sqlstore),
sqlmigration.NewDropGroupsFactory(sqlstore),
sqlmigration.NewAddTraceFunnelsFactory(sqlstore),
)
}

View File

@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/SigNoz/signoz/pkg/web"
)
@@ -26,6 +27,7 @@ type SigNoz struct {
TelemetryStore telemetrystore.TelemetryStore
Prometheus prometheus.Prometheus
Alertmanager alertmanager.Alertmanager
Zeus zeus.Zeus
Modules Modules
Handlers Handlers
}
@@ -33,6 +35,8 @@ type SigNoz struct {
func New(
ctx context.Context,
config Config,
zeusConfig zeus.Config,
zeusProviderFactory factory.ProviderFactory[zeus.Zeus, zeus.Config],
cacheProviderFactories factory.NamedMap[factory.ProviderFactory[cache.Cache, cache.Config]],
webProviderFactories factory.NamedMap[factory.ProviderFactory[web.Web, web.Config]],
sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]],
@@ -50,6 +54,17 @@ func New(
// Get the provider settings from instrumentation
providerSettings := instrumentation.ToProviderSettings()
// Initialize zeus from the available zeus provider factory. This is not config controlled
// and depends on the variant of the build.
zeus, err := zeusProviderFactory.New(
ctx,
providerSettings,
zeusConfig,
)
if err != nil {
return nil, err
}
// Initialize cache from the available cache provider factories
cache, err := factory.NewProviderFromNamedMap(
ctx,
@@ -162,6 +177,7 @@ func New(
TelemetryStore: telemetrystore,
Prometheus: prometheus,
Alertmanager: alertmanager,
Zeus: zeus,
Modules: modules,
Handlers: handlers,
}, nil

View File

@@ -0,0 +1,96 @@
package sqlmigration
import (
"context"
"fmt"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunnel"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
// addTraceFunnels is a SQL migration that creates (Up) and drops (Down) the
// trace_funnel table together with its indexes.
type addTraceFunnels struct {
	sqlstore sqlstore.SQLStore
}

// NewAddTraceFunnelsFactory returns the provider factory for the
// "add_trace_funnels" migration.
func NewAddTraceFunnelsFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(factory.MustNewName("add_trace_funnels"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
		return newAddTraceFunnels(ctx, providerSettings, config, sqlstore)
	})
}

// newAddTraceFunnels builds the migration; the blank parameters only exist
// to satisfy the provider-factory constructor signature.
func newAddTraceFunnels(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore) (SQLMigration, error) {
	return &addTraceFunnels{sqlstore: sqlstore}, nil
}
// Register adds this migration's Up and Down functions to the collection.
func (migration *addTraceFunnels) Register(migrations *migrate.Migrations) error {
	return migrations.Register(migration.Up, migration.Down)
}
// Up creates the trace_funnel table (with an org_id foreign key), a unique
// (org_id, name) index, and supporting indexes, all inside one transaction.
func (migration *addTraceFunnels) Up(ctx context.Context, db *bun.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// No-op once Commit succeeds.
	defer tx.Rollback() //nolint:errcheck

	// Create trace_funnel table with foreign key constraint inline.
	// Errors are wrapped with %w (previously %v) so callers can unwrap the
	// underlying driver error with errors.Is/As.
	_, err = tx.NewCreateTable().Model((*traceFunnels.Funnel)(nil)).
		ForeignKey(`("org_id") REFERENCES "organizations" ("id") ON DELETE CASCADE`).
		IfNotExists().
		Exec(ctx)
	if err != nil {
		return fmt.Errorf("failed to create trace_funnel table: %w", err)
	}

	// Add unique constraint for org_id and name
	_, err = tx.NewRaw(`
		CREATE UNIQUE INDEX IF NOT EXISTS idx_trace_funnel_org_id_name
		ON trace_funnel (org_id, name)
	`).Exec(ctx)
	if err != nil {
		return fmt.Errorf("failed to create unique constraint: %w", err)
	}

	// Create indexes
	_, err = tx.NewCreateIndex().Model((*traceFunnels.Funnel)(nil)).Index("idx_trace_funnel_org_id").Column("org_id").Exec(ctx)
	if err != nil {
		return fmt.Errorf("failed to create org_id index: %w", err)
	}

	_, err = tx.NewCreateIndex().Model((*traceFunnels.Funnel)(nil)).Index("idx_trace_funnel_created_at").Column("created_at").Exec(ctx)
	if err != nil {
		return fmt.Errorf("failed to create created_at index: %w", err)
	}

	return tx.Commit()
}
// Down drops the trace_funnel table (its indexes go with it), reversing Up,
// inside a single transaction.
func (migration *addTraceFunnels) Down(ctx context.Context, db *bun.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// Rollback is a no-op after a successful Commit.
	defer tx.Rollback()

	// Drop trace_funnel table.
	_, err = tx.NewDropTable().Model((*traceFunnels.Funnel)(nil)).IfExists().Exec(ctx)
	if err != nil {
		// Wrap with %w (not %v) so callers can unwrap the driver error.
		return fmt.Errorf("failed to drop trace_funnel table: %w", err)
	}

	return tx.Commit()
}

View File

@@ -0,0 +1,15 @@
package traceFunnels
import (
"context"
"github.com/SigNoz/signoz/pkg/valuer"
)
// TraceFunnelStore is the persistence contract for funnels.
type TraceFunnelStore interface {
	// Create inserts a new funnel.
	Create(context.Context, *Funnel) error
	// Get returns the funnel with the given id.
	Get(context.Context, valuer.UUID) (*Funnel, error)
	// List returns all funnels visible to the caller's scope — TODO confirm
	// scoping (no org filter appears in this signature).
	List(context.Context) ([]*Funnel, error)
	// Update persists changes to an existing funnel.
	Update(context.Context, *Funnel) error
	// Delete removes the funnel with the given id.
	Delete(context.Context, valuer.UUID) error
}

View File

@@ -0,0 +1,113 @@
package traceFunnels
import (
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
// BaseMetadata carries the identity and audit metadata shared by funnels:
// funnel id, created/updated timestamps and users, name, description and the
// owning organization.
type BaseMetadata struct {
	types.Identifiable // funnel id
	types.TimeAuditable
	types.UserAuditable
	Name        string      `json:"funnel_name" bun:"name,type:text,notnull"` // funnel name
	Description string      `json:"description" bun:"description,type:text"`  // funnel description
	OrgID       valuer.UUID `json:"org_id" bun:"org_id,type:varchar,notnull"`
}

// Funnel is the bun model for the trace_funnel table (Funnel Core Data
// Structure, together with FunnelStep).
type Funnel struct {
	bun.BaseModel `bun:"table:trace_funnel"`

	BaseMetadata
	// Steps is persisted as a text column — presumably JSON-serialized;
	// TODO confirm the serialization used by the store.
	Steps []FunnelStep `json:"steps" bun:"steps,type:text,notnull"`
	Tags  string       `json:"tags" bun:"tags,type:text"`
	// CreatedByUser is resolved through the created_by -> users.id relation.
	CreatedByUser *types.User `json:"user" bun:"rel:belongs-to,join:created_by=id"`
}

// FunnelStep describes one stage of a funnel: which service/span it matches,
// optional extra filters, and latency/error options.
type FunnelStep struct {
	Id             valuer.UUID   `json:"id,omitempty"`
	Name           string        `json:"name,omitempty"`        // step name
	Description    string        `json:"description,omitempty"` // step description
	Order          int64         `json:"step_order"`
	ServiceName    string        `json:"service_name"`
	SpanName       string        `json:"span_name"`
	Filters        *v3.FilterSet `json:"filters,omitempty"`
	LatencyPointer string        `json:"latency_pointer,omitempty"`
	LatencyType    string        `json:"latency_type,omitempty"`
	HasErrors      bool          `json:"has_errors"`
}
// FunnelRequest represents all possible funnel-related requests; only the
// fields relevant to a given endpoint are expected to be set.
type FunnelRequest struct {
	FunnelID    valuer.UUID  `json:"funnel_id,omitempty"`
	Name        string       `json:"funnel_name,omitempty"`
	Timestamp   int64        `json:"timestamp,omitempty"`
	Description string       `json:"description,omitempty"`
	Steps       []FunnelStep `json:"steps,omitempty"`
	UserID      string       `json:"user_id,omitempty"`

	// Analytics specific fields.
	StartTime  int64 `json:"start_time,omitempty"`
	EndTime    int64 `json:"end_time,omitempty"`
	StepAOrder int64 `json:"step_a_order,omitempty"`
	StepBOrder int64 `json:"step_b_order,omitempty"`
}

// FunnelResponse represents all possible funnel-related responses; fields are
// populated per endpoint and omitted from JSON when empty.
type FunnelResponse struct {
	FunnelID    string       `json:"funnel_id,omitempty"`
	FunnelName  string       `json:"funnel_name,omitempty"`
	Description string       `json:"description,omitempty"`
	CreatedAt   int64        `json:"created_at,omitempty"`
	CreatedBy   string       `json:"created_by,omitempty"`
	UpdatedAt   int64        `json:"updated_at,omitempty"`
	UpdatedBy   string       `json:"updated_by,omitempty"`
	OrgID       string       `json:"org_id,omitempty"`
	UserEmail   string       `json:"user_email,omitempty"`
	Funnel      *Funnel      `json:"funnel,omitempty"`
	Steps       []FunnelStep `json:"steps,omitempty"`
}

// TimeRange represents a time range for analytics queries. Units are not
// fixed by this declaration — presumably epoch milli/nanoseconds; TODO
// confirm against callers.
type TimeRange struct {
	StartTime int64 `json:"start_time"`
	EndTime   int64 `json:"end_time"`
}

// StepTransitionRequest asks for transition analytics between two steps,
// identified by their step orders, within the embedded time range.
type StepTransitionRequest struct {
	TimeRange
	StepAOrder int64 `json:"step_a_order"`
	StepBOrder int64 `json:"step_b_order"`
}

// UserInfo represents basic user information attached to funnel responses.
type UserInfo struct {
	ID    string `json:"id"`
	Email string `json:"email"`
}
// Analytics on traces — kept commented out until the analytics endpoints land.
//type FunnelAnalytics struct {
//	TotalStart     int64   `json:"total_start"`
//	TotalComplete  int64   `json:"total_complete"`
//	ErrorCount     int64   `json:"error_count"`
//	AvgDurationMs  float64 `json:"avg_duration_ms"`
//	P99LatencyMs   float64 `json:"p99_latency_ms"`
//	ConversionRate float64 `json:"conversion_rate"`
//}
//type ValidTracesResponse struct {
//	TraceIDs []string `json:"trace_ids"`
//}

// FunnelStepFilter is the internal (non-JSON) filter used when querying
// traces for a single funnel step.
type FunnelStepFilter struct {
	StepNumber     int
	ServiceName    string
	SpanName       string
	LatencyPointer string // "start" or "end"
	CustomFilters  *v3.FilterSet
}

18
pkg/zeus/config.go Normal file
View File

@@ -0,0 +1,18 @@
package zeus
import (
"net/url"
"github.com/SigNoz/signoz/pkg/factory"
)
// Compile-time assertion that Config satisfies factory.Config.
var _ factory.Config = (*Config)(nil)

// Config holds the configuration for the zeus provider.
type Config struct {
	// URL is the base URL of the zeus service.
	URL *url.URL `mapstructure:"url"`
	// DeprecatedURL is presumably retained for backward compatibility with
	// older configuration keys — TODO confirm with the config loader.
	DeprecatedURL *url.URL `mapstructure:"deprecated_url"`
}

// Validate implements factory.Config; all values are currently accepted.
func (c Config) Validate() error {
	return nil
}

View File

@@ -0,0 +1,49 @@
package noopzeus
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/zeus"
)
// provider is a no-op zeus implementation: every method returns an
// unsupported error.
type provider struct{}
// NewProviderFactory returns the factory for the "noop" zeus provider.
func NewProviderFactory() factory.ProviderFactory[zeus.Zeus, zeus.Config] {
	build := func(ctx context.Context, providerSettings factory.ProviderSettings, config zeus.Config) (zeus.Zeus, error) {
		return New(ctx, providerSettings, config)
	}
	return factory.NewProviderFactory(factory.MustNewName("noop"), build)
}
// New constructs the no-op zeus provider; all arguments are ignored.
func New(_ context.Context, _ factory.ProviderSettings, _ zeus.Config) (zeus.Zeus, error) {
	return new(provider), nil
}
// GetLicense always fails: license fetching is unsupported in the noop build.
func (provider *provider) GetLicense(_ context.Context, _ string) ([]byte, error) {
	return nil, errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "fetching license is not supported")
}

// GetCheckoutURL always fails: checkout is unsupported in the noop build.
func (provider *provider) GetCheckoutURL(_ context.Context, _ string, _ []byte) ([]byte, error) {
	return nil, errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "getting the checkout url is not supported")
}

// GetPortalURL always fails: the portal is unsupported in the noop build.
func (provider *provider) GetPortalURL(_ context.Context, _ string, _ []byte) ([]byte, error) {
	return nil, errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "getting the portal url is not supported")
}

// GetDeployment always fails: deployments are unsupported in the noop build.
func (provider *provider) GetDeployment(_ context.Context, _ string) ([]byte, error) {
	return nil, errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "getting the deployment is not supported")
}

// PutMeters always fails: metering is unsupported in the noop build.
func (provider *provider) PutMeters(_ context.Context, _ string, _ []byte) error {
	return errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "putting meters is not supported")
}

// PutProfile always fails: profiles are unsupported in the noop build.
func (provider *provider) PutProfile(_ context.Context, _ string, _ []byte) error {
	return errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "putting profile is not supported")
}

// PutHost always fails: host reporting is unsupported in the noop build.
func (provider *provider) PutHost(_ context.Context, _ string, _ []byte) error {
	return errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "putting host is not supported")
}

35
pkg/zeus/zeus.go Normal file
View File

@@ -0,0 +1,35 @@
package zeus
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
)
var (
	// ErrCodeUnsupported signals that the active zeus provider does not
	// support the requested operation (e.g. the noop build).
	ErrCodeUnsupported = errors.MustNewCode("zeus_unsupported")
	// ErrCodeResponseMalformed signals that zeus returned a response the
	// client could not parse.
	ErrCodeResponseMalformed = errors.MustNewCode("zeus_response_malformed")
)

// Zeus is the client contract to the zeus service. Payloads and results are
// opaque []byte — callers own (de)serialization.
type Zeus interface {
	// Returns the license for the given key.
	GetLicense(context.Context, string) ([]byte, error)
	// Returns the checkout URL for the given license key.
	GetCheckoutURL(context.Context, string, []byte) ([]byte, error)
	// Returns the portal URL for the given license key.
	GetPortalURL(context.Context, string, []byte) ([]byte, error)
	// Returns the deployment for the given license key.
	GetDeployment(context.Context, string) ([]byte, error)
	// Puts the meters for the given license key.
	PutMeters(context.Context, string, []byte) error
	// Put profile for the given license key.
	PutProfile(context.Context, string, []byte) error
	// Put host for the given license key.
	PutHost(context.Context, string, []byte) error
}

View File

@@ -1,4 +1,5 @@
import http
import json
import requests
from wiremock.client import (
@@ -69,3 +70,162 @@ def test_apply_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
)
assert response.json()["count"] >= 1
def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
    """Refreshing licenses pulls the latest license from zeus and stores it.

    Stubs zeus's GET /v2/licenses/me, calls PUT /api/v3/licenses on signoz,
    then verifies both the sqlite row and that the zeus mock was hit.
    """
    make_http_mocks(
        signoz.zeus,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.GET,
                    url="/v2/licenses/me",
                    headers={
                        "X-Signoz-Cloud-Api-Key": {
                            WireMockMatchers.EQUAL_TO: "secret-key"
                        }
                    },
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "status": "success",
                        "data": {
                            "id": "0196360e-90cd-7a74-8313-1aa815ce2a67",
                            "key": "secret-key",
                            "valid_from": 1732146922,
                            "valid_until": -1,
                            "status": "VALID",
                            "state": "EVALUATING",
                            "plan": {
                                "name": "ENTERPRISE",
                            },
                            "platform": "CLOUD",
                            "features": [],
                            "event_queue": {},
                        },
                    },
                ),
                persistent=False,
            )
        ],
    )

    access_token = get_jwt_token("admin@integration.test", "password")

    # Trigger the license refresh; the endpoint returns 204 on success.
    response = requests.put(
        url=signoz.self.host_config.get("/api/v3/licenses"),
        headers={"Authorization": "Bearer " + access_token},
        timeout=5,
    )

    assert response.status_code == http.HTTPStatus.NO_CONTENT

    # The refreshed license must be persisted in the licenses_v3 table.
    cursor = signoz.sqlstore.conn.cursor()
    cursor.execute(
        "SELECT data FROM licenses_v3 WHERE id='0196360e-90cd-7a74-8313-1aa815ce2a67'"
    )
    record = cursor.fetchone()[0]

    assert json.loads(record)["valid_from"] == 1732146922

    # Confirm signoz actually called the (mocked) zeus license endpoint.
    response = requests.post(
        url=signoz.zeus.host_config.get("/__admin/requests/count"),
        json={"method": "GET", "url": "/v2/licenses/me"},
        timeout=5,
    )

    assert response.json()["count"] >= 1
def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
    """Checkout proxies to zeus and returns zeus's checkout URL as redirectURL."""
    make_http_mocks(
        signoz.zeus,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST,
                    url="/v2/subscriptions/me/sessions/checkout",
                    headers={
                        "X-Signoz-Cloud-Api-Key": {
                            WireMockMatchers.EQUAL_TO: "secret-key"
                        }
                    },
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "status": "success",
                        "data": {"url": "https://signoz.checkout.com"},
                    },
                ),
                persistent=False,
            )
        ],
    )

    access_token = get_jwt_token("admin@integration.test", "password")

    response = requests.post(
        url=signoz.self.host_config.get("/api/v1/checkout"),
        json={"url": "https://integration-signoz.com"},
        headers={"Authorization": "Bearer " + access_token},
        timeout=5,
    )

    assert response.status_code == http.HTTPStatus.OK
    # The redirect URL must be the one the zeus mock returned.
    assert response.json()["data"]["redirectURL"] == "https://signoz.checkout.com"

    # Confirm the zeus checkout endpoint was called exactly once.
    response = requests.post(
        url=signoz.zeus.host_config.get("/__admin/requests/count"),
        json={"method": "POST", "url": "/v2/subscriptions/me/sessions/checkout"},
        timeout=5,
    )

    assert response.json()["count"] == 1
def test_license_portal(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
    """Portal proxies to zeus and returns zeus's portal URL as redirectURL."""
    make_http_mocks(
        signoz.zeus,
        [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST,
                    url="/v2/subscriptions/me/sessions/portal",
                    headers={
                        "X-Signoz-Cloud-Api-Key": {
                            WireMockMatchers.EQUAL_TO: "secret-key"
                        }
                    },
                ),
                response=MappingResponse(
                    status=200,
                    json_body={
                        "status": "success",
                        "data": {"url": "https://signoz.portal.com"},
                    },
                ),
                persistent=False,
            )
        ],
    )

    access_token = get_jwt_token("admin@integration.test", "password")

    response = requests.post(
        url=signoz.self.host_config.get("/api/v1/portal"),
        json={"url": "https://integration-signoz.com"},
        headers={"Authorization": "Bearer " + access_token},
        timeout=5,
    )

    assert response.status_code == http.HTTPStatus.OK
    # The redirect URL must be the one the zeus mock returned.
    assert response.json()["data"]["redirectURL"] == "https://signoz.portal.com"

    # Confirm the zeus portal endpoint was called exactly once.
    response = requests.post(
        url=signoz.zeus.host_config.get("/__admin/requests/count"),
        json={"method": "POST", "url": "/v2/subscriptions/me/sessions/portal"},
        timeout=5,
    )

    assert response.json()["count"] == 1

View File

@@ -1,7 +1,9 @@
from fixtures import types
import requests
from http import HTTPStatus
import requests
from fixtures import types
def test_api_key(signoz: types.SigNoz, get_jwt_token) -> None:
admin_token = get_jwt_token("admin@integration.test", "password")