Compare commits
9 Commits
v0.78.2-js
...
chore/remo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
660c70d1e4 | ||
|
|
e723399f7f | ||
|
|
48936bed9b | ||
|
|
ee70474cc7 | ||
|
|
c3fa7144ee | ||
|
|
5dd02a5b8e | ||
|
|
c0f01e4cb9 | ||
|
|
fed84cb50a | ||
|
|
80545c4d07 |
@@ -153,9 +153,11 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
|
|||||||
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||||
ctx context.Context, orgId string, cloudProvider string,
|
ctx context.Context, orgId string, cloudProvider string,
|
||||||
) (*types.User, *basemodel.ApiError) {
|
) (*types.User, *basemodel.ApiError) {
|
||||||
cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
|
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
|
||||||
|
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
|
||||||
|
|
||||||
integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
|
// TODO(nitya): there should be orgId here
|
||||||
|
integrationUserResult, apiErr := ah.AppDao().GetUserByEmail(ctx, email)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
|
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
|
||||||
}
|
}
|
||||||
@@ -170,9 +172,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
|||||||
)
|
)
|
||||||
|
|
||||||
newUser := &types.User{
|
newUser := &types.User{
|
||||||
ID: cloudIntegrationUserId,
|
ID: uuid.New().String(),
|
||||||
Name: fmt.Sprintf("%s integration", cloudProvider),
|
Name: cloudIntegrationUser,
|
||||||
Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
|
Email: email,
|
||||||
TimeAuditable: types.TimeAuditable{
|
TimeAuditable: types.TimeAuditable{
|
||||||
CreatedAt: time.Now(),
|
CreatedAt: time.Now(),
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -5,16 +5,18 @@ import (
|
|||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"slices"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||||
"github.com/SigNoz/signoz/ee/types"
|
|
||||||
eeTypes "github.com/SigNoz/signoz/ee/types"
|
eeTypes "github.com/SigNoz/signoz/ee/types"
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/http/render"
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||||
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
|
baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/gorilla/mux"
|
"github.com/gorilla/mux"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
@@ -58,7 +60,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
|
|||||||
ah.Respond(w, &pat)
|
ah.Respond(w, &pat)
|
||||||
}
|
}
|
||||||
|
|
||||||
func validatePATRequest(req types.GettablePAT) error {
|
func validatePATRequest(req eeTypes.GettablePAT) error {
|
||||||
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
|
if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) {
|
||||||
return fmt.Errorf("valid role is required")
|
return fmt.Errorf("valid role is required")
|
||||||
}
|
}
|
||||||
@@ -74,12 +76,19 @@ func validatePATRequest(req types.GettablePAT) error {
|
|||||||
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
|
||||||
req := types.GettablePAT{}
|
req := eeTypes.GettablePAT{}
|
||||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
RespondError(w, model.BadRequest(err), nil)
|
RespondError(w, model.BadRequest(err), nil)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
idStr := mux.Vars(r)["id"]
|
||||||
|
id, err := valuer.NewUUID(idStr)
|
||||||
|
if err != nil {
|
||||||
|
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
user, err := auth.GetUserFromReqContext(r.Context())
|
user, err := auth.GetUserFromReqContext(r.Context())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
RespondError(w, &model.ApiError{
|
RespondError(w, &model.ApiError{
|
||||||
@@ -89,6 +98,25 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//get the pat
|
||||||
|
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
|
||||||
|
if paterr != nil {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// get the user
|
||||||
|
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
|
||||||
|
if usererr != nil {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
err = validatePATRequest(req)
|
err = validatePATRequest(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
RespondError(w, model.BadRequest(err), nil)
|
RespondError(w, model.BadRequest(err), nil)
|
||||||
@@ -96,12 +124,6 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
req.UpdatedByUserID = user.ID
|
req.UpdatedByUserID = user.ID
|
||||||
idStr := mux.Vars(r)["id"]
|
|
||||||
id, err := valuer.NewUUID(idStr)
|
|
||||||
if err != nil {
|
|
||||||
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
req.UpdatedAt = time.Now()
|
req.UpdatedAt = time.Now()
|
||||||
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
|
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
|
||||||
var apierr basemodel.BaseApiError
|
var apierr basemodel.BaseApiError
|
||||||
@@ -149,6 +171,25 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//get the pat
|
||||||
|
existingPAT, paterr := ah.AppDao().GetPATByID(ctx, user.OrgID, id)
|
||||||
|
if paterr != nil {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, paterr.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// get the user
|
||||||
|
createdByUser, usererr := ah.AppDao().GetUser(ctx, existingPAT.UserID)
|
||||||
|
if usererr != nil {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, usererr.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user pat cannot be updated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
|
zap.L().Info("Revoke PAT with id", zap.String("id", id.StringValue()))
|
||||||
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
|
if apierr := ah.AppDao().RevokePAT(ctx, user.OrgID, id, user.ID); apierr != nil {
|
||||||
RespondError(w, apierr, nil)
|
RespondError(w, apierr, nil)
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ import (
|
|||||||
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
|
basedao "github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||||
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
ossTypes "github.com/SigNoz/signoz/pkg/types"
|
|
||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
@@ -40,7 +39,6 @@ type ModelDao interface {
|
|||||||
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
|
UpdatePAT(ctx context.Context, orgID string, p types.GettablePAT, id valuer.UUID) basemodel.BaseApiError
|
||||||
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
|
GetPAT(ctx context.Context, pat string) (*types.GettablePAT, basemodel.BaseApiError)
|
||||||
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
|
GetPATByID(ctx context.Context, orgID string, id valuer.UUID) (*types.GettablePAT, basemodel.BaseApiError)
|
||||||
GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError)
|
|
||||||
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
|
ListPATs(ctx context.Context, orgID string) ([]types.GettablePAT, basemodel.BaseApiError)
|
||||||
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
|
RevokePAT(ctx context.Context, orgID string, id valuer.UUID, userID string) basemodel.BaseApiError
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/url"
|
"net/url"
|
||||||
"strings"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||||
@@ -44,7 +43,7 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
|
|||||||
}
|
}
|
||||||
|
|
||||||
user := &types.User{
|
user := &types.User{
|
||||||
ID: uuid.NewString(),
|
ID: uuid.New().String(),
|
||||||
Name: "",
|
Name: "",
|
||||||
Email: email,
|
Email: email,
|
||||||
Password: hash,
|
Password: hash,
|
||||||
@@ -162,12 +161,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
|
|||||||
// find domain from email
|
// find domain from email
|
||||||
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
|
orgDomain, apierr := m.GetDomainByEmail(ctx, email)
|
||||||
if apierr != nil {
|
if apierr != nil {
|
||||||
var emailDomain string
|
zap.L().Error("failed to get org domain from email", zap.String("email", email), zap.Error(apierr.ToError()))
|
||||||
emailComponents := strings.Split(email, "@")
|
|
||||||
if len(emailComponents) > 0 {
|
|
||||||
emailDomain = emailComponents[1]
|
|
||||||
}
|
|
||||||
zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError()))
|
|
||||||
return resp, apierr
|
return resp, apierr
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -196,27 +196,3 @@ func (m *modelDao) GetPATByID(ctx context.Context, orgID string, id valuer.UUID)
|
|||||||
|
|
||||||
return &patWithUser, nil
|
return &patWithUser, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// deprecated
|
|
||||||
func (m *modelDao) GetUserByPAT(ctx context.Context, orgID string, token string) (*ossTypes.GettableUser, basemodel.BaseApiError) {
|
|
||||||
users := []ossTypes.GettableUser{}
|
|
||||||
|
|
||||||
if err := m.DB().NewSelect().
|
|
||||||
Model(&users).
|
|
||||||
Column("u.id", "u.name", "u.email", "u.password", "u.created_at", "u.profile_picture_url", "u.org_id", "u.group_id").
|
|
||||||
Join("JOIN personal_access_tokens p ON u.id = p.user_id").
|
|
||||||
Where("p.token = ?", token).
|
|
||||||
Where("p.expires_at >= strftime('%s', 'now')").
|
|
||||||
Where("p.org_id = ?", orgID).
|
|
||||||
Scan(ctx); err != nil {
|
|
||||||
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(users) != 1 {
|
|
||||||
return nil, &model.ApiError{
|
|
||||||
Typ: model.ErrorInternal,
|
|
||||||
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return &users[0], nil
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -62,13 +62,7 @@ var BasicPlan = basemodel.FeatureSet{
|
|||||||
UsageLimit: -1,
|
UsageLimit: -1,
|
||||||
Route: "",
|
Route: "",
|
||||||
},
|
},
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.TraceFunnels,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var EnterprisePlan = basemodel.FeatureSet{
|
var EnterprisePlan = basemodel.FeatureSet{
|
||||||
@@ -121,11 +115,5 @@ var EnterprisePlan = basemodel.FeatureSet{
|
|||||||
UsageLimit: -1,
|
UsageLimit: -1,
|
||||||
Route: "",
|
Route: "",
|
||||||
},
|
},
|
||||||
basemodel.Feature{
|
|
||||||
Name: basemodel.TraceFunnels,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -19,11 +19,13 @@ var (
|
|||||||
var (
|
var (
|
||||||
Org = "org"
|
Org = "org"
|
||||||
User = "user"
|
User = "user"
|
||||||
|
CloudIntegration = "cloud_integration"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||||
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||||
|
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
|
||||||
)
|
)
|
||||||
|
|
||||||
type dialect struct {
|
type dialect struct {
|
||||||
@@ -211,6 +213,8 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
|
|||||||
fkReferences = append(fkReferences, OrgReference)
|
fkReferences = append(fkReferences, OrgReference)
|
||||||
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
||||||
fkReferences = append(fkReferences, UserReference)
|
fkReferences = append(fkReferences, UserReference)
|
||||||
|
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
|
||||||
|
fkReferences = append(fkReferences, CloudIntegrationReference)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -8,5 +8,4 @@ export enum FeatureKeys {
|
|||||||
PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
|
PREMIUM_SUPPORT = 'PREMIUM_SUPPORT',
|
||||||
ANOMALY_DETECTION = 'ANOMALY_DETECTION',
|
ANOMALY_DETECTION = 'ANOMALY_DETECTION',
|
||||||
ONBOARDING_V3 = 'ONBOARDING_V3',
|
ONBOARDING_V3 = 'ONBOARDING_V3',
|
||||||
TRACE_FUNNELS = 'TRACE_FUNNELS',
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,24 +2,17 @@ import './TracesModulePage.styles.scss';
|
|||||||
|
|
||||||
import RouteTab from 'components/RouteTab';
|
import RouteTab from 'components/RouteTab';
|
||||||
import { TabRoutes } from 'components/RouteTab/types';
|
import { TabRoutes } from 'components/RouteTab/types';
|
||||||
import { FeatureKeys } from 'constants/features';
|
|
||||||
import history from 'lib/history';
|
import history from 'lib/history';
|
||||||
import { useAppContext } from 'providers/App/App';
|
|
||||||
import { useLocation } from 'react-router-dom';
|
import { useLocation } from 'react-router-dom';
|
||||||
|
|
||||||
import { tracesExplorer, tracesFunnel, tracesSaveView } from './constants';
|
import { tracesExplorer, tracesFunnel, tracesSaveView } from './constants';
|
||||||
|
|
||||||
function TracesModulePage(): JSX.Element {
|
function TracesModulePage(): JSX.Element {
|
||||||
const { pathname } = useLocation();
|
const { pathname } = useLocation();
|
||||||
const { featureFlags } = useAppContext();
|
|
||||||
|
|
||||||
const isTraceFunnelsEnabled =
|
|
||||||
featureFlags?.find((flag) => flag.name === FeatureKeys.TRACE_FUNNELS)
|
|
||||||
?.active || false;
|
|
||||||
|
|
||||||
const routes: TabRoutes[] = [
|
const routes: TabRoutes[] = [
|
||||||
tracesExplorer,
|
tracesExplorer,
|
||||||
isTraceFunnelsEnabled ? tracesFunnel : null,
|
process.env.NODE_ENV === 'development' ? tracesFunnel : null,
|
||||||
tracesSaveView,
|
tracesSaveView,
|
||||||
].filter(Boolean) as TabRoutes[];
|
].filter(Boolean) as TabRoutes[];
|
||||||
|
|
||||||
|
|||||||
6
go.mod
6
go.mod
@@ -10,8 +10,7 @@ require (
|
|||||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
|
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
|
||||||
github.com/DATA-DOG/go-sqlmock v1.5.2
|
github.com/DATA-DOG/go-sqlmock v1.5.2
|
||||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
||||||
github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1
|
github.com/SigNoz/signoz-otel-collector v0.111.16
|
||||||
github.com/antlr4-go/antlr/v4 v4.13.1
|
|
||||||
github.com/antonmedv/expr v1.15.3
|
github.com/antonmedv/expr v1.15.3
|
||||||
github.com/cespare/xxhash/v2 v2.3.0
|
github.com/cespare/xxhash/v2 v2.3.0
|
||||||
github.com/coreos/go-oidc/v3 v3.11.0
|
github.com/coreos/go-oidc/v3 v3.11.0
|
||||||
@@ -90,9 +89,10 @@ require (
|
|||||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect
|
||||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
|
||||||
github.com/ClickHouse/ch-go v0.63.1 // indirect
|
github.com/ClickHouse/ch-go v0.61.5 // indirect
|
||||||
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
|
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
|
||||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||||
|
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
|
||||||
github.com/armon/go-metrics v0.4.1 // indirect
|
github.com/armon/go-metrics v0.4.1 // indirect
|
||||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||||
github.com/aws/aws-sdk-go v1.55.5 // indirect
|
github.com/aws/aws-sdk-go v1.55.5 // indirect
|
||||||
|
|||||||
12
go.sum
12
go.sum
@@ -85,8 +85,8 @@ github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mx
|
|||||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||||
github.com/ClickHouse/ch-go v0.63.1 h1:s2JyZvWLTCSAGdtjMBBmAgQQHMco6pawLJMOXi0FODM=
|
github.com/ClickHouse/ch-go v0.61.5 h1:zwR8QbYI0tsMiEcze/uIMK+Tz1D3XZXLdNrlaOpeEI4=
|
||||||
github.com/ClickHouse/ch-go v0.63.1/go.mod h1:I1kJJCL3WJcBMGe1m+HVK0+nREaG+JOYYBWjrDrF3R0=
|
github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg=
|
||||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.0 h1:AG4D/hW39qa58+JHQIFOSnxyL46H6h2lrmGGk17dhFo=
|
github.com/ClickHouse/clickhouse-go/v2 v2.30.0 h1:AG4D/hW39qa58+JHQIFOSnxyL46H6h2lrmGGk17dhFo=
|
||||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.0/go.mod h1:i9ZQAojcayW3RsdCb3YR+n+wC2h65eJsZCscZ1Z1wyo=
|
github.com/ClickHouse/clickhouse-go/v2 v2.30.0/go.mod h1:i9ZQAojcayW3RsdCb3YR+n+wC2h65eJsZCscZ1Z1wyo=
|
||||||
github.com/Code-Hex/go-generics-cache v1.5.1 h1:6vhZGc5M7Y/YD8cIUcY8kcuQLB4cHR7U+0KMqAA0KcU=
|
github.com/Code-Hex/go-generics-cache v1.5.1 h1:6vhZGc5M7Y/YD8cIUcY8kcuQLB4cHR7U+0KMqAA0KcU=
|
||||||
@@ -100,8 +100,8 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
|
|||||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
|
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
|
||||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
||||||
github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1 h1:ZpSNrOZBOH2iCJIPeER5X0mfxOe64yP3JRX7FzBNfwY=
|
github.com/SigNoz/signoz-otel-collector v0.111.16 h1:535uKH5Oux+35EsI+L3C6pnAP/Ye0PTCbVizXoL+VqE=
|
||||||
github.com/SigNoz/signoz-otel-collector v0.111.39-beta.1/go.mod h1:DCu/D+lqhsPNSGS4IMD+4gn7q06TGzOCKazSy+GURVc=
|
github.com/SigNoz/signoz-otel-collector v0.111.16/go.mod h1:HJ4m0LY1MPsuZmuRF7Ixb+bY8rxgRzI0VXzOedESsjg=
|
||||||
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
|
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
|
||||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||||
@@ -820,8 +820,8 @@ github.com/prometheus/prometheus v0.300.1/go.mod h1:gtTPY/XVyCdqqnjA3NzDMb0/nc5H
|
|||||||
github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4=
|
github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4=
|
||||||
github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
|
github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
|
||||||
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||||
github.com/redis/go-redis/v9 v9.6.3 h1:8Dr5ygF1QFXRxIH/m3Xg9MMG1rS8YCtAgosrsewT6i0=
|
github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4=
|
||||||
github.com/redis/go-redis/v9 v9.6.3/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
|
github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA=
|
||||||
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
|
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
|
||||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||||
|
|||||||
@@ -25,6 +25,25 @@ type postableAlert struct {
|
|||||||
Receivers []string `json:"receivers"`
|
Receivers []string `json:"receivers"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (pa *postableAlert) MarshalJSON() ([]byte, error) {
|
||||||
|
// Marshal the embedded PostableAlert to get its JSON representation.
|
||||||
|
alertJSON, err := json.Marshal(pa.PostableAlert)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unmarshal that JSON into a map so we can add extra fields.
|
||||||
|
var m map[string]interface{}
|
||||||
|
if err := json.Unmarshal(alertJSON, &m); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the Receivers field.
|
||||||
|
m["receivers"] = pa.Receivers
|
||||||
|
|
||||||
|
return json.Marshal(m)
|
||||||
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
alertsPath string = "/v1/alerts"
|
alertsPath string = "/v1/alerts"
|
||||||
routesPath string = "/v1/routes"
|
routesPath string = "/v1/routes"
|
||||||
|
|||||||
35
pkg/alertmanager/legacyalertmanager/provider_test.go
Normal file
35
pkg/alertmanager/legacyalertmanager/provider_test.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package legacyalertmanager
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||||
|
"github.com/prometheus/alertmanager/api/v2/models"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestProvider_TestAlert(t *testing.T) {
|
||||||
|
pa := &postableAlert{
|
||||||
|
PostableAlert: &alertmanagertypes.PostableAlert{
|
||||||
|
Alert: models.Alert{
|
||||||
|
Labels: models.LabelSet{
|
||||||
|
"alertname": "test",
|
||||||
|
},
|
||||||
|
GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1",
|
||||||
|
},
|
||||||
|
Annotations: models.LabelSet{
|
||||||
|
"summary": "test",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Receivers: []string{"receiver1", "receiver2"},
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := json.Marshal(pa)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed to marshal postable alert: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Contains(t, string(body), "receiver1")
|
||||||
|
assert.Contains(t, string(body), "receiver2")
|
||||||
|
}
|
||||||
@@ -3928,11 +3928,16 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
|
|||||||
var rows driver.Rows
|
var rows driver.Rows
|
||||||
var response v3.FilterAttributeKeyResponse
|
var response v3.FilterAttributeKeyResponse
|
||||||
|
|
||||||
|
tagTypeFilter := `tag_type != 'logfield'`
|
||||||
|
if req.TagType != "" {
|
||||||
|
tagTypeFilter = fmt.Sprintf(`tag_type != 'logfield' and tag_type = '%s'`, req.TagType)
|
||||||
|
}
|
||||||
|
|
||||||
if len(req.SearchText) != 0 {
|
if len(req.SearchText) != 0 {
|
||||||
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2)
|
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
|
||||||
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
|
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
|
||||||
} else {
|
} else {
|
||||||
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' limit $1", r.logsDB, r.logsTagAttributeTableV2)
|
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where %s limit $1", r.logsDB, r.logsTagAttributeTableV2, tagTypeFilter)
|
||||||
rows, err = r.db.Query(ctx, query, req.Limit)
|
rows, err = r.db.Query(ctx, query, req.Limit)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -3967,7 +3972,9 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
|
|||||||
response.AttributeKeys = append(response.AttributeKeys, key)
|
response.AttributeKeys = append(response.AttributeKeys, key)
|
||||||
}
|
}
|
||||||
|
|
||||||
// add other attributes
|
// add other attributes only when the tagType is not specified
|
||||||
|
// i.e retrieve all attributes
|
||||||
|
if req.TagType == "" {
|
||||||
for _, f := range constants.StaticFieldsLogsV3 {
|
for _, f := range constants.StaticFieldsLogsV3 {
|
||||||
if (v3.AttributeKey{} == f) {
|
if (v3.AttributeKey{} == f) {
|
||||||
continue
|
continue
|
||||||
@@ -3976,6 +3983,7 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
|
|||||||
response.AttributeKeys = append(response.AttributeKeys, f)
|
response.AttributeKeys = append(response.AttributeKeys, f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return &response, nil
|
return &response, nil
|
||||||
}
|
}
|
||||||
@@ -4715,7 +4723,12 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
|
|||||||
var rows driver.Rows
|
var rows driver.Rows
|
||||||
var response v3.FilterAttributeKeyResponse
|
var response v3.FilterAttributeKeyResponse
|
||||||
|
|
||||||
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and tag_type != 'spanfield' LIMIT $2", r.TraceDB, r.spanAttributeTableV2)
|
tagTypeFilter := `tag_type != 'spanfield'`
|
||||||
|
if req.TagType != "" {
|
||||||
|
tagTypeFilter = fmt.Sprintf(`tag_type != 'spanfield' and tag_type = '%s'`, req.TagType)
|
||||||
|
}
|
||||||
|
|
||||||
|
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and %s LIMIT $2", r.TraceDB, r.spanAttributeTableV2, tagTypeFilter)
|
||||||
|
|
||||||
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
|
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
|
||||||
|
|
||||||
@@ -4760,7 +4773,9 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
|
|||||||
fields = constants.DeprecatedStaticFieldsTraces
|
fields = constants.DeprecatedStaticFieldsTraces
|
||||||
}
|
}
|
||||||
|
|
||||||
// add the new static fields
|
// add the new static fields only when the tagType is not specified
|
||||||
|
// i.e retrieve all attributes
|
||||||
|
if req.TagType == "" {
|
||||||
for _, f := range fields {
|
for _, f := range fields {
|
||||||
if (v3.AttributeKey{} == f) {
|
if (v3.AttributeKey{} == f) {
|
||||||
continue
|
continue
|
||||||
@@ -4769,6 +4784,7 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
|
|||||||
response.AttributeKeys = append(response.AttributeKeys, f)
|
response.AttributeKeys = append(response.AttributeKeys, f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return &response, nil
|
return &response, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,68 +8,59 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/google/uuid"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/jmoiron/sqlx"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
)
|
)
|
||||||
|
|
||||||
type cloudProviderAccountsRepository interface {
|
type cloudProviderAccountsRepository interface {
|
||||||
listConnected(ctx context.Context, cloudProvider string) ([]AccountRecord, *model.ApiError)
|
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
|
||||||
|
|
||||||
get(ctx context.Context, cloudProvider string, id string) (*AccountRecord, *model.ApiError)
|
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
|
||||||
|
|
||||||
getConnectedCloudAccount(
|
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
|
||||||
ctx context.Context, cloudProvider string, cloudAccountId string,
|
|
||||||
) (*AccountRecord, *model.ApiError)
|
|
||||||
|
|
||||||
// Insert an account or update it by (cloudProvider, id)
|
// Insert an account or update it by (cloudProvider, id)
|
||||||
// for specified non-empty fields
|
// for specified non-empty fields
|
||||||
upsert(
|
upsert(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
cloudProvider string,
|
orgId string,
|
||||||
|
provider string,
|
||||||
id *string,
|
id *string,
|
||||||
config *AccountConfig,
|
config *types.AccountConfig,
|
||||||
cloudAccountId *string,
|
accountId *string,
|
||||||
agentReport *AgentReport,
|
agentReport *types.AgentReport,
|
||||||
removedAt *time.Time,
|
removedAt *time.Time,
|
||||||
) (*AccountRecord, *model.ApiError)
|
) (*types.CloudIntegration, *model.ApiError)
|
||||||
}
|
}
|
||||||
|
|
||||||
func newCloudProviderAccountsRepository(db *sqlx.DB) (
|
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
|
||||||
*cloudProviderAccountsSQLRepository, error,
|
*cloudProviderAccountsSQLRepository, error,
|
||||||
) {
|
) {
|
||||||
return &cloudProviderAccountsSQLRepository{
|
return &cloudProviderAccountsSQLRepository{
|
||||||
db: db,
|
store: store,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type cloudProviderAccountsSQLRepository struct {
|
type cloudProviderAccountsSQLRepository struct {
|
||||||
db *sqlx.DB
|
store sqlstore.SQLStore
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *cloudProviderAccountsSQLRepository) listConnected(
|
func (r *cloudProviderAccountsSQLRepository) listConnected(
|
||||||
ctx context.Context, cloudProvider string,
|
ctx context.Context, orgId string, cloudProvider string,
|
||||||
) ([]AccountRecord, *model.ApiError) {
|
) ([]types.CloudIntegration, *model.ApiError) {
|
||||||
accounts := []AccountRecord{}
|
accounts := []types.CloudIntegration{}
|
||||||
|
|
||||||
|
err := r.store.BunDB().NewSelect().
|
||||||
|
Model(&accounts).
|
||||||
|
Where("org_id = ?", orgId).
|
||||||
|
Where("provider = ?", cloudProvider).
|
||||||
|
Where("removed_at is NULL").
|
||||||
|
Where("account_id is not NULL").
|
||||||
|
Where("last_agent_report is not NULL").
|
||||||
|
Order("created_at").
|
||||||
|
Scan(ctx)
|
||||||
|
|
||||||
err := r.db.SelectContext(
|
|
||||||
ctx, &accounts, `
|
|
||||||
select
|
|
||||||
cloud_provider,
|
|
||||||
id,
|
|
||||||
config_json,
|
|
||||||
cloud_account_id,
|
|
||||||
last_agent_report_json,
|
|
||||||
created_at,
|
|
||||||
removed_at
|
|
||||||
from cloud_integrations_accounts
|
|
||||||
where
|
|
||||||
cloud_provider=$1
|
|
||||||
and removed_at is NULL
|
|
||||||
and cloud_account_id is not NULL
|
|
||||||
and last_agent_report_json is not NULL
|
|
||||||
order by created_at
|
|
||||||
`, cloudProvider,
|
|
||||||
)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"could not query connected cloud accounts: %w", err,
|
"could not query connected cloud accounts: %w", err,
|
||||||
@@ -80,27 +71,16 @@ func (r *cloudProviderAccountsSQLRepository) listConnected(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (r *cloudProviderAccountsSQLRepository) get(
|
func (r *cloudProviderAccountsSQLRepository) get(
|
||||||
ctx context.Context, cloudProvider string, id string,
|
ctx context.Context, orgId string, provider string, id string,
|
||||||
) (*AccountRecord, *model.ApiError) {
|
) (*types.CloudIntegration, *model.ApiError) {
|
||||||
var result AccountRecord
|
var result types.CloudIntegration
|
||||||
|
|
||||||
err := r.db.GetContext(
|
err := r.store.BunDB().NewSelect().
|
||||||
ctx, &result, `
|
Model(&result).
|
||||||
select
|
Where("org_id = ?", orgId).
|
||||||
cloud_provider,
|
Where("provider = ?", provider).
|
||||||
id,
|
Where("id = ?", id).
|
||||||
config_json,
|
Scan(ctx)
|
||||||
cloud_account_id,
|
|
||||||
last_agent_report_json,
|
|
||||||
created_at,
|
|
||||||
removed_at
|
|
||||||
from cloud_integrations_accounts
|
|
||||||
where
|
|
||||||
cloud_provider=$1
|
|
||||||
and id=$2
|
|
||||||
`,
|
|
||||||
cloudProvider, id,
|
|
||||||
)
|
|
||||||
|
|
||||||
if err == sql.ErrNoRows {
|
if err == sql.ErrNoRows {
|
||||||
return nil, model.NotFoundError(fmt.Errorf(
|
return nil, model.NotFoundError(fmt.Errorf(
|
||||||
@@ -116,33 +96,22 @@ func (r *cloudProviderAccountsSQLRepository) get(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
|
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
|
||||||
ctx context.Context, cloudProvider string, cloudAccountId string,
|
ctx context.Context, orgId string, provider string, accountId string,
|
||||||
) (*AccountRecord, *model.ApiError) {
|
) (*types.CloudIntegration, *model.ApiError) {
|
||||||
var result AccountRecord
|
var result types.CloudIntegration
|
||||||
|
|
||||||
err := r.db.GetContext(
|
err := r.store.BunDB().NewSelect().
|
||||||
ctx, &result, `
|
Model(&result).
|
||||||
select
|
Where("org_id = ?", orgId).
|
||||||
cloud_provider,
|
Where("provider = ?", provider).
|
||||||
id,
|
Where("account_id = ?", accountId).
|
||||||
config_json,
|
Where("last_agent_report is not NULL").
|
||||||
cloud_account_id,
|
Where("removed_at is NULL").
|
||||||
last_agent_report_json,
|
Scan(ctx)
|
||||||
created_at,
|
|
||||||
removed_at
|
|
||||||
from cloud_integrations_accounts
|
|
||||||
where
|
|
||||||
cloud_provider=$1
|
|
||||||
and cloud_account_id=$2
|
|
||||||
and last_agent_report_json is not NULL
|
|
||||||
and removed_at is NULL
|
|
||||||
`,
|
|
||||||
cloudProvider, cloudAccountId,
|
|
||||||
)
|
|
||||||
|
|
||||||
if err == sql.ErrNoRows {
|
if err == sql.ErrNoRows {
|
||||||
return nil, model.NotFoundError(fmt.Errorf(
|
return nil, model.NotFoundError(fmt.Errorf(
|
||||||
"couldn't find connected cloud account %s", cloudAccountId,
|
"couldn't find connected cloud account %s", accountId,
|
||||||
))
|
))
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
@@ -155,17 +124,18 @@ func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
|
|||||||
|
|
||||||
func (r *cloudProviderAccountsSQLRepository) upsert(
|
func (r *cloudProviderAccountsSQLRepository) upsert(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
cloudProvider string,
|
orgId string,
|
||||||
|
provider string,
|
||||||
id *string,
|
id *string,
|
||||||
config *AccountConfig,
|
config *types.AccountConfig,
|
||||||
cloudAccountId *string,
|
accountId *string,
|
||||||
agentReport *AgentReport,
|
agentReport *types.AgentReport,
|
||||||
removedAt *time.Time,
|
removedAt *time.Time,
|
||||||
) (*AccountRecord, *model.ApiError) {
|
) (*types.CloudIntegration, *model.ApiError) {
|
||||||
// Insert
|
// Insert
|
||||||
if id == nil {
|
if id == nil {
|
||||||
newId := uuid.NewString()
|
temp := valuer.GenerateUUID().StringValue()
|
||||||
id = &newId
|
id = &temp
|
||||||
}
|
}
|
||||||
|
|
||||||
// Prepare clause for setting values in `on conflict do update`
|
// Prepare clause for setting values in `on conflict do update`
|
||||||
@@ -176,19 +146,19 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
|
|||||||
|
|
||||||
if config != nil {
|
if config != nil {
|
||||||
onConflictSetStmts = append(
|
onConflictSetStmts = append(
|
||||||
onConflictSetStmts, setColStatement("config_json"),
|
onConflictSetStmts, setColStatement("config"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if cloudAccountId != nil {
|
if accountId != nil {
|
||||||
onConflictSetStmts = append(
|
onConflictSetStmts = append(
|
||||||
onConflictSetStmts, setColStatement("cloud_account_id"),
|
onConflictSetStmts, setColStatement("account_id"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if agentReport != nil {
|
if agentReport != nil {
|
||||||
onConflictSetStmts = append(
|
onConflictSetStmts = append(
|
||||||
onConflictSetStmts, setColStatement("last_agent_report_json"),
|
onConflictSetStmts, setColStatement("last_agent_report"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -198,37 +168,45 @@ func (r *cloudProviderAccountsSQLRepository) upsert(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// set updated_at to current timestamp if it's an upsert
|
||||||
|
onConflictSetStmts = append(
|
||||||
|
onConflictSetStmts, setColStatement("updated_at"),
|
||||||
|
)
|
||||||
|
|
||||||
onConflictClause := ""
|
onConflictClause := ""
|
||||||
if len(onConflictSetStmts) > 0 {
|
if len(onConflictSetStmts) > 0 {
|
||||||
onConflictClause = fmt.Sprintf(
|
onConflictClause = fmt.Sprintf(
|
||||||
"on conflict(cloud_provider, id) do update SET\n%s",
|
"conflict(id, provider, org_id) do update SET\n%s",
|
||||||
strings.Join(onConflictSetStmts, ",\n"),
|
strings.Join(onConflictSetStmts, ",\n"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
insertQuery := fmt.Sprintf(`
|
integration := types.CloudIntegration{
|
||||||
INSERT INTO cloud_integrations_accounts (
|
OrgID: orgId,
|
||||||
cloud_provider,
|
Provider: provider,
|
||||||
id,
|
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
|
||||||
config_json,
|
TimeAuditable: types.TimeAuditable{
|
||||||
cloud_account_id,
|
CreatedAt: time.Now(),
|
||||||
last_agent_report_json,
|
UpdatedAt: time.Now(),
|
||||||
removed_at
|
},
|
||||||
) values ($1, $2, $3, $4, $5, $6)
|
Config: config,
|
||||||
%s`, onConflictClause,
|
AccountID: accountId,
|
||||||
)
|
LastAgentReport: agentReport,
|
||||||
|
RemovedAt: removedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, dbErr := r.store.BunDB().NewInsert().
|
||||||
|
Model(&integration).
|
||||||
|
On(onConflictClause).
|
||||||
|
Exec(ctx)
|
||||||
|
|
||||||
_, dbErr := r.db.ExecContext(
|
|
||||||
ctx, insertQuery,
|
|
||||||
cloudProvider, id, config, cloudAccountId, agentReport, removedAt,
|
|
||||||
)
|
|
||||||
if dbErr != nil {
|
if dbErr != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"could not upsert cloud account record: %w", dbErr,
|
"could not upsert cloud account record: %w", dbErr,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
upsertedAccount, apiErr := r.get(ctx, cloudProvider, *id)
|
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"couldn't fetch upserted account by id: %w", apiErr.ToError(),
|
"couldn't fetch upserted account by id: %w", apiErr.ToError(),
|
||||||
|
|||||||
@@ -33,12 +33,12 @@ type Controller struct {
|
|||||||
func NewController(sqlStore sqlstore.SQLStore) (
|
func NewController(sqlStore sqlstore.SQLStore) (
|
||||||
*Controller, error,
|
*Controller, error,
|
||||||
) {
|
) {
|
||||||
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore.SQLxDB())
|
accountsRepo, err := newCloudProviderAccountsRepository(sqlStore)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("couldn't create cloud provider accounts repo: %w", err)
|
return nil, fmt.Errorf("couldn't create cloud provider accounts repo: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
serviceConfigRepo, err := newServiceConfigRepository(sqlStore.SQLxDB())
|
serviceConfigRepo, err := newServiceConfigRepository(sqlStore)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("couldn't create cloud provider service config repo: %w", err)
|
return nil, fmt.Errorf("couldn't create cloud provider service config repo: %w", err)
|
||||||
}
|
}
|
||||||
@@ -49,19 +49,12 @@ func NewController(sqlStore sqlstore.SQLStore) (
|
|||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type Account struct {
|
|
||||||
Id string `json:"id"`
|
|
||||||
CloudAccountId string `json:"cloud_account_id"`
|
|
||||||
Config AccountConfig `json:"config"`
|
|
||||||
Status AccountStatus `json:"status"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ConnectedAccountsListResponse struct {
|
type ConnectedAccountsListResponse struct {
|
||||||
Accounts []Account `json:"accounts"`
|
Accounts []types.Account `json:"accounts"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) ListConnectedAccounts(
|
func (c *Controller) ListConnectedAccounts(
|
||||||
ctx context.Context, cloudProvider string,
|
ctx context.Context, orgId string, cloudProvider string,
|
||||||
) (
|
) (
|
||||||
*ConnectedAccountsListResponse, *model.ApiError,
|
*ConnectedAccountsListResponse, *model.ApiError,
|
||||||
) {
|
) {
|
||||||
@@ -69,14 +62,14 @@ func (c *Controller) ListConnectedAccounts(
|
|||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
|
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgId, cloudProvider)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
|
return nil, model.WrapApiError(apiErr, "couldn't list cloud accounts")
|
||||||
}
|
}
|
||||||
|
|
||||||
connectedAccounts := []Account{}
|
connectedAccounts := []types.Account{}
|
||||||
for _, a := range accountRecords {
|
for _, a := range accountRecords {
|
||||||
connectedAccounts = append(connectedAccounts, a.account())
|
connectedAccounts = append(connectedAccounts, a.Account())
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ConnectedAccountsListResponse{
|
return &ConnectedAccountsListResponse{
|
||||||
@@ -88,7 +81,7 @@ type GenerateConnectionUrlRequest struct {
|
|||||||
// Optional. To be specified for updates.
|
// Optional. To be specified for updates.
|
||||||
AccountId *string `json:"account_id,omitempty"`
|
AccountId *string `json:"account_id,omitempty"`
|
||||||
|
|
||||||
AccountConfig AccountConfig `json:"account_config"`
|
AccountConfig types.AccountConfig `json:"account_config"`
|
||||||
|
|
||||||
AgentConfig SigNozAgentConfig `json:"agent_config"`
|
AgentConfig SigNozAgentConfig `json:"agent_config"`
|
||||||
}
|
}
|
||||||
@@ -109,7 +102,7 @@ type GenerateConnectionUrlResponse struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) GenerateConnectionUrl(
|
func (c *Controller) GenerateConnectionUrl(
|
||||||
ctx context.Context, cloudProvider string, req GenerateConnectionUrlRequest,
|
ctx context.Context, orgId string, cloudProvider string, req GenerateConnectionUrlRequest,
|
||||||
) (*GenerateConnectionUrlResponse, *model.ApiError) {
|
) (*GenerateConnectionUrlResponse, *model.ApiError) {
|
||||||
// Account connection with a simple connection URL may not be available for all providers.
|
// Account connection with a simple connection URL may not be available for all providers.
|
||||||
if cloudProvider != "aws" {
|
if cloudProvider != "aws" {
|
||||||
@@ -117,7 +110,7 @@ func (c *Controller) GenerateConnectionUrl(
|
|||||||
}
|
}
|
||||||
|
|
||||||
account, apiErr := c.accountsRepo.upsert(
|
account, apiErr := c.accountsRepo.upsert(
|
||||||
ctx, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
|
ctx, orgId, cloudProvider, req.AccountId, &req.AccountConfig, nil, nil, nil,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
|
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
|
||||||
@@ -135,7 +128,7 @@ func (c *Controller) GenerateConnectionUrl(
|
|||||||
"param_SigNozIntegrationAgentVersion": agentVersion,
|
"param_SigNozIntegrationAgentVersion": agentVersion,
|
||||||
"param_SigNozApiUrl": req.AgentConfig.SigNozAPIUrl,
|
"param_SigNozApiUrl": req.AgentConfig.SigNozAPIUrl,
|
||||||
"param_SigNozApiKey": req.AgentConfig.SigNozAPIKey,
|
"param_SigNozApiKey": req.AgentConfig.SigNozAPIKey,
|
||||||
"param_SigNozAccountId": account.Id,
|
"param_SigNozAccountId": account.ID.StringValue(),
|
||||||
"param_IngestionUrl": req.AgentConfig.IngestionUrl,
|
"param_IngestionUrl": req.AgentConfig.IngestionUrl,
|
||||||
"param_IngestionKey": req.AgentConfig.IngestionKey,
|
"param_IngestionKey": req.AgentConfig.IngestionKey,
|
||||||
"stackName": "signoz-integration",
|
"stackName": "signoz-integration",
|
||||||
@@ -148,7 +141,7 @@ func (c *Controller) GenerateConnectionUrl(
|
|||||||
}
|
}
|
||||||
|
|
||||||
return &GenerateConnectionUrlResponse{
|
return &GenerateConnectionUrlResponse{
|
||||||
AccountId: account.Id,
|
AccountId: account.ID.StringValue(),
|
||||||
ConnectionUrl: connectionUrl,
|
ConnectionUrl: connectionUrl,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
@@ -156,11 +149,11 @@ func (c *Controller) GenerateConnectionUrl(
|
|||||||
type AccountStatusResponse struct {
|
type AccountStatusResponse struct {
|
||||||
Id string `json:"id"`
|
Id string `json:"id"`
|
||||||
CloudAccountId *string `json:"cloud_account_id,omitempty"`
|
CloudAccountId *string `json:"cloud_account_id,omitempty"`
|
||||||
Status AccountStatus `json:"status"`
|
Status types.AccountStatus `json:"status"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) GetAccountStatus(
|
func (c *Controller) GetAccountStatus(
|
||||||
ctx context.Context, cloudProvider string, accountId string,
|
ctx context.Context, orgId string, cloudProvider string, accountId string,
|
||||||
) (
|
) (
|
||||||
*AccountStatusResponse, *model.ApiError,
|
*AccountStatusResponse, *model.ApiError,
|
||||||
) {
|
) {
|
||||||
@@ -168,23 +161,23 @@ func (c *Controller) GetAccountStatus(
|
|||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
|
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
resp := AccountStatusResponse{
|
resp := AccountStatusResponse{
|
||||||
Id: account.Id,
|
Id: account.ID.StringValue(),
|
||||||
CloudAccountId: account.CloudAccountId,
|
CloudAccountId: account.AccountID,
|
||||||
Status: account.status(),
|
Status: account.Status(),
|
||||||
}
|
}
|
||||||
|
|
||||||
return &resp, nil
|
return &resp, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type AgentCheckInRequest struct {
|
type AgentCheckInRequest struct {
|
||||||
AccountId string `json:"account_id"`
|
ID string `json:"account_id"`
|
||||||
CloudAccountId string `json:"cloud_account_id"`
|
AccountID string `json:"cloud_account_id"`
|
||||||
// Arbitrary cloud specific Agent data
|
// Arbitrary cloud specific Agent data
|
||||||
Data map[string]any `json:"data,omitempty"`
|
Data map[string]any `json:"data,omitempty"`
|
||||||
}
|
}
|
||||||
@@ -204,35 +197,35 @@ type IntegrationConfigForAgent struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) CheckInAsAgent(
|
func (c *Controller) CheckInAsAgent(
|
||||||
ctx context.Context, cloudProvider string, req AgentCheckInRequest,
|
ctx context.Context, orgId string, cloudProvider string, req AgentCheckInRequest,
|
||||||
) (*AgentCheckInResponse, *model.ApiError) {
|
) (*AgentCheckInResponse, *model.ApiError) {
|
||||||
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
existingAccount, apiErr := c.accountsRepo.get(ctx, cloudProvider, req.AccountId)
|
existingAccount, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, req.ID)
|
||||||
if existingAccount != nil && existingAccount.CloudAccountId != nil && *existingAccount.CloudAccountId != req.CloudAccountId {
|
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
|
||||||
return nil, model.BadRequest(fmt.Errorf(
|
return nil, model.BadRequest(fmt.Errorf(
|
||||||
"can't check in with new %s account id %s for account %s with existing %s id %s",
|
"can't check in with new %s account id %s for account %s with existing %s id %s",
|
||||||
cloudProvider, req.CloudAccountId, existingAccount.Id, cloudProvider, *existingAccount.CloudAccountId,
|
cloudProvider, req.AccountID, existingAccount.ID.StringValue(), cloudProvider, *existingAccount.AccountID,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, cloudProvider, req.CloudAccountId)
|
existingAccount, apiErr = c.accountsRepo.getConnectedCloudAccount(ctx, orgId, cloudProvider, req.AccountID)
|
||||||
if existingAccount != nil && existingAccount.Id != req.AccountId {
|
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
|
||||||
return nil, model.BadRequest(fmt.Errorf(
|
return nil, model.BadRequest(fmt.Errorf(
|
||||||
"can't check in to %s account %s with id %s. already connected with id %s",
|
"can't check in to %s account %s with id %s. already connected with id %s",
|
||||||
cloudProvider, req.CloudAccountId, req.AccountId, existingAccount.Id,
|
cloudProvider, req.AccountID, req.ID, existingAccount.ID.StringValue(),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
agentReport := AgentReport{
|
agentReport := types.AgentReport{
|
||||||
TimestampMillis: time.Now().UnixMilli(),
|
TimestampMillis: time.Now().UnixMilli(),
|
||||||
Data: req.Data,
|
Data: req.Data,
|
||||||
}
|
}
|
||||||
|
|
||||||
account, apiErr := c.accountsRepo.upsert(
|
account, apiErr := c.accountsRepo.upsert(
|
||||||
ctx, cloudProvider, &req.AccountId, nil, &req.CloudAccountId, &agentReport, nil,
|
ctx, orgId, cloudProvider, &req.ID, nil, &req.AccountID, &agentReport, nil,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
|
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
|
||||||
@@ -265,7 +258,7 @@ func (c *Controller) CheckInAsAgent(
|
|||||||
}
|
}
|
||||||
|
|
||||||
svcConfigs, apiErr := c.serviceConfigRepo.getAllForAccount(
|
svcConfigs, apiErr := c.serviceConfigRepo.getAllForAccount(
|
||||||
ctx, cloudProvider, *account.CloudAccountId,
|
ctx, orgId, account.ID.StringValue(),
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
@@ -298,54 +291,55 @@ func (c *Controller) CheckInAsAgent(
|
|||||||
}
|
}
|
||||||
|
|
||||||
return &AgentCheckInResponse{
|
return &AgentCheckInResponse{
|
||||||
AccountId: account.Id,
|
AccountId: account.ID.StringValue(),
|
||||||
CloudAccountId: *account.CloudAccountId,
|
CloudAccountId: *account.AccountID,
|
||||||
RemovedAt: account.RemovedAt,
|
RemovedAt: account.RemovedAt,
|
||||||
IntegrationConfig: agentConfig,
|
IntegrationConfig: agentConfig,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type UpdateAccountConfigRequest struct {
|
type UpdateAccountConfigRequest struct {
|
||||||
Config AccountConfig `json:"config"`
|
Config types.AccountConfig `json:"config"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) UpdateAccountConfig(
|
func (c *Controller) UpdateAccountConfig(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
cloudProvider string,
|
cloudProvider string,
|
||||||
accountId string,
|
accountId string,
|
||||||
req UpdateAccountConfigRequest,
|
req UpdateAccountConfigRequest,
|
||||||
) (*Account, *model.ApiError) {
|
) (*types.Account, *model.ApiError) {
|
||||||
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
accountRecord, apiErr := c.accountsRepo.upsert(
|
accountRecord, apiErr := c.accountsRepo.upsert(
|
||||||
ctx, cloudProvider, &accountId, &req.Config, nil, nil, nil,
|
ctx, orgId, cloudProvider, &accountId, &req.Config, nil, nil, nil,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
|
return nil, model.WrapApiError(apiErr, "couldn't upsert cloud account")
|
||||||
}
|
}
|
||||||
|
|
||||||
account := accountRecord.account()
|
account := accountRecord.Account()
|
||||||
|
|
||||||
return &account, nil
|
return &account, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) DisconnectAccount(
|
func (c *Controller) DisconnectAccount(
|
||||||
ctx context.Context, cloudProvider string, accountId string,
|
ctx context.Context, orgId string, cloudProvider string, accountId string,
|
||||||
) (*AccountRecord, *model.ApiError) {
|
) (*types.CloudIntegration, *model.ApiError) {
|
||||||
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
if apiErr := validateCloudProviderName(cloudProvider); apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
account, apiErr := c.accountsRepo.get(ctx, cloudProvider, accountId)
|
account, apiErr := c.accountsRepo.get(ctx, orgId, cloudProvider, accountId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
|
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
|
||||||
}
|
}
|
||||||
|
|
||||||
tsNow := time.Now()
|
tsNow := time.Now()
|
||||||
account, apiErr = c.accountsRepo.upsert(
|
account, apiErr = c.accountsRepo.upsert(
|
||||||
ctx, cloudProvider, &accountId, nil, nil, nil, &tsNow,
|
ctx, orgId, cloudProvider, &accountId, nil, nil, nil, &tsNow,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
|
return nil, model.WrapApiError(apiErr, "couldn't disconnect account")
|
||||||
@@ -360,6 +354,7 @@ type ListServicesResponse struct {
|
|||||||
|
|
||||||
func (c *Controller) ListServices(
|
func (c *Controller) ListServices(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgID string,
|
||||||
cloudProvider string,
|
cloudProvider string,
|
||||||
cloudAccountId *string,
|
cloudAccountId *string,
|
||||||
) (*ListServicesResponse, *model.ApiError) {
|
) (*ListServicesResponse, *model.ApiError) {
|
||||||
@@ -373,10 +368,16 @@ func (c *Controller) ListServices(
|
|||||||
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
|
return nil, model.WrapApiError(apiErr, "couldn't list cloud services")
|
||||||
}
|
}
|
||||||
|
|
||||||
svcConfigs := map[string]*CloudServiceConfig{}
|
svcConfigs := map[string]*types.CloudServiceConfig{}
|
||||||
if cloudAccountId != nil {
|
if cloudAccountId != nil {
|
||||||
|
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
|
||||||
|
ctx, orgID, cloudProvider, *cloudAccountId,
|
||||||
|
)
|
||||||
|
if apiErr != nil {
|
||||||
|
return nil, model.WrapApiError(apiErr, "couldn't get active account")
|
||||||
|
}
|
||||||
svcConfigs, apiErr = c.serviceConfigRepo.getAllForAccount(
|
svcConfigs, apiErr = c.serviceConfigRepo.getAllForAccount(
|
||||||
ctx, cloudProvider, *cloudAccountId,
|
ctx, orgID, activeAccount.ID.StringValue(),
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
@@ -400,6 +401,7 @@ func (c *Controller) ListServices(
|
|||||||
|
|
||||||
func (c *Controller) GetServiceDetails(
|
func (c *Controller) GetServiceDetails(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgID string,
|
||||||
cloudProvider string,
|
cloudProvider string,
|
||||||
serviceId string,
|
serviceId string,
|
||||||
cloudAccountId *string,
|
cloudAccountId *string,
|
||||||
@@ -415,8 +417,16 @@ func (c *Controller) GetServiceDetails(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if cloudAccountId != nil {
|
if cloudAccountId != nil {
|
||||||
|
|
||||||
|
activeAccount, apiErr := c.accountsRepo.getConnectedCloudAccount(
|
||||||
|
ctx, orgID, cloudProvider, *cloudAccountId,
|
||||||
|
)
|
||||||
|
if apiErr != nil {
|
||||||
|
return nil, model.WrapApiError(apiErr, "couldn't get active account")
|
||||||
|
}
|
||||||
|
|
||||||
config, apiErr := c.serviceConfigRepo.get(
|
config, apiErr := c.serviceConfigRepo.get(
|
||||||
ctx, cloudProvider, *cloudAccountId, serviceId,
|
ctx, orgID, activeAccount.ID.StringValue(), serviceId,
|
||||||
)
|
)
|
||||||
if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
|
if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't fetch service config")
|
return nil, model.WrapApiError(apiErr, "couldn't fetch service config")
|
||||||
@@ -425,15 +435,22 @@ func (c *Controller) GetServiceDetails(
|
|||||||
if config != nil {
|
if config != nil {
|
||||||
service.Config = config
|
service.Config = config
|
||||||
|
|
||||||
|
enabled := false
|
||||||
if config.Metrics != nil && config.Metrics.Enabled {
|
if config.Metrics != nil && config.Metrics.Enabled {
|
||||||
|
enabled = true
|
||||||
|
}
|
||||||
|
|
||||||
// add links to service dashboards, making them clickable.
|
// add links to service dashboards, making them clickable.
|
||||||
for i, d := range service.Assets.Dashboards {
|
for i, d := range service.Assets.Dashboards {
|
||||||
dashboardUuid := c.dashboardUuid(
|
dashboardUuid := c.dashboardUuid(
|
||||||
cloudProvider, serviceId, d.Id,
|
cloudProvider, serviceId, d.Id,
|
||||||
)
|
)
|
||||||
|
if enabled {
|
||||||
service.Assets.Dashboards[i].Url = fmt.Sprintf(
|
service.Assets.Dashboards[i].Url = fmt.Sprintf(
|
||||||
"/dashboard/%s", dashboardUuid,
|
"/dashboard/%s", dashboardUuid,
|
||||||
)
|
)
|
||||||
|
} else {
|
||||||
|
service.Assets.Dashboards[i].Url = ""
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -444,16 +461,17 @@ func (c *Controller) GetServiceDetails(
|
|||||||
|
|
||||||
type UpdateServiceConfigRequest struct {
|
type UpdateServiceConfigRequest struct {
|
||||||
CloudAccountId string `json:"cloud_account_id"`
|
CloudAccountId string `json:"cloud_account_id"`
|
||||||
Config CloudServiceConfig `json:"config"`
|
Config types.CloudServiceConfig `json:"config"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type UpdateServiceConfigResponse struct {
|
type UpdateServiceConfigResponse struct {
|
||||||
Id string `json:"id"`
|
Id string `json:"id"`
|
||||||
Config CloudServiceConfig `json:"config"`
|
Config types.CloudServiceConfig `json:"config"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) UpdateServiceConfig(
|
func (c *Controller) UpdateServiceConfig(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgID string,
|
||||||
cloudProvider string,
|
cloudProvider string,
|
||||||
serviceId string,
|
serviceId string,
|
||||||
req UpdateServiceConfigRequest,
|
req UpdateServiceConfigRequest,
|
||||||
@@ -465,7 +483,7 @@ func (c *Controller) UpdateServiceConfig(
|
|||||||
|
|
||||||
// can only update config for a connected cloud account id
|
// can only update config for a connected cloud account id
|
||||||
_, apiErr := c.accountsRepo.getConnectedCloudAccount(
|
_, apiErr := c.accountsRepo.getConnectedCloudAccount(
|
||||||
ctx, cloudProvider, req.CloudAccountId,
|
ctx, orgID, cloudProvider, req.CloudAccountId,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't find connected cloud account")
|
return nil, model.WrapApiError(apiErr, "couldn't find connected cloud account")
|
||||||
@@ -478,7 +496,7 @@ func (c *Controller) UpdateServiceConfig(
|
|||||||
}
|
}
|
||||||
|
|
||||||
updatedConfig, apiErr := c.serviceConfigRepo.upsert(
|
updatedConfig, apiErr := c.serviceConfigRepo.upsert(
|
||||||
ctx, cloudProvider, req.CloudAccountId, serviceId, req.Config,
|
ctx, orgID, cloudProvider, req.CloudAccountId, serviceId, req.Config,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't update service config")
|
return nil, model.WrapApiError(apiErr, "couldn't update service config")
|
||||||
@@ -492,13 +510,13 @@ func (c *Controller) UpdateServiceConfig(
|
|||||||
|
|
||||||
// All dashboards that are available based on cloud integrations configuration
|
// All dashboards that are available based on cloud integrations configuration
|
||||||
// across all cloud providers
|
// across all cloud providers
|
||||||
func (c *Controller) AvailableDashboards(ctx context.Context) (
|
func (c *Controller) AvailableDashboards(ctx context.Context, orgId string) (
|
||||||
[]types.Dashboard, *model.ApiError,
|
[]types.Dashboard, *model.ApiError,
|
||||||
) {
|
) {
|
||||||
allDashboards := []types.Dashboard{}
|
allDashboards := []types.Dashboard{}
|
||||||
|
|
||||||
for _, provider := range []string{"aws"} {
|
for _, provider := range []string{"aws"} {
|
||||||
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, provider)
|
providerDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, provider)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
apiErr, fmt.Sprintf("couldn't get available dashboards for %s", provider),
|
apiErr, fmt.Sprintf("couldn't get available dashboards for %s", provider),
|
||||||
@@ -512,10 +530,10 @@ func (c *Controller) AvailableDashboards(ctx context.Context) (
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) AvailableDashboardsForCloudProvider(
|
func (c *Controller) AvailableDashboardsForCloudProvider(
|
||||||
ctx context.Context, cloudProvider string,
|
ctx context.Context, orgID string, cloudProvider string,
|
||||||
) ([]types.Dashboard, *model.ApiError) {
|
) ([]types.Dashboard, *model.ApiError) {
|
||||||
|
|
||||||
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, cloudProvider)
|
accountRecords, apiErr := c.accountsRepo.listConnected(ctx, orgID, cloudProvider)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, "couldn't list connected cloud accounts")
|
return nil, model.WrapApiError(apiErr, "couldn't list connected cloud accounts")
|
||||||
}
|
}
|
||||||
@@ -524,9 +542,9 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
|
|||||||
servicesWithAvailableMetrics := map[string]*time.Time{}
|
servicesWithAvailableMetrics := map[string]*time.Time{}
|
||||||
|
|
||||||
for _, ar := range accountRecords {
|
for _, ar := range accountRecords {
|
||||||
if ar.CloudAccountId != nil {
|
if ar.AccountID != nil {
|
||||||
configsBySvcId, apiErr := c.serviceConfigRepo.getAllForAccount(
|
configsBySvcId, apiErr := c.serviceConfigRepo.getAllForAccount(
|
||||||
ctx, cloudProvider, *ar.CloudAccountId,
|
ctx, orgID, ar.ID.StringValue(),
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
@@ -574,6 +592,7 @@ func (c *Controller) AvailableDashboardsForCloudProvider(
|
|||||||
}
|
}
|
||||||
func (c *Controller) GetDashboardById(
|
func (c *Controller) GetDashboardById(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
dashboardUuid string,
|
dashboardUuid string,
|
||||||
) (*types.Dashboard, *model.ApiError) {
|
) (*types.Dashboard, *model.ApiError) {
|
||||||
cloudProvider, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
|
cloudProvider, _, _, apiErr := c.parseDashboardUuid(dashboardUuid)
|
||||||
@@ -581,7 +600,7 @@ func (c *Controller) GetDashboardById(
|
|||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, cloudProvider)
|
allDashboards, apiErr := c.AvailableDashboardsForCloudProvider(ctx, orgId, cloudProvider)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
apiErr, fmt.Sprintf("couldn't list available dashboards"),
|
apiErr, fmt.Sprintf("couldn't list available dashboards"),
|
||||||
|
|||||||
@@ -4,23 +4,30 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||||
|
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||||
|
"github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
|
func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
|
||||||
require := require.New(t)
|
require := require.New(t)
|
||||||
sqlStore, _ := utils.NewTestSqliteDB(t)
|
sqlStore := utils.NewQueryServiceDBForTests(t)
|
||||||
controller, err := NewController(sqlStore)
|
controller, err := NewController(sqlStore)
|
||||||
require.NoError(err)
|
require.NoError(err)
|
||||||
|
|
||||||
|
user, apiErr := createTestUser()
|
||||||
|
require.Nil(apiErr)
|
||||||
|
|
||||||
// should be able to generate connection url for
|
// should be able to generate connection url for
|
||||||
// same account id again with updated config
|
// same account id again with updated config
|
||||||
testAccountConfig1 := AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
|
testAccountConfig1 := types.AccountConfig{EnabledRegions: []string{"us-east-1", "us-west-1"}}
|
||||||
resp1, apiErr := controller.GenerateConnectionUrl(
|
resp1, apiErr := controller.GenerateConnectionUrl(
|
||||||
context.TODO(), "aws", GenerateConnectionUrlRequest{
|
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
|
||||||
AccountConfig: testAccountConfig1,
|
AccountConfig: testAccountConfig1,
|
||||||
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
|
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
|
||||||
},
|
},
|
||||||
@@ -31,14 +38,14 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
|
|||||||
|
|
||||||
testAccountId := resp1.AccountId
|
testAccountId := resp1.AccountId
|
||||||
account, apiErr := controller.accountsRepo.get(
|
account, apiErr := controller.accountsRepo.get(
|
||||||
context.TODO(), "aws", testAccountId,
|
context.TODO(), user.OrgID, "aws", testAccountId,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(testAccountConfig1, *account.Config)
|
require.Equal(testAccountConfig1, *account.Config)
|
||||||
|
|
||||||
testAccountConfig2 := AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
|
testAccountConfig2 := types.AccountConfig{EnabledRegions: []string{"us-east-2", "us-west-2"}}
|
||||||
resp2, apiErr := controller.GenerateConnectionUrl(
|
resp2, apiErr := controller.GenerateConnectionUrl(
|
||||||
context.TODO(), "aws", GenerateConnectionUrlRequest{
|
context.TODO(), user.OrgID, "aws", GenerateConnectionUrlRequest{
|
||||||
AccountId: &testAccountId,
|
AccountId: &testAccountId,
|
||||||
AccountConfig: testAccountConfig2,
|
AccountConfig: testAccountConfig2,
|
||||||
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
|
AgentConfig: SigNozAgentConfig{Region: "us-east-2"},
|
||||||
@@ -48,7 +55,7 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
|
|||||||
require.Equal(testAccountId, resp2.AccountId)
|
require.Equal(testAccountId, resp2.AccountId)
|
||||||
|
|
||||||
account, apiErr = controller.accountsRepo.get(
|
account, apiErr = controller.accountsRepo.get(
|
||||||
context.TODO(), "aws", testAccountId,
|
context.TODO(), user.OrgID, "aws", testAccountId,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(testAccountConfig2, *account.Config)
|
require.Equal(testAccountConfig2, *account.Config)
|
||||||
@@ -56,18 +63,21 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) {
|
|||||||
|
|
||||||
func TestAgentCheckIns(t *testing.T) {
|
func TestAgentCheckIns(t *testing.T) {
|
||||||
require := require.New(t)
|
require := require.New(t)
|
||||||
sqlStore, _ := utils.NewTestSqliteDB(t)
|
sqlStore := utils.NewQueryServiceDBForTests(t)
|
||||||
controller, err := NewController(sqlStore)
|
controller, err := NewController(sqlStore)
|
||||||
require.NoError(err)
|
require.NoError(err)
|
||||||
|
|
||||||
|
user, apiErr := createTestUser()
|
||||||
|
require.Nil(apiErr)
|
||||||
|
|
||||||
// An agent should be able to check in from a cloud account even
|
// An agent should be able to check in from a cloud account even
|
||||||
// if no connection url was requested (no account with agent's account id exists)
|
// if no connection url was requested (no account with agent's account id exists)
|
||||||
testAccountId1 := uuid.NewString()
|
testAccountId1 := uuid.NewString()
|
||||||
testCloudAccountId1 := "546311234"
|
testCloudAccountId1 := "546311234"
|
||||||
resp1, apiErr := controller.CheckInAsAgent(
|
resp1, apiErr := controller.CheckInAsAgent(
|
||||||
context.TODO(), "aws", AgentCheckInRequest{
|
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
|
||||||
AccountId: testAccountId1,
|
ID: testAccountId1,
|
||||||
CloudAccountId: testCloudAccountId1,
|
AccountID: testCloudAccountId1,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
@@ -78,9 +88,9 @@ func TestAgentCheckIns(t *testing.T) {
|
|||||||
// cloud account id for the same account.
|
// cloud account id for the same account.
|
||||||
testCloudAccountId2 := "99999999"
|
testCloudAccountId2 := "99999999"
|
||||||
_, apiErr = controller.CheckInAsAgent(
|
_, apiErr = controller.CheckInAsAgent(
|
||||||
context.TODO(), "aws", AgentCheckInRequest{
|
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
|
||||||
AccountId: testAccountId1,
|
ID: testAccountId1,
|
||||||
CloudAccountId: testCloudAccountId2,
|
AccountID: testCloudAccountId2,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.NotNil(apiErr)
|
require.NotNil(apiErr)
|
||||||
@@ -90,18 +100,18 @@ func TestAgentCheckIns(t *testing.T) {
|
|||||||
// i.e. there can't be 2 connected account records for the same cloud account id
|
// i.e. there can't be 2 connected account records for the same cloud account id
|
||||||
// at any point in time.
|
// at any point in time.
|
||||||
existingConnected, apiErr := controller.accountsRepo.getConnectedCloudAccount(
|
existingConnected, apiErr := controller.accountsRepo.getConnectedCloudAccount(
|
||||||
context.TODO(), "aws", testCloudAccountId1,
|
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.NotNil(existingConnected)
|
require.NotNil(existingConnected)
|
||||||
require.Equal(testCloudAccountId1, *existingConnected.CloudAccountId)
|
require.Equal(testCloudAccountId1, *existingConnected.AccountID)
|
||||||
require.Nil(existingConnected.RemovedAt)
|
require.Nil(existingConnected.RemovedAt)
|
||||||
|
|
||||||
testAccountId2 := uuid.NewString()
|
testAccountId2 := uuid.NewString()
|
||||||
_, apiErr = controller.CheckInAsAgent(
|
_, apiErr = controller.CheckInAsAgent(
|
||||||
context.TODO(), "aws", AgentCheckInRequest{
|
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
|
||||||
AccountId: testAccountId2,
|
ID: testAccountId2,
|
||||||
CloudAccountId: testCloudAccountId1,
|
AccountID: testCloudAccountId1,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.NotNil(apiErr)
|
require.NotNil(apiErr)
|
||||||
@@ -109,29 +119,29 @@ func TestAgentCheckIns(t *testing.T) {
|
|||||||
// After disconnecting existing account record, the agent should be able to
|
// After disconnecting existing account record, the agent should be able to
|
||||||
// connected for a particular cloud account id
|
// connected for a particular cloud account id
|
||||||
_, apiErr = controller.DisconnectAccount(
|
_, apiErr = controller.DisconnectAccount(
|
||||||
context.TODO(), "aws", testAccountId1,
|
context.TODO(), user.OrgID, "aws", testAccountId1,
|
||||||
)
|
)
|
||||||
|
|
||||||
existingConnected, apiErr = controller.accountsRepo.getConnectedCloudAccount(
|
existingConnected, apiErr = controller.accountsRepo.getConnectedCloudAccount(
|
||||||
context.TODO(), "aws", testCloudAccountId1,
|
context.TODO(), user.OrgID, "aws", testCloudAccountId1,
|
||||||
)
|
)
|
||||||
require.Nil(existingConnected)
|
require.Nil(existingConnected)
|
||||||
require.NotNil(apiErr)
|
require.NotNil(apiErr)
|
||||||
require.Equal(model.ErrorNotFound, apiErr.Type())
|
require.Equal(model.ErrorNotFound, apiErr.Type())
|
||||||
|
|
||||||
_, apiErr = controller.CheckInAsAgent(
|
_, apiErr = controller.CheckInAsAgent(
|
||||||
context.TODO(), "aws", AgentCheckInRequest{
|
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
|
||||||
AccountId: testAccountId2,
|
ID: testAccountId2,
|
||||||
CloudAccountId: testCloudAccountId1,
|
AccountID: testCloudAccountId1,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
|
|
||||||
// should be able to keep checking in
|
// should be able to keep checking in
|
||||||
_, apiErr = controller.CheckInAsAgent(
|
_, apiErr = controller.CheckInAsAgent(
|
||||||
context.TODO(), "aws", AgentCheckInRequest{
|
context.TODO(), user.OrgID, "aws", AgentCheckInRequest{
|
||||||
AccountId: testAccountId2,
|
ID: testAccountId2,
|
||||||
CloudAccountId: testCloudAccountId1,
|
AccountID: testCloudAccountId1,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
@@ -139,13 +149,16 @@ func TestAgentCheckIns(t *testing.T) {
|
|||||||
|
|
||||||
func TestCantDisconnectNonExistentAccount(t *testing.T) {
|
func TestCantDisconnectNonExistentAccount(t *testing.T) {
|
||||||
require := require.New(t)
|
require := require.New(t)
|
||||||
sqlStore, _ := utils.NewTestSqliteDB(t)
|
sqlStore := utils.NewQueryServiceDBForTests(t)
|
||||||
controller, err := NewController(sqlStore)
|
controller, err := NewController(sqlStore)
|
||||||
require.NoError(err)
|
require.NoError(err)
|
||||||
|
|
||||||
|
user, apiErr := createTestUser()
|
||||||
|
require.Nil(apiErr)
|
||||||
|
|
||||||
// Attempting to disconnect a non-existent account should return error
|
// Attempting to disconnect a non-existent account should return error
|
||||||
account, apiErr := controller.DisconnectAccount(
|
account, apiErr := controller.DisconnectAccount(
|
||||||
context.TODO(), "aws", uuid.NewString(),
|
context.TODO(), user.OrgID, "aws", uuid.NewString(),
|
||||||
)
|
)
|
||||||
require.NotNil(apiErr)
|
require.NotNil(apiErr)
|
||||||
require.Equal(model.ErrorNotFound, apiErr.Type())
|
require.Equal(model.ErrorNotFound, apiErr.Type())
|
||||||
@@ -154,15 +167,23 @@ func TestCantDisconnectNonExistentAccount(t *testing.T) {
|
|||||||
|
|
||||||
func TestConfigureService(t *testing.T) {
|
func TestConfigureService(t *testing.T) {
|
||||||
require := require.New(t)
|
require := require.New(t)
|
||||||
sqlStore, _ := utils.NewTestSqliteDB(t)
|
sqlStore := utils.NewQueryServiceDBForTests(t)
|
||||||
controller, err := NewController(sqlStore)
|
controller, err := NewController(sqlStore)
|
||||||
require.NoError(err)
|
require.NoError(err)
|
||||||
|
|
||||||
|
user, apiErr := createTestUser()
|
||||||
|
require.Nil(apiErr)
|
||||||
|
|
||||||
|
// create a connected account
|
||||||
testCloudAccountId := "546311234"
|
testCloudAccountId := "546311234"
|
||||||
|
testConnectedAccount := makeTestConnectedAccount(t, user.OrgID, controller, testCloudAccountId)
|
||||||
|
require.Nil(testConnectedAccount.RemovedAt)
|
||||||
|
require.NotEmpty(testConnectedAccount.AccountID)
|
||||||
|
require.Equal(testCloudAccountId, *testConnectedAccount.AccountID)
|
||||||
|
|
||||||
// should start out without any service config
|
// should start out without any service config
|
||||||
svcListResp, apiErr := controller.ListServices(
|
svcListResp, apiErr := controller.ListServices(
|
||||||
context.TODO(), "aws", &testCloudAccountId,
|
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
|
|
||||||
@@ -170,25 +191,20 @@ func TestConfigureService(t *testing.T) {
|
|||||||
require.Nil(svcListResp.Services[0].Config)
|
require.Nil(svcListResp.Services[0].Config)
|
||||||
|
|
||||||
svcDetails, apiErr := controller.GetServiceDetails(
|
svcDetails, apiErr := controller.GetServiceDetails(
|
||||||
context.TODO(), "aws", testSvcId, &testCloudAccountId,
|
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(testSvcId, svcDetails.Id)
|
require.Equal(testSvcId, svcDetails.Id)
|
||||||
require.Nil(svcDetails.Config)
|
require.Nil(svcDetails.Config)
|
||||||
|
|
||||||
// should be able to configure a service for a connected account
|
// should be able to configure a service for a connected account
|
||||||
testConnectedAccount := makeTestConnectedAccount(t, controller, testCloudAccountId)
|
testSvcConfig := types.CloudServiceConfig{
|
||||||
require.Nil(testConnectedAccount.RemovedAt)
|
Metrics: &types.CloudServiceMetricsConfig{
|
||||||
require.NotNil(testConnectedAccount.CloudAccountId)
|
|
||||||
require.Equal(testCloudAccountId, *testConnectedAccount.CloudAccountId)
|
|
||||||
|
|
||||||
testSvcConfig := CloudServiceConfig{
|
|
||||||
Metrics: &CloudServiceMetricsConfig{
|
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
updateSvcConfigResp, apiErr := controller.UpdateServiceConfig(
|
updateSvcConfigResp, apiErr := controller.UpdateServiceConfig(
|
||||||
context.TODO(), "aws", testSvcId, UpdateServiceConfigRequest{
|
context.TODO(), user.OrgID, "aws", testSvcId, UpdateServiceConfigRequest{
|
||||||
CloudAccountId: testCloudAccountId,
|
CloudAccountId: testCloudAccountId,
|
||||||
Config: testSvcConfig,
|
Config: testSvcConfig,
|
||||||
},
|
},
|
||||||
@@ -198,14 +214,14 @@ func TestConfigureService(t *testing.T) {
|
|||||||
require.Equal(testSvcConfig, updateSvcConfigResp.Config)
|
require.Equal(testSvcConfig, updateSvcConfigResp.Config)
|
||||||
|
|
||||||
svcDetails, apiErr = controller.GetServiceDetails(
|
svcDetails, apiErr = controller.GetServiceDetails(
|
||||||
context.TODO(), "aws", testSvcId, &testCloudAccountId,
|
context.TODO(), user.OrgID, "aws", testSvcId, &testCloudAccountId,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(testSvcId, svcDetails.Id)
|
require.Equal(testSvcId, svcDetails.Id)
|
||||||
require.Equal(testSvcConfig, *svcDetails.Config)
|
require.Equal(testSvcConfig, *svcDetails.Config)
|
||||||
|
|
||||||
svcListResp, apiErr = controller.ListServices(
|
svcListResp, apiErr = controller.ListServices(
|
||||||
context.TODO(), "aws", &testCloudAccountId,
|
context.TODO(), user.OrgID, "aws", &testCloudAccountId,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
for _, svc := range svcListResp.Services {
|
for _, svc := range svcListResp.Services {
|
||||||
@@ -216,12 +232,12 @@ func TestConfigureService(t *testing.T) {
|
|||||||
|
|
||||||
// should not be able to configure service after cloud account has been disconnected
|
// should not be able to configure service after cloud account has been disconnected
|
||||||
_, apiErr = controller.DisconnectAccount(
|
_, apiErr = controller.DisconnectAccount(
|
||||||
context.TODO(), "aws", testConnectedAccount.Id,
|
context.TODO(), user.OrgID, "aws", testConnectedAccount.ID.StringValue(),
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
|
|
||||||
_, apiErr = controller.UpdateServiceConfig(
|
_, apiErr = controller.UpdateServiceConfig(
|
||||||
context.TODO(), "aws", testSvcId,
|
context.TODO(), user.OrgID, "aws", testSvcId,
|
||||||
UpdateServiceConfigRequest{
|
UpdateServiceConfigRequest{
|
||||||
CloudAccountId: testCloudAccountId,
|
CloudAccountId: testCloudAccountId,
|
||||||
Config: testSvcConfig,
|
Config: testSvcConfig,
|
||||||
@@ -231,7 +247,7 @@ func TestConfigureService(t *testing.T) {
|
|||||||
|
|
||||||
// should not be able to configure a service for a cloud account id that is not connected yet
|
// should not be able to configure a service for a cloud account id that is not connected yet
|
||||||
_, apiErr = controller.UpdateServiceConfig(
|
_, apiErr = controller.UpdateServiceConfig(
|
||||||
context.TODO(), "aws", testSvcId,
|
context.TODO(), user.OrgID, "aws", testSvcId,
|
||||||
UpdateServiceConfigRequest{
|
UpdateServiceConfigRequest{
|
||||||
CloudAccountId: "9999999999",
|
CloudAccountId: "9999999999",
|
||||||
Config: testSvcConfig,
|
Config: testSvcConfig,
|
||||||
@@ -241,7 +257,7 @@ func TestConfigureService(t *testing.T) {
|
|||||||
|
|
||||||
// should not be able to set config for an unsupported service
|
// should not be able to set config for an unsupported service
|
||||||
_, apiErr = controller.UpdateServiceConfig(
|
_, apiErr = controller.UpdateServiceConfig(
|
||||||
context.TODO(), "aws", "bad-service", UpdateServiceConfigRequest{
|
context.TODO(), user.OrgID, "aws", "bad-service", UpdateServiceConfigRequest{
|
||||||
CloudAccountId: testCloudAccountId,
|
CloudAccountId: testCloudAccountId,
|
||||||
Config: testSvcConfig,
|
Config: testSvcConfig,
|
||||||
},
|
},
|
||||||
@@ -250,22 +266,54 @@ func TestConfigureService(t *testing.T) {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeTestConnectedAccount(t *testing.T, controller *Controller, cloudAccountId string) *AccountRecord {
|
func makeTestConnectedAccount(t *testing.T, orgId string, controller *Controller, cloudAccountId string) *types.CloudIntegration {
|
||||||
require := require.New(t)
|
require := require.New(t)
|
||||||
|
|
||||||
// a check in from SigNoz agent creates or updates a connected account.
|
// a check in from SigNoz agent creates or updates a connected account.
|
||||||
testAccountId := uuid.NewString()
|
testAccountId := uuid.NewString()
|
||||||
resp, apiErr := controller.CheckInAsAgent(
|
resp, apiErr := controller.CheckInAsAgent(
|
||||||
context.TODO(), "aws", AgentCheckInRequest{
|
context.TODO(), orgId, "aws", AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: cloudAccountId,
|
AccountID: cloudAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(testAccountId, resp.AccountId)
|
require.Equal(testAccountId, resp.AccountId)
|
||||||
require.Equal(cloudAccountId, resp.CloudAccountId)
|
require.Equal(cloudAccountId, resp.CloudAccountId)
|
||||||
|
|
||||||
acc, err := controller.accountsRepo.get(context.TODO(), "aws", resp.AccountId)
|
acc, err := controller.accountsRepo.get(context.TODO(), orgId, "aws", resp.AccountId)
|
||||||
require.Nil(err)
|
require.Nil(err)
|
||||||
return acc
|
return acc
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func createTestUser() (*types.User, *model.ApiError) {
|
||||||
|
// Create a test user for auth
|
||||||
|
ctx := context.Background()
|
||||||
|
org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
|
||||||
|
Name: "test",
|
||||||
|
})
|
||||||
|
if apiErr != nil {
|
||||||
|
return nil, apiErr
|
||||||
|
}
|
||||||
|
|
||||||
|
group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
|
||||||
|
if apiErr != nil {
|
||||||
|
return nil, apiErr
|
||||||
|
}
|
||||||
|
|
||||||
|
auth.InitAuthCache(ctx)
|
||||||
|
|
||||||
|
userId := uuid.NewString()
|
||||||
|
return dao.DB().CreateUser(
|
||||||
|
ctx,
|
||||||
|
&types.User{
|
||||||
|
ID: userId,
|
||||||
|
Name: "test",
|
||||||
|
Email: userId[:8] + "test@test.com",
|
||||||
|
Password: "test",
|
||||||
|
OrgID: org.ID,
|
||||||
|
GroupID: group.ID,
|
||||||
|
},
|
||||||
|
true,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,123 +1,11 @@
|
|||||||
package cloudintegrations
|
package cloudintegrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"database/sql/driver"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/types"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Represents a cloud provider account for cloud integrations
|
|
||||||
type AccountRecord struct {
|
|
||||||
CloudProvider string `json:"cloud_provider" db:"cloud_provider"`
|
|
||||||
Id string `json:"id" db:"id"`
|
|
||||||
Config *AccountConfig `json:"config" db:"config_json"`
|
|
||||||
CloudAccountId *string `json:"cloud_account_id" db:"cloud_account_id"`
|
|
||||||
LastAgentReport *AgentReport `json:"last_agent_report" db:"last_agent_report_json"`
|
|
||||||
CreatedAt time.Time `json:"created_at" db:"created_at"`
|
|
||||||
RemovedAt *time.Time `json:"removed_at" db:"removed_at"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type AccountConfig struct {
|
|
||||||
EnabledRegions []string `json:"regions"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func DefaultAccountConfig() AccountConfig {
|
|
||||||
return AccountConfig{
|
|
||||||
EnabledRegions: []string{},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// For serializing from db
|
|
||||||
func (c *AccountConfig) Scan(src any) error {
|
|
||||||
data, ok := src.([]byte)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("tried to scan from %T instead of bytes", src)
|
|
||||||
}
|
|
||||||
|
|
||||||
return json.Unmarshal(data, &c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// For serializing to db
|
|
||||||
func (c *AccountConfig) Value() (driver.Value, error) {
|
|
||||||
if c == nil {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
serialized, err := json.Marshal(c)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf(
|
|
||||||
"couldn't serialize cloud account config to JSON: %w", err,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return serialized, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type AgentReport struct {
|
|
||||||
TimestampMillis int64 `json:"timestamp_millis"`
|
|
||||||
Data map[string]any `json:"data"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// For serializing from db
|
|
||||||
func (r *AgentReport) Scan(src any) error {
|
|
||||||
data, ok := src.([]byte)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("tried to scan from %T instead of bytes", src)
|
|
||||||
}
|
|
||||||
|
|
||||||
return json.Unmarshal(data, &r)
|
|
||||||
}
|
|
||||||
|
|
||||||
// For serializing to db
|
|
||||||
func (r *AgentReport) Value() (driver.Value, error) {
|
|
||||||
if r == nil {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
serialized, err := json.Marshal(r)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf(
|
|
||||||
"couldn't serialize agent report to JSON: %w", err,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return serialized, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type AccountStatus struct {
|
|
||||||
Integration AccountIntegrationStatus `json:"integration"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type AccountIntegrationStatus struct {
|
|
||||||
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *AccountRecord) status() AccountStatus {
|
|
||||||
status := AccountStatus{}
|
|
||||||
if a.LastAgentReport != nil {
|
|
||||||
lastHeartbeat := a.LastAgentReport.TimestampMillis
|
|
||||||
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
|
|
||||||
}
|
|
||||||
return status
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *AccountRecord) account() Account {
|
|
||||||
ca := Account{Id: a.Id, Status: a.status()}
|
|
||||||
|
|
||||||
if a.CloudAccountId != nil {
|
|
||||||
ca.CloudAccountId = *a.CloudAccountId
|
|
||||||
}
|
|
||||||
|
|
||||||
if a.Config != nil {
|
|
||||||
ca.Config = *a.Config
|
|
||||||
} else {
|
|
||||||
ca.Config = DefaultAccountConfig()
|
|
||||||
}
|
|
||||||
|
|
||||||
return ca
|
|
||||||
}
|
|
||||||
|
|
||||||
type CloudServiceSummary struct {
|
type CloudServiceSummary struct {
|
||||||
Id string `json:"id"`
|
Id string `json:"id"`
|
||||||
Title string `json:"title"`
|
Title string `json:"title"`
|
||||||
@@ -125,7 +13,7 @@ type CloudServiceSummary struct {
|
|||||||
|
|
||||||
// Present only if the service has been configured in the
|
// Present only if the service has been configured in the
|
||||||
// context of a cloud provider account.
|
// context of a cloud provider account.
|
||||||
Config *CloudServiceConfig `json:"config,omitempty"`
|
Config *types.CloudServiceConfig `json:"config,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type CloudServiceDetails struct {
|
type CloudServiceDetails struct {
|
||||||
@@ -144,44 +32,6 @@ type CloudServiceDetails struct {
|
|||||||
TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry_collection_strategy"`
|
TelemetryCollectionStrategy *CloudTelemetryCollectionStrategy `json:"telemetry_collection_strategy"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type CloudServiceConfig struct {
|
|
||||||
Logs *CloudServiceLogsConfig `json:"logs,omitempty"`
|
|
||||||
Metrics *CloudServiceMetricsConfig `json:"metrics,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// For serializing from db
|
|
||||||
func (c *CloudServiceConfig) Scan(src any) error {
|
|
||||||
data, ok := src.([]byte)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("tried to scan from %T instead of bytes", src)
|
|
||||||
}
|
|
||||||
|
|
||||||
return json.Unmarshal(data, &c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// For serializing to db
|
|
||||||
func (c *CloudServiceConfig) Value() (driver.Value, error) {
|
|
||||||
if c == nil {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
serialized, err := json.Marshal(c)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf(
|
|
||||||
"couldn't serialize cloud service config to JSON: %w", err,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return serialized, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type CloudServiceLogsConfig struct {
|
|
||||||
Enabled bool `json:"enabled"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type CloudServiceMetricsConfig struct {
|
|
||||||
Enabled bool `json:"enabled"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type CloudServiceAssets struct {
|
type CloudServiceAssets struct {
|
||||||
Dashboards []CloudServiceDashboard `json:"dashboards"`
|
Dashboards []CloudServiceDashboard `json:"dashboards"`
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,161 +4,161 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/jmoiron/sqlx"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
)
|
)
|
||||||
|
|
||||||
type serviceConfigRepository interface {
|
type serviceConfigRepository interface {
|
||||||
get(
|
get(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
cloudProvider string,
|
orgID string,
|
||||||
cloudAccountId string,
|
cloudAccountId string,
|
||||||
serviceId string,
|
serviceType string,
|
||||||
) (*CloudServiceConfig, *model.ApiError)
|
) (*types.CloudServiceConfig, *model.ApiError)
|
||||||
|
|
||||||
upsert(
|
upsert(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgID string,
|
||||||
cloudProvider string,
|
cloudProvider string,
|
||||||
cloudAccountId string,
|
cloudAccountId string,
|
||||||
serviceId string,
|
serviceId string,
|
||||||
config CloudServiceConfig,
|
config types.CloudServiceConfig,
|
||||||
) (*CloudServiceConfig, *model.ApiError)
|
) (*types.CloudServiceConfig, *model.ApiError)
|
||||||
|
|
||||||
getAllForAccount(
|
getAllForAccount(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
cloudProvider string,
|
orgID string,
|
||||||
cloudAccountId string,
|
cloudAccountId string,
|
||||||
) (
|
) (
|
||||||
configsBySvcId map[string]*CloudServiceConfig,
|
configsBySvcId map[string]*types.CloudServiceConfig,
|
||||||
apiErr *model.ApiError,
|
apiErr *model.ApiError,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
func newServiceConfigRepository(db *sqlx.DB) (
|
func newServiceConfigRepository(store sqlstore.SQLStore) (
|
||||||
*serviceConfigSQLRepository, error,
|
*serviceConfigSQLRepository, error,
|
||||||
) {
|
) {
|
||||||
return &serviceConfigSQLRepository{
|
return &serviceConfigSQLRepository{
|
||||||
db: db,
|
store: store,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type serviceConfigSQLRepository struct {
|
type serviceConfigSQLRepository struct {
|
||||||
db *sqlx.DB
|
store sqlstore.SQLStore
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *serviceConfigSQLRepository) get(
|
func (r *serviceConfigSQLRepository) get(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
cloudProvider string,
|
orgID string,
|
||||||
cloudAccountId string,
|
cloudAccountId string,
|
||||||
serviceId string,
|
serviceType string,
|
||||||
) (*CloudServiceConfig, *model.ApiError) {
|
) (*types.CloudServiceConfig, *model.ApiError) {
|
||||||
|
|
||||||
var result CloudServiceConfig
|
var result types.CloudIntegrationService
|
||||||
|
|
||||||
err := r.db.GetContext(
|
err := r.store.BunDB().NewSelect().
|
||||||
ctx, &result, `
|
Model(&result).
|
||||||
select
|
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
|
||||||
config_json
|
Where("ci.org_id = ?", orgID).
|
||||||
from cloud_integrations_service_configs
|
Where("ci.id = ?", cloudAccountId).
|
||||||
where
|
Where("cis.type = ?", serviceType).
|
||||||
cloud_provider=$1
|
Scan(ctx)
|
||||||
and cloud_account_id=$2
|
|
||||||
and service_id=$3
|
|
||||||
`,
|
|
||||||
cloudProvider, cloudAccountId, serviceId,
|
|
||||||
)
|
|
||||||
|
|
||||||
if err == sql.ErrNoRows {
|
if err == sql.ErrNoRows {
|
||||||
return nil, model.NotFoundError(fmt.Errorf(
|
return nil, model.NotFoundError(fmt.Errorf(
|
||||||
"couldn't find %s %s config for %s",
|
"couldn't find config for cloud account %s",
|
||||||
cloudProvider, serviceId, cloudAccountId,
|
cloudAccountId,
|
||||||
))
|
))
|
||||||
|
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"couldn't query cloud service config: %w", err,
|
"couldn't query cloud service config: %w", err,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
return &result, nil
|
return &result.Config, nil
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *serviceConfigSQLRepository) upsert(
|
func (r *serviceConfigSQLRepository) upsert(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgID string,
|
||||||
cloudProvider string,
|
cloudProvider string,
|
||||||
cloudAccountId string,
|
cloudAccountId string,
|
||||||
serviceId string,
|
serviceId string,
|
||||||
config CloudServiceConfig,
|
config types.CloudServiceConfig,
|
||||||
) (*CloudServiceConfig, *model.ApiError) {
|
) (*types.CloudServiceConfig, *model.ApiError) {
|
||||||
|
|
||||||
query := `
|
// get cloud integration id from account id
|
||||||
INSERT INTO cloud_integrations_service_configs (
|
// if the account is not connected, we don't need to upsert the config
|
||||||
cloud_provider,
|
var cloudIntegrationId string
|
||||||
cloud_account_id,
|
err := r.store.BunDB().NewSelect().
|
||||||
service_id,
|
Model((*types.CloudIntegration)(nil)).
|
||||||
config_json
|
Column("id").
|
||||||
) values ($1, $2, $3, $4)
|
Where("provider = ?", cloudProvider).
|
||||||
on conflict(cloud_provider, cloud_account_id, service_id)
|
Where("account_id = ?", cloudAccountId).
|
||||||
do update set config_json=excluded.config_json
|
Where("org_id = ?", orgID).
|
||||||
`
|
Where("removed_at is NULL").
|
||||||
_, dbErr := r.db.ExecContext(
|
Where("last_agent_report is not NULL").
|
||||||
ctx, query,
|
Scan(ctx, &cloudIntegrationId)
|
||||||
cloudProvider, cloudAccountId, serviceId, &config,
|
|
||||||
)
|
if err != nil {
|
||||||
if dbErr != nil {
|
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"could not upsert cloud service config: %w", dbErr,
|
"couldn't query cloud integration id: %w", err,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
upsertedConfig, apiErr := r.get(ctx, cloudProvider, cloudAccountId, serviceId)
|
serviceConfig := types.CloudIntegrationService{
|
||||||
if apiErr != nil {
|
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
|
||||||
|
TimeAuditable: types.TimeAuditable{
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
},
|
||||||
|
Config: config,
|
||||||
|
Type: serviceId,
|
||||||
|
CloudIntegrationID: cloudIntegrationId,
|
||||||
|
}
|
||||||
|
_, err = r.store.BunDB().NewInsert().
|
||||||
|
Model(&serviceConfig).
|
||||||
|
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"couldn't fetch upserted service config: %w", apiErr.ToError(),
|
"could not upsert cloud service config: %w", err,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
return upsertedConfig, nil
|
return &serviceConfig.Config, nil
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *serviceConfigSQLRepository) getAllForAccount(
|
func (r *serviceConfigSQLRepository) getAllForAccount(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
cloudProvider string,
|
orgID string,
|
||||||
cloudAccountId string,
|
cloudAccountId string,
|
||||||
) (map[string]*CloudServiceConfig, *model.ApiError) {
|
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
|
||||||
|
|
||||||
type ScannedServiceConfigRecord struct {
|
serviceConfigs := []types.CloudIntegrationService{}
|
||||||
ServiceId string `db:"service_id"`
|
|
||||||
Config CloudServiceConfig `db:"config_json"`
|
|
||||||
}
|
|
||||||
|
|
||||||
records := []ScannedServiceConfigRecord{}
|
err := r.store.BunDB().NewSelect().
|
||||||
|
Model(&serviceConfigs).
|
||||||
err := r.db.SelectContext(
|
Join("JOIN cloud_integration ci ON ci.id = cis.cloud_integration_id").
|
||||||
ctx, &records, `
|
Where("ci.id = ?", cloudAccountId).
|
||||||
select
|
Where("ci.org_id = ?", orgID).
|
||||||
service_id,
|
Scan(ctx)
|
||||||
config_json
|
|
||||||
from cloud_integrations_service_configs
|
|
||||||
where
|
|
||||||
cloud_provider=$1
|
|
||||||
and cloud_account_id=$2
|
|
||||||
`,
|
|
||||||
cloudProvider, cloudAccountId,
|
|
||||||
)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"could not query service configs from db: %w", err,
|
"could not query service configs from db: %w", err,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
result := map[string]*CloudServiceConfig{}
|
result := map[string]*types.CloudServiceConfig{}
|
||||||
|
|
||||||
for _, r := range records {
|
for _, r := range serviceConfigs {
|
||||||
result[r.ServiceId] = &r.Config
|
result[r.Type] = &r.Config
|
||||||
}
|
}
|
||||||
|
|
||||||
return result, nil
|
return result, nil
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ import (
|
|||||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/http/render"
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||||
|
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
|
"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
|
||||||
"github.com/SigNoz/signoz/pkg/signoz"
|
"github.com/SigNoz/signoz/pkg/signoz"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
@@ -37,7 +38,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
|
"github.com/SigNoz/signoz/pkg/query-service/app/dashboards"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/explorer"
|
"github.com/SigNoz/signoz/pkg/query-service/app/explorer"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
|
"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
|
||||||
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
|
queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"
|
"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
|
"github.com/SigNoz/signoz/pkg/query-service/app/logs"
|
||||||
@@ -1082,14 +1082,14 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
ic := aH.IntegrationsController
|
ic := aH.IntegrationsController
|
||||||
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context())
|
installedIntegrationDashboards, err := ic.GetDashboardsForInstalledIntegrations(r.Context(), claims.OrgID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to get dashboards for installed integrations", zap.Error(err))
|
zap.L().Error("failed to get dashboards for installed integrations", zap.Error(err))
|
||||||
} else {
|
} else {
|
||||||
allDashboards = append(allDashboards, installedIntegrationDashboards...)
|
allDashboards = append(allDashboards, installedIntegrationDashboards...)
|
||||||
}
|
}
|
||||||
|
|
||||||
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context())
|
cloudIntegrationDashboards, err := aH.CloudIntegrationsController.AvailableDashboards(r.Context(), claims.OrgID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to get cloud dashboards", zap.Error(err))
|
zap.L().Error("failed to get cloud dashboards", zap.Error(err))
|
||||||
} else {
|
} else {
|
||||||
@@ -1267,7 +1267,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
|
|||||||
|
|
||||||
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(uuid) {
|
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(uuid) {
|
||||||
dashboard, apiError = aH.CloudIntegrationsController.GetDashboardById(
|
dashboard, apiError = aH.CloudIntegrationsController.GetDashboardById(
|
||||||
r.Context(), uuid,
|
r.Context(), claims.OrgID, uuid,
|
||||||
)
|
)
|
||||||
if apiError != nil {
|
if apiError != nil {
|
||||||
RespondError(w, apiError, nil)
|
RespondError(w, apiError, nil)
|
||||||
@@ -1276,7 +1276,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) {
|
|||||||
|
|
||||||
} else {
|
} else {
|
||||||
dashboard, apiError = aH.IntegrationsController.GetInstalledIntegrationDashboardById(
|
dashboard, apiError = aH.IntegrationsController.GetInstalledIntegrationDashboardById(
|
||||||
r.Context(), uuid,
|
r.Context(), claims.OrgID, uuid,
|
||||||
)
|
)
|
||||||
if apiError != nil {
|
if apiError != nil {
|
||||||
RespondError(w, apiError, nil)
|
RespondError(w, apiError, nil)
|
||||||
@@ -2207,6 +2207,11 @@ func (aH *APIHandler) editUser(w http.ResponseWriter, r *http.Request) {
|
|||||||
old.ProfilePictureURL = update.ProfilePictureURL
|
old.ProfilePictureURL = update.ProfilePictureURL
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(old.Email)) {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
_, apiErr = dao.DB().EditUser(ctx, &types.User{
|
_, apiErr = dao.DB().EditUser(ctx, &types.User{
|
||||||
ID: old.ID,
|
ID: old.ID,
|
||||||
Name: old.Name,
|
Name: old.Name,
|
||||||
@@ -2238,6 +2243,11 @@ func (aH *APIHandler) deleteUser(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email)) {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "integration user cannot be updated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if user == nil {
|
if user == nil {
|
||||||
RespondError(w, &model.ApiError{
|
RespondError(w, &model.ApiError{
|
||||||
Typ: model.ErrorNotFound,
|
Typ: model.ErrorNotFound,
|
||||||
@@ -3497,9 +3507,14 @@ func (aH *APIHandler) ListIntegrations(
|
|||||||
for k, values := range r.URL.Query() {
|
for k, values := range r.URL.Query() {
|
||||||
params[k] = values[0]
|
params[k] = values[0]
|
||||||
}
|
}
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
resp, apiErr := aH.IntegrationsController.ListIntegrations(
|
resp, apiErr := aH.IntegrationsController.ListIntegrations(
|
||||||
r.Context(), params,
|
r.Context(), claims.OrgID, params,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, "Failed to fetch integrations")
|
RespondError(w, apiErr, "Failed to fetch integrations")
|
||||||
@@ -3512,8 +3527,13 @@ func (aH *APIHandler) GetIntegration(
|
|||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
integrationId := mux.Vars(r)["integrationId"]
|
integrationId := mux.Vars(r)["integrationId"]
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
integration, apiErr := aH.IntegrationsController.GetIntegration(
|
integration, apiErr := aH.IntegrationsController.GetIntegration(
|
||||||
r.Context(), integrationId,
|
r.Context(), claims.OrgID, integrationId,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, "Failed to fetch integration details")
|
RespondError(w, apiErr, "Failed to fetch integration details")
|
||||||
@@ -3527,8 +3547,13 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
|
|||||||
w http.ResponseWriter, r *http.Request,
|
w http.ResponseWriter, r *http.Request,
|
||||||
) {
|
) {
|
||||||
integrationId := mux.Vars(r)["integrationId"]
|
integrationId := mux.Vars(r)["integrationId"]
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
isInstalled, apiErr := aH.IntegrationsController.IsIntegrationInstalled(
|
isInstalled, apiErr := aH.IntegrationsController.IsIntegrationInstalled(
|
||||||
r.Context(), integrationId,
|
r.Context(), claims.OrgID, integrationId,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, "failed to check if integration is installed")
|
RespondError(w, apiErr, "failed to check if integration is installed")
|
||||||
@@ -3542,7 +3567,7 @@ func (aH *APIHandler) GetIntegrationConnectionStatus(
|
|||||||
}
|
}
|
||||||
|
|
||||||
connectionTests, apiErr := aH.IntegrationsController.GetIntegrationConnectionTests(
|
connectionTests, apiErr := aH.IntegrationsController.GetIntegrationConnectionTests(
|
||||||
r.Context(), integrationId,
|
r.Context(), claims.OrgID, integrationId,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, "failed to fetch integration connection tests")
|
RespondError(w, apiErr, "failed to fetch integration connection tests")
|
||||||
@@ -3741,8 +3766,14 @@ func (aH *APIHandler) InstallIntegration(
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
integration, apiErr := aH.IntegrationsController.Install(
|
integration, apiErr := aH.IntegrationsController.Install(
|
||||||
r.Context(), &req,
|
r.Context(), claims.OrgID, &req,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, nil)
|
RespondError(w, apiErr, nil)
|
||||||
@@ -3763,7 +3794,13 @@ func (aH *APIHandler) UninstallIntegration(
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
apiErr := aH.IntegrationsController.Uninstall(r.Context(), &req)
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
apiErr := aH.IntegrationsController.Uninstall(r.Context(), claims.OrgID, &req)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, nil)
|
RespondError(w, apiErr, nil)
|
||||||
return
|
return
|
||||||
@@ -3819,8 +3856,14 @@ func (aH *APIHandler) CloudIntegrationsListConnectedAccounts(
|
|||||||
) {
|
) {
|
||||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
|
resp, apiErr := aH.CloudIntegrationsController.ListConnectedAccounts(
|
||||||
r.Context(), cloudProvider,
|
r.Context(), claims.OrgID, cloudProvider,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -3841,8 +3884,14 @@ func (aH *APIHandler) CloudIntegrationsGenerateConnectionUrl(
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
|
result, apiErr := aH.CloudIntegrationsController.GenerateConnectionUrl(
|
||||||
r.Context(), cloudProvider, req,
|
r.Context(), claims.OrgID, cloudProvider, req,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -3859,8 +3908,14 @@ func (aH *APIHandler) CloudIntegrationsGetAccountStatus(
|
|||||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||||
accountId := mux.Vars(r)["accountId"]
|
accountId := mux.Vars(r)["accountId"]
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
|
resp, apiErr := aH.CloudIntegrationsController.GetAccountStatus(
|
||||||
r.Context(), cloudProvider, accountId,
|
r.Context(), claims.OrgID, cloudProvider, accountId,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -3881,8 +3936,14 @@ func (aH *APIHandler) CloudIntegrationsAgentCheckIn(
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
result, apiErr := aH.CloudIntegrationsController.CheckInAsAgent(
|
result, apiErr := aH.CloudIntegrationsController.CheckInAsAgent(
|
||||||
r.Context(), cloudProvider, req,
|
r.Context(), claims.OrgID, cloudProvider, req,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -3905,8 +3966,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateAccountConfig(
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
|
result, apiErr := aH.CloudIntegrationsController.UpdateAccountConfig(
|
||||||
r.Context(), cloudProvider, accountId, req,
|
r.Context(), claims.OrgID, cloudProvider, accountId, req,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -3923,8 +3990,14 @@ func (aH *APIHandler) CloudIntegrationsDisconnectAccount(
|
|||||||
cloudProvider := mux.Vars(r)["cloudProvider"]
|
cloudProvider := mux.Vars(r)["cloudProvider"]
|
||||||
accountId := mux.Vars(r)["accountId"]
|
accountId := mux.Vars(r)["accountId"]
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
|
result, apiErr := aH.CloudIntegrationsController.DisconnectAccount(
|
||||||
r.Context(), cloudProvider, accountId,
|
r.Context(), claims.OrgID, cloudProvider, accountId,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -3947,8 +4020,14 @@ func (aH *APIHandler) CloudIntegrationsListServices(
|
|||||||
cloudAccountId = &cloudAccountIdQP
|
cloudAccountId = &cloudAccountIdQP
|
||||||
}
|
}
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
resp, apiErr := aH.CloudIntegrationsController.ListServices(
|
resp, apiErr := aH.CloudIntegrationsController.ListServices(
|
||||||
r.Context(), cloudProvider, cloudAccountId,
|
r.Context(), claims.OrgID, cloudProvider, cloudAccountId,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -3971,8 +4050,14 @@ func (aH *APIHandler) CloudIntegrationsGetServiceDetails(
|
|||||||
cloudAccountId = &cloudAccountIdQP
|
cloudAccountId = &cloudAccountIdQP
|
||||||
}
|
}
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
resp, apiErr := aH.CloudIntegrationsController.GetServiceDetails(
|
resp, apiErr := aH.CloudIntegrationsController.GetServiceDetails(
|
||||||
r.Context(), cloudProvider, serviceId, cloudAccountId,
|
r.Context(), claims.OrgID, cloudProvider, serviceId, cloudAccountId,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
RespondError(w, apiErr, nil)
|
RespondError(w, apiErr, nil)
|
||||||
@@ -4211,8 +4296,14 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||||
|
if !ok {
|
||||||
|
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
result, apiErr := aH.CloudIntegrationsController.UpdateServiceConfig(
|
result, apiErr := aH.CloudIntegrationsController.UpdateServiceConfig(
|
||||||
r.Context(), cloudProvider, serviceId, req,
|
r.Context(), claims.OrgID, cloudProvider, serviceId, req,
|
||||||
)
|
)
|
||||||
|
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ type Controller struct {
|
|||||||
func NewController(sqlStore sqlstore.SQLStore) (
|
func NewController(sqlStore sqlstore.SQLStore) (
|
||||||
*Controller, error,
|
*Controller, error,
|
||||||
) {
|
) {
|
||||||
mgr, err := NewManager(sqlStore.SQLxDB())
|
mgr, err := NewManager(sqlStore)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("couldn't create integrations manager: %w", err)
|
return nil, fmt.Errorf("couldn't create integrations manager: %w", err)
|
||||||
}
|
}
|
||||||
@@ -35,7 +35,7 @@ type IntegrationsListResponse struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) ListIntegrations(
|
func (c *Controller) ListIntegrations(
|
||||||
ctx context.Context, params map[string]string,
|
ctx context.Context, orgId string, params map[string]string,
|
||||||
) (
|
) (
|
||||||
*IntegrationsListResponse, *model.ApiError,
|
*IntegrationsListResponse, *model.ApiError,
|
||||||
) {
|
) {
|
||||||
@@ -47,7 +47,7 @@ func (c *Controller) ListIntegrations(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
integrations, apiErr := c.mgr.ListIntegrations(ctx, filters)
|
integrations, apiErr := c.mgr.ListIntegrations(ctx, orgId, filters)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
@@ -58,16 +58,15 @@ func (c *Controller) ListIntegrations(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) GetIntegration(
|
func (c *Controller) GetIntegration(
|
||||||
ctx context.Context, integrationId string,
|
ctx context.Context, orgId string, integrationId string,
|
||||||
) (*Integration, *model.ApiError) {
|
) (*Integration, *model.ApiError) {
|
||||||
return c.mgr.GetIntegration(ctx, integrationId)
|
return c.mgr.GetIntegration(ctx, orgId, integrationId)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) IsIntegrationInstalled(
|
func (c *Controller) IsIntegrationInstalled(
|
||||||
ctx context.Context,
|
ctx context.Context, orgId string, integrationId string,
|
||||||
integrationId string,
|
|
||||||
) (bool, *model.ApiError) {
|
) (bool, *model.ApiError) {
|
||||||
installation, apiErr := c.mgr.getInstalledIntegration(ctx, integrationId)
|
installation, apiErr := c.mgr.getInstalledIntegration(ctx, orgId, integrationId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return false, apiErr
|
return false, apiErr
|
||||||
}
|
}
|
||||||
@@ -76,9 +75,9 @@ func (c *Controller) IsIntegrationInstalled(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) GetIntegrationConnectionTests(
|
func (c *Controller) GetIntegrationConnectionTests(
|
||||||
ctx context.Context, integrationId string,
|
ctx context.Context, orgId string, integrationId string,
|
||||||
) (*IntegrationConnectionTests, *model.ApiError) {
|
) (*IntegrationConnectionTests, *model.ApiError) {
|
||||||
return c.mgr.GetIntegrationConnectionTests(ctx, integrationId)
|
return c.mgr.GetIntegrationConnectionTests(ctx, orgId, integrationId)
|
||||||
}
|
}
|
||||||
|
|
||||||
type InstallIntegrationRequest struct {
|
type InstallIntegrationRequest struct {
|
||||||
@@ -87,10 +86,10 @@ type InstallIntegrationRequest struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) Install(
|
func (c *Controller) Install(
|
||||||
ctx context.Context, req *InstallIntegrationRequest,
|
ctx context.Context, orgId string, req *InstallIntegrationRequest,
|
||||||
) (*IntegrationsListItem, *model.ApiError) {
|
) (*IntegrationsListItem, *model.ApiError) {
|
||||||
res, apiErr := c.mgr.InstallIntegration(
|
res, apiErr := c.mgr.InstallIntegration(
|
||||||
ctx, req.IntegrationId, req.Config,
|
ctx, orgId, req.IntegrationId, req.Config,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
@@ -104,7 +103,7 @@ type UninstallIntegrationRequest struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) Uninstall(
|
func (c *Controller) Uninstall(
|
||||||
ctx context.Context, req *UninstallIntegrationRequest,
|
ctx context.Context, orgId string, req *UninstallIntegrationRequest,
|
||||||
) *model.ApiError {
|
) *model.ApiError {
|
||||||
if len(req.IntegrationId) < 1 {
|
if len(req.IntegrationId) < 1 {
|
||||||
return model.BadRequest(fmt.Errorf(
|
return model.BadRequest(fmt.Errorf(
|
||||||
@@ -113,7 +112,7 @@ func (c *Controller) Uninstall(
|
|||||||
}
|
}
|
||||||
|
|
||||||
apiErr := c.mgr.UninstallIntegration(
|
apiErr := c.mgr.UninstallIntegration(
|
||||||
ctx, req.IntegrationId,
|
ctx, orgId, req.IntegrationId,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return apiErr
|
return apiErr
|
||||||
@@ -123,19 +122,19 @@ func (c *Controller) Uninstall(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) GetPipelinesForInstalledIntegrations(
|
func (c *Controller) GetPipelinesForInstalledIntegrations(
|
||||||
ctx context.Context,
|
ctx context.Context, orgId string,
|
||||||
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
|
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
|
||||||
return c.mgr.GetPipelinesForInstalledIntegrations(ctx)
|
return c.mgr.GetPipelinesForInstalledIntegrations(ctx, orgId)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) GetDashboardsForInstalledIntegrations(
|
func (c *Controller) GetDashboardsForInstalledIntegrations(
|
||||||
ctx context.Context,
|
ctx context.Context, orgId string,
|
||||||
) ([]types.Dashboard, *model.ApiError) {
|
) ([]types.Dashboard, *model.ApiError) {
|
||||||
return c.mgr.GetDashboardsForInstalledIntegrations(ctx)
|
return c.mgr.GetDashboardsForInstalledIntegrations(ctx, orgId)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Controller) GetInstalledIntegrationDashboardById(
|
func (c *Controller) GetInstalledIntegrationDashboardById(
|
||||||
ctx context.Context, dashboardUuid string,
|
ctx context.Context, orgId string, dashboardUuid string,
|
||||||
) (*types.Dashboard, *model.ApiError) {
|
) (*types.Dashboard, *model.ApiError) {
|
||||||
return c.mgr.GetInstalledIntegrationDashboardById(ctx, dashboardUuid)
|
return c.mgr.GetInstalledIntegrationDashboardById(ctx, orgId, dashboardUuid)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,15 +5,14 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/rules"
|
"github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||||
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/types"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/jmoiron/sqlx"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type IntegrationAuthor struct {
|
type IntegrationAuthor struct {
|
||||||
@@ -105,16 +104,9 @@ type IntegrationsListItem struct {
|
|||||||
IsInstalled bool `json:"is_installed"`
|
IsInstalled bool `json:"is_installed"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type InstalledIntegration struct {
|
|
||||||
IntegrationId string `json:"integration_id" db:"integration_id"`
|
|
||||||
Config InstalledIntegrationConfig `json:"config_json" db:"config_json"`
|
|
||||||
InstalledAt time.Time `json:"installed_at" db:"installed_at"`
|
|
||||||
}
|
|
||||||
type InstalledIntegrationConfig map[string]interface{}
|
|
||||||
|
|
||||||
type Integration struct {
|
type Integration struct {
|
||||||
IntegrationDetails
|
IntegrationDetails
|
||||||
Installation *InstalledIntegration `json:"installation"`
|
Installation *types.InstalledIntegration `json:"installation"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type Manager struct {
|
type Manager struct {
|
||||||
@@ -122,8 +114,8 @@ type Manager struct {
|
|||||||
installedIntegrationsRepo InstalledIntegrationsRepo
|
installedIntegrationsRepo InstalledIntegrationsRepo
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewManager(db *sqlx.DB) (*Manager, error) {
|
func NewManager(store sqlstore.SQLStore) (*Manager, error) {
|
||||||
iiRepo, err := NewInstalledIntegrationsSqliteRepo(db)
|
iiRepo, err := NewInstalledIntegrationsSqliteRepo(store)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf(
|
return nil, fmt.Errorf(
|
||||||
"could not init sqlite DB for installed integrations: %w", err,
|
"could not init sqlite DB for installed integrations: %w", err,
|
||||||
@@ -142,6 +134,7 @@ type IntegrationsFilter struct {
|
|||||||
|
|
||||||
func (m *Manager) ListIntegrations(
|
func (m *Manager) ListIntegrations(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
filter *IntegrationsFilter,
|
filter *IntegrationsFilter,
|
||||||
// Expected to have pagination over time.
|
// Expected to have pagination over time.
|
||||||
) ([]IntegrationsListItem, *model.ApiError) {
|
) ([]IntegrationsListItem, *model.ApiError) {
|
||||||
@@ -152,22 +145,22 @@ func (m *Manager) ListIntegrations(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
installed, apiErr := m.installedIntegrationsRepo.list(ctx)
|
installed, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
apiErr, "could not fetch installed integrations",
|
apiErr, "could not fetch installed integrations",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
installedIds := []string{}
|
installedTypes := []string{}
|
||||||
for _, ii := range installed {
|
for _, ii := range installed {
|
||||||
installedIds = append(installedIds, ii.IntegrationId)
|
installedTypes = append(installedTypes, ii.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
result := []IntegrationsListItem{}
|
result := []IntegrationsListItem{}
|
||||||
for _, ai := range available {
|
for _, ai := range available {
|
||||||
result = append(result, IntegrationsListItem{
|
result = append(result, IntegrationsListItem{
|
||||||
IntegrationSummary: ai.IntegrationSummary,
|
IntegrationSummary: ai.IntegrationSummary,
|
||||||
IsInstalled: slices.Contains(installedIds, ai.Id),
|
IsInstalled: slices.Contains(installedTypes, ai.Id),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -188,6 +181,7 @@ func (m *Manager) ListIntegrations(
|
|||||||
|
|
||||||
func (m *Manager) GetIntegration(
|
func (m *Manager) GetIntegration(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
integrationId string,
|
integrationId string,
|
||||||
) (*Integration, *model.ApiError) {
|
) (*Integration, *model.ApiError) {
|
||||||
integrationDetails, apiErr := m.getIntegrationDetails(
|
integrationDetails, apiErr := m.getIntegrationDetails(
|
||||||
@@ -198,7 +192,7 @@ func (m *Manager) GetIntegration(
|
|||||||
}
|
}
|
||||||
|
|
||||||
installation, apiErr := m.getInstalledIntegration(
|
installation, apiErr := m.getInstalledIntegration(
|
||||||
ctx, integrationId,
|
ctx, orgId, integrationId,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
@@ -212,6 +206,7 @@ func (m *Manager) GetIntegration(
|
|||||||
|
|
||||||
func (m *Manager) GetIntegrationConnectionTests(
|
func (m *Manager) GetIntegrationConnectionTests(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
integrationId string,
|
integrationId string,
|
||||||
) (*IntegrationConnectionTests, *model.ApiError) {
|
) (*IntegrationConnectionTests, *model.ApiError) {
|
||||||
integrationDetails, apiErr := m.getIntegrationDetails(
|
integrationDetails, apiErr := m.getIntegrationDetails(
|
||||||
@@ -225,8 +220,9 @@ func (m *Manager) GetIntegrationConnectionTests(
|
|||||||
|
|
||||||
func (m *Manager) InstallIntegration(
|
func (m *Manager) InstallIntegration(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
integrationId string,
|
integrationId string,
|
||||||
config InstalledIntegrationConfig,
|
config types.InstalledIntegrationConfig,
|
||||||
) (*IntegrationsListItem, *model.ApiError) {
|
) (*IntegrationsListItem, *model.ApiError) {
|
||||||
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
|
integrationDetails, apiErr := m.getIntegrationDetails(ctx, integrationId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
@@ -234,7 +230,7 @@ func (m *Manager) InstallIntegration(
|
|||||||
}
|
}
|
||||||
|
|
||||||
_, apiErr = m.installedIntegrationsRepo.upsert(
|
_, apiErr = m.installedIntegrationsRepo.upsert(
|
||||||
ctx, integrationId, config,
|
ctx, orgId, integrationId, config,
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
@@ -250,15 +246,17 @@ func (m *Manager) InstallIntegration(
|
|||||||
|
|
||||||
func (m *Manager) UninstallIntegration(
|
func (m *Manager) UninstallIntegration(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
integrationId string,
|
integrationId string,
|
||||||
) *model.ApiError {
|
) *model.ApiError {
|
||||||
return m.installedIntegrationsRepo.delete(ctx, integrationId)
|
return m.installedIntegrationsRepo.delete(ctx, orgId, integrationId)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *Manager) GetPipelinesForInstalledIntegrations(
|
func (m *Manager) GetPipelinesForInstalledIntegrations(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
|
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
|
||||||
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
|
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
@@ -308,6 +306,7 @@ func (m *Manager) parseDashboardUuid(dashboardUuid string) (
|
|||||||
|
|
||||||
func (m *Manager) GetInstalledIntegrationDashboardById(
|
func (m *Manager) GetInstalledIntegrationDashboardById(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
dashboardUuid string,
|
dashboardUuid string,
|
||||||
) (*types.Dashboard, *model.ApiError) {
|
) (*types.Dashboard, *model.ApiError) {
|
||||||
integrationId, dashboardId, apiErr := m.parseDashboardUuid(dashboardUuid)
|
integrationId, dashboardId, apiErr := m.parseDashboardUuid(dashboardUuid)
|
||||||
@@ -315,7 +314,7 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
|
|||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
integration, apiErr := m.GetIntegration(ctx, integrationId)
|
integration, apiErr := m.GetIntegration(ctx, orgId, integrationId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
@@ -355,8 +354,9 @@ func (m *Manager) GetInstalledIntegrationDashboardById(
|
|||||||
|
|
||||||
func (m *Manager) GetDashboardsForInstalledIntegrations(
|
func (m *Manager) GetDashboardsForInstalledIntegrations(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
) ([]types.Dashboard, *model.ApiError) {
|
) ([]types.Dashboard, *model.ApiError) {
|
||||||
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx)
|
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
@@ -421,10 +421,11 @@ func (m *Manager) getIntegrationDetails(
|
|||||||
|
|
||||||
func (m *Manager) getInstalledIntegration(
|
func (m *Manager) getInstalledIntegration(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
integrationId string,
|
integrationId string,
|
||||||
) (*InstalledIntegration, *model.ApiError) {
|
) (*types.InstalledIntegration, *model.ApiError) {
|
||||||
iis, apiErr := m.installedIntegrationsRepo.get(
|
iis, apiErr := m.installedIntegrationsRepo.get(
|
||||||
ctx, []string{integrationId},
|
ctx, orgId, []string{integrationId},
|
||||||
)
|
)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(apiErr, fmt.Sprintf(
|
return nil, model.WrapApiError(apiErr, fmt.Sprintf(
|
||||||
@@ -441,32 +442,33 @@ func (m *Manager) getInstalledIntegration(
|
|||||||
|
|
||||||
func (m *Manager) getInstalledIntegrations(
|
func (m *Manager) getInstalledIntegrations(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
orgId string,
|
||||||
) (
|
) (
|
||||||
map[string]Integration, *model.ApiError,
|
map[string]Integration, *model.ApiError,
|
||||||
) {
|
) {
|
||||||
installations, apiErr := m.installedIntegrationsRepo.list(ctx)
|
installations, apiErr := m.installedIntegrationsRepo.list(ctx, orgId)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
installedIds := utils.MapSlice(installations, func(i InstalledIntegration) string {
|
installedTypes := utils.MapSlice(installations, func(i types.InstalledIntegration) string {
|
||||||
return i.IntegrationId
|
return i.Type
|
||||||
})
|
})
|
||||||
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedIds)
|
integrationDetails, apiErr := m.availableIntegrationsRepo.get(ctx, installedTypes)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, apiErr
|
return nil, apiErr
|
||||||
}
|
}
|
||||||
|
|
||||||
result := map[string]Integration{}
|
result := map[string]Integration{}
|
||||||
for _, ii := range installations {
|
for _, ii := range installations {
|
||||||
iDetails, exists := integrationDetails[ii.IntegrationId]
|
iDetails, exists := integrationDetails[ii.Type]
|
||||||
if !exists {
|
if !exists {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"couldn't find integration details for %s", ii.IntegrationId,
|
"couldn't find integration details for %s", ii.Type,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
result[ii.IntegrationId] = Integration{
|
result[ii.Type] = Integration{
|
||||||
Installation: &ii,
|
Installation: &ii,
|
||||||
IntegrationDetails: iDetails,
|
IntegrationDetails: iDetails,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,18 +14,23 @@ func TestIntegrationLifecycle(t *testing.T) {
|
|||||||
mgr := NewTestIntegrationsManager(t)
|
mgr := NewTestIntegrationsManager(t)
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
|
||||||
|
user, apiErr := createTestUser()
|
||||||
|
if apiErr != nil {
|
||||||
|
t.Fatalf("could not create test user: %v", apiErr)
|
||||||
|
}
|
||||||
|
|
||||||
ii := true
|
ii := true
|
||||||
installedIntegrationsFilter := &IntegrationsFilter{
|
installedIntegrationsFilter := &IntegrationsFilter{
|
||||||
IsInstalled: &ii,
|
IsInstalled: &ii,
|
||||||
}
|
}
|
||||||
|
|
||||||
installedIntegrations, apiErr := mgr.ListIntegrations(
|
installedIntegrations, apiErr := mgr.ListIntegrations(
|
||||||
ctx, installedIntegrationsFilter,
|
ctx, user.OrgID, installedIntegrationsFilter,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal([]IntegrationsListItem{}, installedIntegrations)
|
require.Equal([]IntegrationsListItem{}, installedIntegrations)
|
||||||
|
|
||||||
availableIntegrations, apiErr := mgr.ListIntegrations(ctx, nil)
|
availableIntegrations, apiErr := mgr.ListIntegrations(ctx, user.OrgID, nil)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(2, len(availableIntegrations))
|
require.Equal(2, len(availableIntegrations))
|
||||||
require.False(availableIntegrations[0].IsInstalled)
|
require.False(availableIntegrations[0].IsInstalled)
|
||||||
@@ -33,44 +38,44 @@ func TestIntegrationLifecycle(t *testing.T) {
|
|||||||
|
|
||||||
testIntegrationConfig := map[string]interface{}{}
|
testIntegrationConfig := map[string]interface{}{}
|
||||||
installed, apiErr := mgr.InstallIntegration(
|
installed, apiErr := mgr.InstallIntegration(
|
||||||
ctx, availableIntegrations[1].Id, testIntegrationConfig,
|
ctx, user.OrgID, availableIntegrations[1].Id, testIntegrationConfig,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(installed.Id, availableIntegrations[1].Id)
|
require.Equal(installed.Id, availableIntegrations[1].Id)
|
||||||
|
|
||||||
integration, apiErr := mgr.GetIntegration(ctx, availableIntegrations[1].Id)
|
integration, apiErr := mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(integration.Id, availableIntegrations[1].Id)
|
require.Equal(integration.Id, availableIntegrations[1].Id)
|
||||||
require.NotNil(integration.Installation)
|
require.NotNil(integration.Installation)
|
||||||
|
|
||||||
installedIntegrations, apiErr = mgr.ListIntegrations(
|
installedIntegrations, apiErr = mgr.ListIntegrations(
|
||||||
ctx, installedIntegrationsFilter,
|
ctx, user.OrgID, installedIntegrationsFilter,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(1, len(installedIntegrations))
|
require.Equal(1, len(installedIntegrations))
|
||||||
require.Equal(availableIntegrations[1].Id, installedIntegrations[0].Id)
|
require.Equal(availableIntegrations[1].Id, installedIntegrations[0].Id)
|
||||||
|
|
||||||
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
|
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(2, len(availableIntegrations))
|
require.Equal(2, len(availableIntegrations))
|
||||||
require.False(availableIntegrations[0].IsInstalled)
|
require.False(availableIntegrations[0].IsInstalled)
|
||||||
require.True(availableIntegrations[1].IsInstalled)
|
require.True(availableIntegrations[1].IsInstalled)
|
||||||
|
|
||||||
apiErr = mgr.UninstallIntegration(ctx, installed.Id)
|
apiErr = mgr.UninstallIntegration(ctx, user.OrgID, installed.Id)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
|
|
||||||
integration, apiErr = mgr.GetIntegration(ctx, availableIntegrations[1].Id)
|
integration, apiErr = mgr.GetIntegration(ctx, user.OrgID, availableIntegrations[1].Id)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(integration.Id, availableIntegrations[1].Id)
|
require.Equal(integration.Id, availableIntegrations[1].Id)
|
||||||
require.Nil(integration.Installation)
|
require.Nil(integration.Installation)
|
||||||
|
|
||||||
installedIntegrations, apiErr = mgr.ListIntegrations(
|
installedIntegrations, apiErr = mgr.ListIntegrations(
|
||||||
ctx, installedIntegrationsFilter,
|
ctx, user.OrgID, installedIntegrationsFilter,
|
||||||
)
|
)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(0, len(installedIntegrations))
|
require.Equal(0, len(installedIntegrations))
|
||||||
|
|
||||||
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, nil)
|
availableIntegrations, apiErr = mgr.ListIntegrations(ctx, user.OrgID, nil)
|
||||||
require.Nil(apiErr)
|
require.Nil(apiErr)
|
||||||
require.Equal(2, len(availableIntegrations))
|
require.Equal(2, len(availableIntegrations))
|
||||||
require.False(availableIntegrations[0].IsInstalled)
|
require.False(availableIntegrations[0].IsInstalled)
|
||||||
|
|||||||
@@ -2,51 +2,33 @@ package integrations
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"database/sql/driver"
|
|
||||||
"encoding/json"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
// For serializing from db
|
|
||||||
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
|
|
||||||
if data, ok := src.([]byte); ok {
|
|
||||||
return json.Unmarshal(data, &c)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// For serializing to db
|
|
||||||
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
|
|
||||||
filterSetJson, err := json.Marshal(c)
|
|
||||||
if err != nil {
|
|
||||||
return nil, errors.Wrap(err, "could not serialize integration config to JSON")
|
|
||||||
}
|
|
||||||
return filterSetJson, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type InstalledIntegrationsRepo interface {
|
type InstalledIntegrationsRepo interface {
|
||||||
list(context.Context) ([]InstalledIntegration, *model.ApiError)
|
list(ctx context.Context, orgId string) ([]types.InstalledIntegration, *model.ApiError)
|
||||||
|
|
||||||
get(
|
get(
|
||||||
ctx context.Context, integrationIds []string,
|
ctx context.Context, orgId string, integrationTypes []string,
|
||||||
) (map[string]InstalledIntegration, *model.ApiError)
|
) (map[string]types.InstalledIntegration, *model.ApiError)
|
||||||
|
|
||||||
upsert(
|
upsert(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
integrationId string,
|
orgId string,
|
||||||
config InstalledIntegrationConfig,
|
integrationType string,
|
||||||
) (*InstalledIntegration, *model.ApiError)
|
config types.InstalledIntegrationConfig,
|
||||||
|
) (*types.InstalledIntegration, *model.ApiError)
|
||||||
|
|
||||||
delete(ctx context.Context, integrationId string) *model.ApiError
|
delete(ctx context.Context, orgId string, integrationType string) *model.ApiError
|
||||||
}
|
}
|
||||||
|
|
||||||
type AvailableIntegrationsRepo interface {
|
type AvailableIntegrationsRepo interface {
|
||||||
list(context.Context) ([]IntegrationDetails, *model.ApiError)
|
list(context.Context) ([]IntegrationDetails, *model.ApiError)
|
||||||
|
|
||||||
get(
|
get(
|
||||||
ctx context.Context, integrationIds []string,
|
ctx context.Context, integrationTypes []string,
|
||||||
) (map[string]IntegrationDetails, *model.ApiError)
|
) (map[string]IntegrationDetails, *model.ApiError)
|
||||||
|
|
||||||
// AvailableIntegrationsRepo implementations are expected to cache
|
// AvailableIntegrationsRepo implementations are expected to cache
|
||||||
|
|||||||
@@ -3,39 +3,37 @@ package integrations
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/jmoiron/sqlx"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
)
|
)
|
||||||
|
|
||||||
type InstalledIntegrationsSqliteRepo struct {
|
type InstalledIntegrationsSqliteRepo struct {
|
||||||
db *sqlx.DB
|
store sqlstore.SQLStore
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewInstalledIntegrationsSqliteRepo(db *sqlx.DB) (
|
func NewInstalledIntegrationsSqliteRepo(store sqlstore.SQLStore) (
|
||||||
*InstalledIntegrationsSqliteRepo, error,
|
*InstalledIntegrationsSqliteRepo, error,
|
||||||
) {
|
) {
|
||||||
return &InstalledIntegrationsSqliteRepo{
|
return &InstalledIntegrationsSqliteRepo{
|
||||||
db: db,
|
store: store,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *InstalledIntegrationsSqliteRepo) list(
|
func (r *InstalledIntegrationsSqliteRepo) list(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
) ([]InstalledIntegration, *model.ApiError) {
|
orgId string,
|
||||||
integrations := []InstalledIntegration{}
|
) ([]types.InstalledIntegration, *model.ApiError) {
|
||||||
|
integrations := []types.InstalledIntegration{}
|
||||||
|
|
||||||
err := r.db.SelectContext(
|
err := r.store.BunDB().NewSelect().
|
||||||
ctx, &integrations, `
|
Model(&integrations).
|
||||||
select
|
Where("org_id = ?", orgId).
|
||||||
integration_id,
|
Order("installed_at").
|
||||||
config_json,
|
Scan(ctx)
|
||||||
installed_at
|
|
||||||
from integrations_installed
|
|
||||||
order by installed_at
|
|
||||||
`,
|
|
||||||
)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"could not query installed integrations: %w", err,
|
"could not query installed integrations: %w", err,
|
||||||
@@ -45,38 +43,28 @@ func (r *InstalledIntegrationsSqliteRepo) list(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (r *InstalledIntegrationsSqliteRepo) get(
|
func (r *InstalledIntegrationsSqliteRepo) get(
|
||||||
ctx context.Context, integrationIds []string,
|
ctx context.Context, orgId string, integrationTypes []string,
|
||||||
) (map[string]InstalledIntegration, *model.ApiError) {
|
) (map[string]types.InstalledIntegration, *model.ApiError) {
|
||||||
integrations := []InstalledIntegration{}
|
integrations := []types.InstalledIntegration{}
|
||||||
|
|
||||||
idPlaceholders := []string{}
|
typeValues := []interface{}{}
|
||||||
idValues := []interface{}{}
|
for _, integrationType := range integrationTypes {
|
||||||
for _, id := range integrationIds {
|
typeValues = append(typeValues, integrationType)
|
||||||
idPlaceholders = append(idPlaceholders, "?")
|
|
||||||
idValues = append(idValues, id)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
err := r.db.SelectContext(
|
err := r.store.BunDB().NewSelect().Model(&integrations).
|
||||||
ctx, &integrations, fmt.Sprintf(`
|
Where("org_id = ?", orgId).
|
||||||
select
|
Where("type IN (?)", bun.In(typeValues)).
|
||||||
integration_id,
|
Scan(ctx)
|
||||||
config_json,
|
|
||||||
installed_at
|
|
||||||
from integrations_installed
|
|
||||||
where integration_id in (%s)`,
|
|
||||||
strings.Join(idPlaceholders, ", "),
|
|
||||||
),
|
|
||||||
idValues...,
|
|
||||||
)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"could not query installed integrations: %w", err,
|
"could not query installed integrations: %w", err,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
result := map[string]InstalledIntegration{}
|
result := map[string]types.InstalledIntegration{}
|
||||||
for _, ii := range integrations {
|
for _, ii := range integrations {
|
||||||
result[ii.IntegrationId] = ii
|
result[ii.Type] = ii
|
||||||
}
|
}
|
||||||
|
|
||||||
return result, nil
|
return result, nil
|
||||||
@@ -84,55 +72,57 @@ func (r *InstalledIntegrationsSqliteRepo) get(
|
|||||||
|
|
||||||
func (r *InstalledIntegrationsSqliteRepo) upsert(
|
func (r *InstalledIntegrationsSqliteRepo) upsert(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
integrationId string,
|
orgId string,
|
||||||
config InstalledIntegrationConfig,
|
integrationType string,
|
||||||
) (*InstalledIntegration, *model.ApiError) {
|
config types.InstalledIntegrationConfig,
|
||||||
serializedConfig, err := config.Value()
|
) (*types.InstalledIntegration, *model.ApiError) {
|
||||||
if err != nil {
|
|
||||||
return nil, model.BadRequest(fmt.Errorf(
|
integration := types.InstalledIntegration{
|
||||||
"could not serialize integration config: %w", err,
|
Identifiable: types.Identifiable{
|
||||||
))
|
ID: valuer.GenerateUUID(),
|
||||||
|
},
|
||||||
|
OrgID: orgId,
|
||||||
|
Type: integrationType,
|
||||||
|
Config: config,
|
||||||
}
|
}
|
||||||
|
|
||||||
_, dbErr := r.db.ExecContext(
|
_, dbErr := r.store.BunDB().NewInsert().
|
||||||
ctx, `
|
Model(&integration).
|
||||||
INSERT INTO integrations_installed (
|
On("conflict (type, org_id) DO UPDATE").
|
||||||
integration_id,
|
Set("config = EXCLUDED.config").
|
||||||
config_json
|
Exec(ctx)
|
||||||
) values ($1, $2)
|
|
||||||
on conflict(integration_id) do update
|
|
||||||
set config_json=excluded.config_json
|
|
||||||
`, integrationId, serializedConfig,
|
|
||||||
)
|
|
||||||
if dbErr != nil {
|
if dbErr != nil {
|
||||||
return nil, model.InternalError(fmt.Errorf(
|
return nil, model.InternalError(fmt.Errorf(
|
||||||
"could not insert record for integration installation: %w", dbErr,
|
"could not insert record for integration installation: %w", dbErr,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
res, apiErr := r.get(ctx, []string{integrationId})
|
res, apiErr := r.get(ctx, orgId, []string{integrationType})
|
||||||
if apiErr != nil || len(res) < 1 {
|
if apiErr != nil || len(res) < 1 {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
apiErr, "could not fetch installed integration",
|
apiErr, "could not fetch installed integration",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
installed := res[integrationId]
|
installed := res[integrationType]
|
||||||
|
|
||||||
return &installed, nil
|
return &installed, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *InstalledIntegrationsSqliteRepo) delete(
|
func (r *InstalledIntegrationsSqliteRepo) delete(
|
||||||
ctx context.Context, integrationId string,
|
ctx context.Context, orgId string, integrationType string,
|
||||||
) *model.ApiError {
|
) *model.ApiError {
|
||||||
_, dbErr := r.db.ExecContext(ctx, `
|
_, dbErr := r.store.BunDB().NewDelete().
|
||||||
DELETE FROM integrations_installed where integration_id = ?
|
Model(&types.InstalledIntegration{}).
|
||||||
`, integrationId)
|
Where("type = ?", integrationType).
|
||||||
|
Where("org_id = ?", orgId).
|
||||||
|
Exec(ctx)
|
||||||
|
|
||||||
if dbErr != nil {
|
if dbErr != nil {
|
||||||
return model.InternalError(fmt.Errorf(
|
return model.InternalError(fmt.Errorf(
|
||||||
"could not delete installed integration record for %s: %w",
|
"could not delete installed integration record for %s: %w",
|
||||||
integrationId, dbErr,
|
integrationType, dbErr,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -5,18 +5,22 @@ import (
|
|||||||
"slices"
|
"slices"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||||
|
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||||
|
"github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/rules"
|
"github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||||
"github.com/SigNoz/signoz/pkg/types"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
||||||
|
"github.com/google/uuid"
|
||||||
)
|
)
|
||||||
|
|
||||||
func NewTestIntegrationsManager(t *testing.T) *Manager {
|
func NewTestIntegrationsManager(t *testing.T) *Manager {
|
||||||
testDB := utils.NewQueryServiceDBForTests(t)
|
testDB := utils.NewQueryServiceDBForTests(t)
|
||||||
|
|
||||||
installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB.SQLxDB())
|
installedIntegrationsRepo, err := NewInstalledIntegrationsSqliteRepo(testDB)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("could not init sqlite DB for installed integrations: %v", err)
|
t.Fatalf("could not init sqlite DB for installed integrations: %v", err)
|
||||||
}
|
}
|
||||||
@@ -27,6 +31,38 @@ func NewTestIntegrationsManager(t *testing.T) *Manager {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func createTestUser() (*types.User, *model.ApiError) {
|
||||||
|
// Create a test user for auth
|
||||||
|
ctx := context.Background()
|
||||||
|
org, apiErr := dao.DB().CreateOrg(ctx, &types.Organization{
|
||||||
|
Name: "test",
|
||||||
|
})
|
||||||
|
if apiErr != nil {
|
||||||
|
return nil, apiErr
|
||||||
|
}
|
||||||
|
|
||||||
|
group, apiErr := dao.DB().GetGroupByName(ctx, constants.AdminGroup)
|
||||||
|
if apiErr != nil {
|
||||||
|
return nil, apiErr
|
||||||
|
}
|
||||||
|
|
||||||
|
auth.InitAuthCache(ctx)
|
||||||
|
|
||||||
|
userId := uuid.NewString()
|
||||||
|
return dao.DB().CreateUser(
|
||||||
|
ctx,
|
||||||
|
&types.User{
|
||||||
|
ID: userId,
|
||||||
|
Name: "test",
|
||||||
|
Email: userId[:8] + "test@test.com",
|
||||||
|
Password: "test",
|
||||||
|
OrgID: org.ID,
|
||||||
|
GroupID: group.ID,
|
||||||
|
},
|
||||||
|
true,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
type TestAvailableIntegrationsRepo struct{}
|
type TestAvailableIntegrationsRepo struct{}
|
||||||
|
|
||||||
func (t *TestAvailableIntegrationsRepo) list(
|
func (t *TestAvailableIntegrationsRepo) list(
|
||||||
|
|||||||
@@ -25,12 +25,12 @@ import (
|
|||||||
type LogParsingPipelineController struct {
|
type LogParsingPipelineController struct {
|
||||||
Repo
|
Repo
|
||||||
|
|
||||||
GetIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError)
|
GetIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError)
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewLogParsingPipelinesController(
|
func NewLogParsingPipelinesController(
|
||||||
sqlStore sqlstore.SQLStore,
|
sqlStore sqlstore.SQLStore,
|
||||||
getIntegrationPipelines func(context.Context) ([]pipelinetypes.GettablePipeline, *model.ApiError),
|
getIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError),
|
||||||
) (*LogParsingPipelineController, error) {
|
) (*LogParsingPipelineController, error) {
|
||||||
repo := NewRepo(sqlStore)
|
repo := NewRepo(sqlStore)
|
||||||
return &LogParsingPipelineController{
|
return &LogParsingPipelineController{
|
||||||
@@ -164,7 +164,7 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
|
|||||||
result = savedPipelines
|
result = savedPipelines
|
||||||
}
|
}
|
||||||
|
|
||||||
integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx)
|
integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx, defaultOrgID)
|
||||||
if apiErr != nil {
|
if apiErr != nil {
|
||||||
return nil, model.WrapApiError(
|
return nil, model.WrapApiError(
|
||||||
apiErr, "could not get pipelines for installed integrations",
|
apiErr, "could not get pipelines for installed integrations",
|
||||||
|
|||||||
@@ -131,11 +131,9 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
operator.If = fmt.Sprintf(
|
operator.If = fmt.Sprintf(
|
||||||
`%s && (
|
`%s && %s matches "^\\s*{.*}\\s*$"`, parseFromNotNilCheck, operator.ParseFrom,
|
||||||
(typeOf(%s) == "string" && %s matches "^\\s*{.*}\\s*$" ) ||
|
|
||||||
typeOf(%s) == "map[string]any"
|
|
||||||
)`, parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
} else if operator.Type == "add" {
|
} else if operator.Type == "add" {
|
||||||
if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") {
|
if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") {
|
||||||
expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")")
|
expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")")
|
||||||
|
|||||||
@@ -646,7 +646,7 @@ func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
|
|||||||
require := require.New(t)
|
require := require.New(t)
|
||||||
|
|
||||||
testLogs := []model.SignozLog{
|
testLogs := []model.SignozLog{
|
||||||
makeTestSignozLog("test log", map[string]any{
|
makeTestSignozLog("test log", map[string]interface{}{
|
||||||
"http.method": "GET",
|
"http.method": "GET",
|
||||||
"order.products": `{"ids": ["pid0", "pid1"]}`,
|
"order.products": `{"ids": ["pid0", "pid1"]}`,
|
||||||
}),
|
}),
|
||||||
|
|||||||
@@ -719,6 +719,21 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
|
|||||||
aggregateOperator := v3.AggregateOperator(r.URL.Query().Get("aggregateOperator"))
|
aggregateOperator := v3.AggregateOperator(r.URL.Query().Get("aggregateOperator"))
|
||||||
aggregateAttribute := r.URL.Query().Get("aggregateAttribute")
|
aggregateAttribute := r.URL.Query().Get("aggregateAttribute")
|
||||||
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
|
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
|
||||||
|
tagType := v3.TagType(r.URL.Query().Get("tagType"))
|
||||||
|
|
||||||
|
// empty string is a valid tagType
|
||||||
|
// i.e retrieve all attributes
|
||||||
|
if tagType != "" {
|
||||||
|
// what is happening here?
|
||||||
|
// if tagType is undefined(uh oh javascript) or any invalid value, set it to empty string
|
||||||
|
// instead of failing the request. Ideally, we should fail the request.
|
||||||
|
// but we are not doing that to maintain backward compatibility.
|
||||||
|
if err := tagType.Validate(); err != nil {
|
||||||
|
// if the tagType is invalid, set it to empty string
|
||||||
|
tagType = ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
limit = 50
|
limit = 50
|
||||||
}
|
}
|
||||||
@@ -739,6 +754,7 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
|
|||||||
AggregateAttribute: aggregateAttribute,
|
AggregateAttribute: aggregateAttribute,
|
||||||
Limit: limit,
|
Limit: limit,
|
||||||
SearchText: r.URL.Query().Get("searchText"),
|
SearchText: r.URL.Query().Get("searchText"),
|
||||||
|
TagType: tagType,
|
||||||
}
|
}
|
||||||
return &req, nil
|
return &req, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -112,6 +112,7 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
|
|||||||
expectedSearchText string
|
expectedSearchText string
|
||||||
expectErr bool
|
expectErr bool
|
||||||
errMsg string
|
errMsg string
|
||||||
|
expectedTagType v3.TagType
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
desc: "valid operator and data source",
|
desc: "valid operator and data source",
|
||||||
@@ -168,6 +169,38 @@ func TestParseFilterAttributeKeyRequest(t *testing.T) {
|
|||||||
expectedDataSource: v3.DataSourceTraces,
|
expectedDataSource: v3.DataSourceTraces,
|
||||||
expectedLimit: 50,
|
expectedLimit: 50,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "invalid tag type",
|
||||||
|
queryString: "aggregateOperator=avg&dataSource=traces&tagType=invalid",
|
||||||
|
expectedOperator: v3.AggregateOperatorAvg,
|
||||||
|
expectedDataSource: v3.DataSourceTraces,
|
||||||
|
expectedTagType: "",
|
||||||
|
expectedLimit: 50,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "valid tag type",
|
||||||
|
queryString: "aggregateOperator=avg&dataSource=traces&tagType=resource",
|
||||||
|
expectedOperator: v3.AggregateOperatorAvg,
|
||||||
|
expectedDataSource: v3.DataSourceTraces,
|
||||||
|
expectedTagType: v3.TagTypeResource,
|
||||||
|
expectedLimit: 50,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "valid tag type",
|
||||||
|
queryString: "aggregateOperator=avg&dataSource=traces&tagType=scope",
|
||||||
|
expectedOperator: v3.AggregateOperatorAvg,
|
||||||
|
expectedDataSource: v3.DataSourceTraces,
|
||||||
|
expectedTagType: v3.TagTypeInstrumentationScope,
|
||||||
|
expectedLimit: 50,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "valid tag type",
|
||||||
|
queryString: "aggregateOperator=avg&dataSource=traces&tagType=tag",
|
||||||
|
expectedOperator: v3.AggregateOperatorAvg,
|
||||||
|
expectedDataSource: v3.DataSourceTraces,
|
||||||
|
expectedTagType: v3.TagTypeTag,
|
||||||
|
expectedLimit: 50,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, reqCase := range reqCases {
|
for _, reqCase := range reqCases {
|
||||||
|
|||||||
@@ -439,7 +439,7 @@ func RegisterFirstUser(ctx context.Context, req *RegisterRequest) (*types.User,
|
|||||||
}
|
}
|
||||||
|
|
||||||
user := &types.User{
|
user := &types.User{
|
||||||
ID: uuid.NewString(),
|
ID: uuid.New().String(),
|
||||||
Name: req.Name,
|
Name: req.Name,
|
||||||
Email: req.Email,
|
Email: req.Email,
|
||||||
Password: hash,
|
Password: hash,
|
||||||
@@ -519,7 +519,7 @@ func RegisterInvitedUser(ctx context.Context, req *RegisterRequest, nopassword b
|
|||||||
}
|
}
|
||||||
|
|
||||||
user := &types.User{
|
user := &types.User{
|
||||||
ID: uuid.NewString(),
|
ID: uuid.New().String(),
|
||||||
Name: req.Name,
|
Name: req.Name,
|
||||||
Email: req.Email,
|
Email: req.Email,
|
||||||
Password: hash,
|
Password: hash,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package auth
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
|
||||||
|
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/dao"
|
"github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||||
"github.com/SigNoz/signoz/pkg/types"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
@@ -51,7 +52,7 @@ func InitAuthCache(ctx context.Context) error {
|
|||||||
func GetUserFromReqContext(ctx context.Context) (*types.GettableUser, error) {
|
func GetUserFromReqContext(ctx context.Context) (*types.GettableUser, error) {
|
||||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, errors.New("no claims found in context")
|
return nil, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "no claims found in context")
|
||||||
}
|
}
|
||||||
|
|
||||||
user := &types.GettableUser{
|
user := &types.GettableUser{
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ type Feature struct {
|
|||||||
|
|
||||||
const UseSpanMetrics = "USE_SPAN_METRICS"
|
const UseSpanMetrics = "USE_SPAN_METRICS"
|
||||||
const AnomalyDetection = "ANOMALY_DETECTION"
|
const AnomalyDetection = "ANOMALY_DETECTION"
|
||||||
const TraceFunnels = "TRACE_FUNNELS"
|
|
||||||
|
|
||||||
var BasicPlan = FeatureSet{
|
var BasicPlan = FeatureSet{
|
||||||
Feature{
|
Feature{
|
||||||
@@ -28,11 +27,4 @@ var BasicPlan = FeatureSet{
|
|||||||
UsageLimit: -1,
|
UsageLimit: -1,
|
||||||
Route: "",
|
Route: "",
|
||||||
},
|
},
|
||||||
Feature{
|
|
||||||
Name: TraceFunnels,
|
|
||||||
Active: false,
|
|
||||||
Usage: 0,
|
|
||||||
UsageLimit: -1,
|
|
||||||
Route: "",
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -248,6 +248,7 @@ func (q TagType) Validate() error {
|
|||||||
type FilterAttributeKeyRequest struct {
|
type FilterAttributeKeyRequest struct {
|
||||||
DataSource DataSource `json:"dataSource"`
|
DataSource DataSource `json:"dataSource"`
|
||||||
AggregateOperator AggregateOperator `json:"aggregateOperator"`
|
AggregateOperator AggregateOperator `json:"aggregateOperator"`
|
||||||
|
TagType TagType `json:"tagType"`
|
||||||
AggregateAttribute string `json:"aggregateAttribute"`
|
AggregateAttribute string `json:"aggregateAttribute"`
|
||||||
SearchText string `json:"searchText"`
|
SearchText string `json:"searchText"`
|
||||||
Limit int `json:"limit"`
|
Limit int `json:"limit"`
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
|
|||||||
)
|
)
|
||||||
|
|
||||||
// Should be able to generate a connection url from UI - initializing an integration account
|
// Should be able to generate a connection url from UI - initializing an integration account
|
||||||
testAccountConfig := cloudintegrations.AccountConfig{
|
testAccountConfig := types.AccountConfig{
|
||||||
EnabledRegions: []string{"us-east-1", "us-east-2"},
|
EnabledRegions: []string{"us-east-1", "us-east-2"},
|
||||||
}
|
}
|
||||||
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
|
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
|
||||||
@@ -65,8 +65,8 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
|
|||||||
testAWSAccountId := "4563215233"
|
testAWSAccountId := "4563215233"
|
||||||
agentCheckInResp := testbed.CheckInAsAgentWithQS(
|
agentCheckInResp := testbed.CheckInAsAgentWithQS(
|
||||||
"aws", cloudintegrations.AgentCheckInRequest{
|
"aws", cloudintegrations.AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: testAWSAccountId,
|
AccountID: testAWSAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Equal(testAccountId, agentCheckInResp.AccountId)
|
require.Equal(testAccountId, agentCheckInResp.AccountId)
|
||||||
@@ -91,20 +91,20 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
|
|||||||
require.Equal(testAWSAccountId, accountsListResp2.Accounts[0].CloudAccountId)
|
require.Equal(testAWSAccountId, accountsListResp2.Accounts[0].CloudAccountId)
|
||||||
|
|
||||||
// Should be able to update account config from UI
|
// Should be able to update account config from UI
|
||||||
testAccountConfig2 := cloudintegrations.AccountConfig{
|
testAccountConfig2 := types.AccountConfig{
|
||||||
EnabledRegions: []string{"us-east-2", "us-west-1"},
|
EnabledRegions: []string{"us-east-2", "us-west-1"},
|
||||||
}
|
}
|
||||||
latestAccount := testbed.UpdateAccountConfigWithQS(
|
latestAccount := testbed.UpdateAccountConfigWithQS(
|
||||||
"aws", testAccountId, testAccountConfig2,
|
"aws", testAccountId, testAccountConfig2,
|
||||||
)
|
)
|
||||||
require.Equal(testAccountId, latestAccount.Id)
|
require.Equal(testAccountId, latestAccount.ID.StringValue())
|
||||||
require.Equal(testAccountConfig2, *latestAccount.Config)
|
require.Equal(testAccountConfig2, *latestAccount.Config)
|
||||||
|
|
||||||
// The agent should now receive latest account config.
|
// The agent should now receive latest account config.
|
||||||
agentCheckInResp1 := testbed.CheckInAsAgentWithQS(
|
agentCheckInResp1 := testbed.CheckInAsAgentWithQS(
|
||||||
"aws", cloudintegrations.AgentCheckInRequest{
|
"aws", cloudintegrations.AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: testAWSAccountId,
|
AccountID: testAWSAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Equal(testAccountId, agentCheckInResp1.AccountId)
|
require.Equal(testAccountId, agentCheckInResp1.AccountId)
|
||||||
@@ -114,14 +114,14 @@ func TestAWSIntegrationAccountLifecycle(t *testing.T) {
|
|||||||
// Should be able to disconnect/remove account from UI.
|
// Should be able to disconnect/remove account from UI.
|
||||||
tsBeforeDisconnect := time.Now()
|
tsBeforeDisconnect := time.Now()
|
||||||
latestAccount = testbed.DisconnectAccountWithQS("aws", testAccountId)
|
latestAccount = testbed.DisconnectAccountWithQS("aws", testAccountId)
|
||||||
require.Equal(testAccountId, latestAccount.Id)
|
require.Equal(testAccountId, latestAccount.ID.StringValue())
|
||||||
require.LessOrEqual(tsBeforeDisconnect, *latestAccount.RemovedAt)
|
require.LessOrEqual(tsBeforeDisconnect, *latestAccount.RemovedAt)
|
||||||
|
|
||||||
// The agent should receive the disconnected status in account config post disconnection
|
// The agent should receive the disconnected status in account config post disconnection
|
||||||
agentCheckInResp2 := testbed.CheckInAsAgentWithQS(
|
agentCheckInResp2 := testbed.CheckInAsAgentWithQS(
|
||||||
"aws", cloudintegrations.AgentCheckInRequest{
|
"aws", cloudintegrations.AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: testAWSAccountId,
|
AccountID: testAWSAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Equal(testAccountId, agentCheckInResp2.AccountId)
|
require.Equal(testAccountId, agentCheckInResp2.AccountId)
|
||||||
@@ -157,13 +157,13 @@ func TestAWSIntegrationServices(t *testing.T) {
|
|||||||
testAWSAccountId := "389389489489"
|
testAWSAccountId := "389389489489"
|
||||||
testbed.CheckInAsAgentWithQS(
|
testbed.CheckInAsAgentWithQS(
|
||||||
"aws", cloudintegrations.AgentCheckInRequest{
|
"aws", cloudintegrations.AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: testAWSAccountId,
|
AccountID: testAWSAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
testSvcConfig := cloudintegrations.CloudServiceConfig{
|
testSvcConfig := types.CloudServiceConfig{
|
||||||
Metrics: &cloudintegrations.CloudServiceMetricsConfig{
|
Metrics: &types.CloudServiceMetricsConfig{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -199,7 +199,7 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
|
|||||||
testbed := NewCloudIntegrationsTestBed(t, nil)
|
testbed := NewCloudIntegrationsTestBed(t, nil)
|
||||||
|
|
||||||
// configure a connected account
|
// configure a connected account
|
||||||
testAccountConfig := cloudintegrations.AccountConfig{
|
testAccountConfig := types.AccountConfig{
|
||||||
EnabledRegions: []string{"us-east-1", "us-east-2"},
|
EnabledRegions: []string{"us-east-1", "us-east-2"},
|
||||||
}
|
}
|
||||||
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
|
connectionUrlResp := testbed.GenerateConnectionUrlFromQS(
|
||||||
@@ -218,8 +218,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
|
|||||||
testAWSAccountId := "389389489489"
|
testAWSAccountId := "389389489489"
|
||||||
checkinResp := testbed.CheckInAsAgentWithQS(
|
checkinResp := testbed.CheckInAsAgentWithQS(
|
||||||
"aws", cloudintegrations.AgentCheckInRequest{
|
"aws", cloudintegrations.AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: testAWSAccountId,
|
AccountID: testAWSAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -237,14 +237,14 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
|
|||||||
|
|
||||||
// helper
|
// helper
|
||||||
setServiceConfig := func(svcId string, metricsEnabled bool, logsEnabled bool) {
|
setServiceConfig := func(svcId string, metricsEnabled bool, logsEnabled bool) {
|
||||||
testSvcConfig := cloudintegrations.CloudServiceConfig{}
|
testSvcConfig := types.CloudServiceConfig{}
|
||||||
if metricsEnabled {
|
if metricsEnabled {
|
||||||
testSvcConfig.Metrics = &cloudintegrations.CloudServiceMetricsConfig{
|
testSvcConfig.Metrics = &types.CloudServiceMetricsConfig{
|
||||||
Enabled: metricsEnabled,
|
Enabled: metricsEnabled,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if logsEnabled {
|
if logsEnabled {
|
||||||
testSvcConfig.Logs = &cloudintegrations.CloudServiceLogsConfig{
|
testSvcConfig.Logs = &types.CloudServiceLogsConfig{
|
||||||
Enabled: logsEnabled,
|
Enabled: logsEnabled,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -262,8 +262,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
|
|||||||
|
|
||||||
checkinResp = testbed.CheckInAsAgentWithQS(
|
checkinResp = testbed.CheckInAsAgentWithQS(
|
||||||
"aws", cloudintegrations.AgentCheckInRequest{
|
"aws", cloudintegrations.AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: testAWSAccountId,
|
AccountID: testAWSAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -292,13 +292,13 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
|
|||||||
require.True(strings.HasPrefix(logGroupPrefixes[0], "/aws/rds"))
|
require.True(strings.HasPrefix(logGroupPrefixes[0], "/aws/rds"))
|
||||||
|
|
||||||
// change regions and update service configs and validate config changes for agent
|
// change regions and update service configs and validate config changes for agent
|
||||||
testAccountConfig2 := cloudintegrations.AccountConfig{
|
testAccountConfig2 := types.AccountConfig{
|
||||||
EnabledRegions: []string{"us-east-2", "us-west-1"},
|
EnabledRegions: []string{"us-east-2", "us-west-1"},
|
||||||
}
|
}
|
||||||
latestAccount := testbed.UpdateAccountConfigWithQS(
|
latestAccount := testbed.UpdateAccountConfigWithQS(
|
||||||
"aws", testAccountId, testAccountConfig2,
|
"aws", testAccountId, testAccountConfig2,
|
||||||
)
|
)
|
||||||
require.Equal(testAccountId, latestAccount.Id)
|
require.Equal(testAccountId, latestAccount.ID.StringValue())
|
||||||
require.Equal(testAccountConfig2, *latestAccount.Config)
|
require.Equal(testAccountConfig2, *latestAccount.Config)
|
||||||
|
|
||||||
// disable metrics for one and logs for the other.
|
// disable metrics for one and logs for the other.
|
||||||
@@ -308,8 +308,8 @@ func TestConfigReturnedWhenAgentChecksIn(t *testing.T) {
|
|||||||
|
|
||||||
checkinResp = testbed.CheckInAsAgentWithQS(
|
checkinResp = testbed.CheckInAsAgentWithQS(
|
||||||
"aws", cloudintegrations.AgentCheckInRequest{
|
"aws", cloudintegrations.AgentCheckInRequest{
|
||||||
AccountId: testAccountId,
|
ID: testAccountId,
|
||||||
CloudAccountId: testAWSAccountId,
|
AccountID: testAWSAccountId,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
require.Equal(testAccountId, checkinResp.AccountId)
|
require.Equal(testAccountId, checkinResp.AccountId)
|
||||||
@@ -453,8 +453,8 @@ func (tb *CloudIntegrationsTestBed) CheckInAsAgentWithQS(
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
|
func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
|
||||||
cloudProvider string, accountId string, newConfig cloudintegrations.AccountConfig,
|
cloudProvider string, accountId string, newConfig types.AccountConfig,
|
||||||
) *cloudintegrations.AccountRecord {
|
) *types.CloudIntegration {
|
||||||
respDataJson := tb.RequestQS(
|
respDataJson := tb.RequestQS(
|
||||||
fmt.Sprintf(
|
fmt.Sprintf(
|
||||||
"/api/v1/cloud-integrations/%s/accounts/%s/config",
|
"/api/v1/cloud-integrations/%s/accounts/%s/config",
|
||||||
@@ -464,7 +464,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
var resp cloudintegrations.AccountRecord
|
var resp types.CloudIntegration
|
||||||
err := json.Unmarshal(respDataJson, &resp)
|
err := json.Unmarshal(respDataJson, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
|
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
|
||||||
@@ -475,7 +475,7 @@ func (tb *CloudIntegrationsTestBed) UpdateAccountConfigWithQS(
|
|||||||
|
|
||||||
func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
|
func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
|
||||||
cloudProvider string, accountId string,
|
cloudProvider string, accountId string,
|
||||||
) *cloudintegrations.AccountRecord {
|
) *types.CloudIntegration {
|
||||||
respDataJson := tb.RequestQS(
|
respDataJson := tb.RequestQS(
|
||||||
fmt.Sprintf(
|
fmt.Sprintf(
|
||||||
"/api/v1/cloud-integrations/%s/accounts/%s/disconnect",
|
"/api/v1/cloud-integrations/%s/accounts/%s/disconnect",
|
||||||
@@ -483,7 +483,7 @@ func (tb *CloudIntegrationsTestBed) DisconnectAccountWithQS(
|
|||||||
), map[string]any{},
|
), map[string]any{},
|
||||||
)
|
)
|
||||||
|
|
||||||
var resp cloudintegrations.AccountRecord
|
var resp types.CloudIntegration
|
||||||
err := json.Unmarshal(respDataJson, &resp)
|
err := json.Unmarshal(respDataJson, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
|
tb.t.Fatalf("could not unmarshal apiResponse.Data json into Account")
|
||||||
|
|||||||
@@ -166,6 +166,7 @@ func createTestUser() (*types.User, *model.ApiError) {
|
|||||||
auth.InitAuthCache(ctx)
|
auth.InitAuthCache(ctx)
|
||||||
|
|
||||||
userId := uuid.NewString()
|
userId := uuid.NewString()
|
||||||
|
|
||||||
return dao.DB().CreateUser(
|
return dao.DB().CreateUser(
|
||||||
ctx,
|
ctx,
|
||||||
&types.User{
|
&types.User{
|
||||||
|
|||||||
@@ -48,10 +48,15 @@ func NewTestSqliteDB(t *testing.T) (sqlStore sqlstore.SQLStore, testDBFilePath s
|
|||||||
sqlmigration.NewModifyDatetimeFactory(),
|
sqlmigration.NewModifyDatetimeFactory(),
|
||||||
sqlmigration.NewModifyOrgDomainFactory(),
|
sqlmigration.NewModifyOrgDomainFactory(),
|
||||||
sqlmigration.NewUpdateOrganizationFactory(sqlStore),
|
sqlmigration.NewUpdateOrganizationFactory(sqlStore),
|
||||||
|
sqlmigration.NewAddAlertmanagerFactory(sqlStore),
|
||||||
sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore),
|
sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore),
|
||||||
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore),
|
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore),
|
||||||
sqlmigration.NewUpdatePipelines(sqlStore),
|
sqlmigration.NewUpdatePipelines(sqlStore),
|
||||||
|
sqlmigration.NewDropLicensesSitesFactory(sqlStore),
|
||||||
|
sqlmigration.NewUpdateInvitesFactory(sqlStore),
|
||||||
|
sqlmigration.NewUpdatePatFactory(sqlStore),
|
||||||
sqlmigration.NewAddVirtualFieldsFactory(),
|
sqlmigration.NewAddVirtualFieldsFactory(),
|
||||||
|
sqlmigration.NewUpdateIntegrationsFactory(sqlStore),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -70,6 +70,7 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
|
|||||||
sqlmigration.NewUpdateApdexTtlFactory(sqlstore),
|
sqlmigration.NewUpdateApdexTtlFactory(sqlstore),
|
||||||
sqlmigration.NewUpdateResetPasswordFactory(sqlstore),
|
sqlmigration.NewUpdateResetPasswordFactory(sqlstore),
|
||||||
sqlmigration.NewAddVirtualFieldsFactory(),
|
sqlmigration.NewAddVirtualFieldsFactory(),
|
||||||
|
sqlmigration.NewUpdateIntegrationsFactory(sqlstore),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
441
pkg/sqlmigration/026_update_integrations.go
Normal file
441
pkg/sqlmigration/026_update_integrations.go
Normal file
@@ -0,0 +1,441 @@
|
|||||||
|
package sqlmigration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
"github.com/uptrace/bun/migrate"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
type updateIntegrations struct {
|
||||||
|
store sqlstore.SQLStore
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewUpdateIntegrationsFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
|
||||||
|
return factory.NewProviderFactory(factory.MustNewName("update_integrations"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
|
||||||
|
return newUpdateIntegrations(ctx, ps, c, sqlstore)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func newUpdateIntegrations(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
|
||||||
|
return &updateIntegrations{
|
||||||
|
store: store,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (migration *updateIntegrations) Register(migrations *migrate.Migrations) error {
|
||||||
|
if err := migrations.Register(migration.Up, migration.Down); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type existingInstalledIntegration struct {
|
||||||
|
bun.BaseModel `bun:"table:integrations_installed"`
|
||||||
|
|
||||||
|
IntegrationID string `bun:"integration_id,pk,type:text"`
|
||||||
|
ConfigJSON string `bun:"config_json,type:text"`
|
||||||
|
InstalledAt time.Time `bun:"installed_at,default:current_timestamp"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type newInstalledIntegration struct {
|
||||||
|
bun.BaseModel `bun:"table:installed_integration"`
|
||||||
|
|
||||||
|
types.Identifiable
|
||||||
|
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
|
||||||
|
Config string `json:"config" bun:"config,type:text"`
|
||||||
|
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
|
||||||
|
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type existingCloudIntegration struct {
|
||||||
|
bun.BaseModel `bun:"table:cloud_integrations_accounts"`
|
||||||
|
|
||||||
|
CloudProvider string `bun:"cloud_provider,type:text,unique:cloud_provider_id"`
|
||||||
|
ID string `bun:"id,type:text,notnull,unique:cloud_provider_id"`
|
||||||
|
ConfigJSON string `bun:"config_json,type:text"`
|
||||||
|
CloudAccountID string `bun:"cloud_account_id,type:text"`
|
||||||
|
LastAgentReportJSON string `bun:"last_agent_report_json,type:text"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,notnull,default:current_timestamp"`
|
||||||
|
RemovedAt *time.Time `bun:"removed_at,type:timestamp"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type newCloudIntegration struct {
|
||||||
|
bun.BaseModel `bun:"table:cloud_integration"`
|
||||||
|
|
||||||
|
types.Identifiable
|
||||||
|
types.TimeAuditable
|
||||||
|
Provider string `json:"provider" bun:"provider,type:text"`
|
||||||
|
Config string `json:"config" bun:"config,type:text"`
|
||||||
|
AccountID string `json:"account_id" bun:"account_id,type:text"`
|
||||||
|
LastAgentReport string `json:"last_agent_report" bun:"last_agent_report,type:text"`
|
||||||
|
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp"`
|
||||||
|
OrgID string `json:"org_id" bun:"org_id,type:text"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type existingCloudIntegrationService struct {
|
||||||
|
bun.BaseModel `bun:"table:cloud_integrations_service_configs,alias:c1"`
|
||||||
|
|
||||||
|
CloudProvider string `bun:"cloud_provider,type:text,notnull,unique:service_cloud_provider_account"`
|
||||||
|
CloudAccountID string `bun:"cloud_account_id,type:text,notnull,unique:service_cloud_provider_account"`
|
||||||
|
ServiceID string `bun:"service_id,type:text,notnull,unique:service_cloud_provider_account"`
|
||||||
|
ConfigJSON string `bun:"config_json,type:text"`
|
||||||
|
CreatedAt time.Time `bun:"created_at,default:current_timestamp"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type newCloudIntegrationService struct {
|
||||||
|
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
|
||||||
|
|
||||||
|
types.Identifiable
|
||||||
|
types.TimeAuditable
|
||||||
|
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
|
||||||
|
Config string `bun:"config,type:text"`
|
||||||
|
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type StorablePersonalAccessToken struct {
|
||||||
|
bun.BaseModel `bun:"table:personal_access_token"`
|
||||||
|
types.Identifiable
|
||||||
|
types.TimeAuditable
|
||||||
|
OrgID string `json:"orgId" bun:"org_id,type:text,notnull"`
|
||||||
|
Role string `json:"role" bun:"role,type:text,notnull,default:'ADMIN'"`
|
||||||
|
UserID string `json:"userId" bun:"user_id,type:text,notnull"`
|
||||||
|
Token string `json:"token" bun:"token,type:text,notnull,unique"`
|
||||||
|
Name string `json:"name" bun:"name,type:text,notnull"`
|
||||||
|
ExpiresAt int64 `json:"expiresAt" bun:"expires_at,notnull,default:0"`
|
||||||
|
LastUsed int64 `json:"lastUsed" bun:"last_used,notnull,default:0"`
|
||||||
|
Revoked bool `json:"revoked" bun:"revoked,notnull,default:false"`
|
||||||
|
UpdatedByUserID string `json:"updatedByUserId" bun:"updated_by_user_id,type:text,notnull,default:''"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (migration *updateIntegrations) Up(ctx context.Context, db *bun.DB) error {
|
||||||
|
|
||||||
|
// begin transaction
|
||||||
|
tx, err := db.BeginTx(ctx, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer tx.Rollback()
|
||||||
|
|
||||||
|
// don't run the migration if there are multiple org ids
|
||||||
|
orgIDs := make([]string, 0)
|
||||||
|
err = migration.store.BunDB().NewSelect().Model((*types.Organization)(nil)).Column("id").Scan(ctx, &orgIDs)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if len(orgIDs) > 1 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---
|
||||||
|
// installed integrations
|
||||||
|
// ---
|
||||||
|
err = migration.
|
||||||
|
store.
|
||||||
|
Dialect().
|
||||||
|
RenameTableAndModifyModel(ctx, tx, new(existingInstalledIntegration), new(newInstalledIntegration), []string{OrgReference}, func(ctx context.Context) error {
|
||||||
|
existingIntegrations := make([]*existingInstalledIntegration, 0)
|
||||||
|
err = tx.
|
||||||
|
NewSelect().
|
||||||
|
Model(&existingIntegrations).
|
||||||
|
Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if err != sql.ErrNoRows {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err == nil && len(existingIntegrations) > 0 {
|
||||||
|
newIntegrations := migration.
|
||||||
|
CopyOldIntegrationsToNewIntegrations(tx, orgIDs[0], existingIntegrations)
|
||||||
|
_, err = tx.
|
||||||
|
NewInsert().
|
||||||
|
Model(&newIntegrations).
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---
|
||||||
|
// cloud integrations
|
||||||
|
// ---
|
||||||
|
err = migration.
|
||||||
|
store.
|
||||||
|
Dialect().
|
||||||
|
RenameTableAndModifyModel(ctx, tx, new(existingCloudIntegration), new(newCloudIntegration), []string{OrgReference}, func(ctx context.Context) error {
|
||||||
|
existingIntegrations := make([]*existingCloudIntegration, 0)
|
||||||
|
err = tx.
|
||||||
|
NewSelect().
|
||||||
|
Model(&existingIntegrations).
|
||||||
|
Where("removed_at IS NULL"). // we will only copy the accounts that are not removed
|
||||||
|
Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if err != sql.ErrNoRows {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err == nil && len(existingIntegrations) > 0 {
|
||||||
|
newIntegrations := migration.
|
||||||
|
CopyOldCloudIntegrationsToNewCloudIntegrations(tx, orgIDs[0], existingIntegrations)
|
||||||
|
_, err = tx.
|
||||||
|
NewInsert().
|
||||||
|
Model(&newIntegrations).
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// add unique constraint to cloud_integration table
|
||||||
|
_, err = tx.ExecContext(ctx, `CREATE UNIQUE INDEX IF NOT EXISTS unique_cloud_integration ON cloud_integration (id, provider, org_id)`)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---
|
||||||
|
// cloud integration service
|
||||||
|
// ---
|
||||||
|
err = migration.
|
||||||
|
store.
|
||||||
|
Dialect().
|
||||||
|
RenameTableAndModifyModel(ctx, tx, new(existingCloudIntegrationService), new(newCloudIntegrationService), []string{CloudIntegrationReference}, func(ctx context.Context) error {
|
||||||
|
existingServices := make([]*existingCloudIntegrationService, 0)
|
||||||
|
|
||||||
|
// only one service per provider,account id and type
|
||||||
|
// so there won't be any duplicates.
|
||||||
|
// just that these will be enabled as soon as the integration for the account is enabled
|
||||||
|
err = tx.
|
||||||
|
NewSelect().
|
||||||
|
Model(&existingServices).
|
||||||
|
Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if err != sql.ErrNoRows {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err == nil && len(existingServices) > 0 {
|
||||||
|
newServices := migration.
|
||||||
|
CopyOldCloudIntegrationServicesToNewCloudIntegrationServices(tx, orgIDs[0], existingServices)
|
||||||
|
if len(newServices) > 0 {
|
||||||
|
_, err = tx.
|
||||||
|
NewInsert().
|
||||||
|
Model(&newServices).
|
||||||
|
Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(orgIDs) == 0 {
|
||||||
|
err = tx.Commit()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// copy the old aws integration user to the new user
|
||||||
|
err = migration.copyOldAwsIntegrationUser(tx, orgIDs[0])
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = tx.Commit()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (migration *updateIntegrations) Down(ctx context.Context, db *bun.DB) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (migration *updateIntegrations) CopyOldIntegrationsToNewIntegrations(tx bun.IDB, orgID string, existingIntegrations []*existingInstalledIntegration) []*newInstalledIntegration {
|
||||||
|
newIntegrations := make([]*newInstalledIntegration, 0)
|
||||||
|
|
||||||
|
for _, integration := range existingIntegrations {
|
||||||
|
newIntegrations = append(newIntegrations, &newInstalledIntegration{
|
||||||
|
Identifiable: types.Identifiable{
|
||||||
|
ID: valuer.GenerateUUID(),
|
||||||
|
},
|
||||||
|
Type: integration.IntegrationID,
|
||||||
|
Config: integration.ConfigJSON,
|
||||||
|
InstalledAt: integration.InstalledAt,
|
||||||
|
OrgID: orgID,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return newIntegrations
|
||||||
|
}
|
||||||
|
|
||||||
|
func (migration *updateIntegrations) CopyOldCloudIntegrationsToNewCloudIntegrations(tx bun.IDB, orgID string, existingIntegrations []*existingCloudIntegration) []*newCloudIntegration {
|
||||||
|
newIntegrations := make([]*newCloudIntegration, 0)
|
||||||
|
|
||||||
|
for _, integration := range existingIntegrations {
|
||||||
|
newIntegrations = append(newIntegrations, &newCloudIntegration{
|
||||||
|
Identifiable: types.Identifiable{
|
||||||
|
ID: valuer.GenerateUUID(),
|
||||||
|
},
|
||||||
|
TimeAuditable: types.TimeAuditable{
|
||||||
|
CreatedAt: integration.CreatedAt,
|
||||||
|
UpdatedAt: integration.CreatedAt,
|
||||||
|
},
|
||||||
|
Provider: integration.CloudProvider,
|
||||||
|
AccountID: integration.CloudAccountID,
|
||||||
|
Config: integration.ConfigJSON,
|
||||||
|
LastAgentReport: integration.LastAgentReportJSON,
|
||||||
|
RemovedAt: integration.RemovedAt,
|
||||||
|
OrgID: orgID,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return newIntegrations
|
||||||
|
}
|
||||||
|
|
||||||
|
func (migration *updateIntegrations) CopyOldCloudIntegrationServicesToNewCloudIntegrationServices(tx bun.IDB, orgID string, existingServices []*existingCloudIntegrationService) []*newCloudIntegrationService {
|
||||||
|
newServices := make([]*newCloudIntegrationService, 0)
|
||||||
|
|
||||||
|
for _, service := range existingServices {
|
||||||
|
var cloudIntegrationID string
|
||||||
|
err := tx.NewSelect().
|
||||||
|
Model((*newCloudIntegration)(nil)).
|
||||||
|
Column("id").
|
||||||
|
Where("account_id = ?", service.CloudAccountID).
|
||||||
|
Where("provider = ?", service.CloudProvider).
|
||||||
|
Where("org_id = ?", orgID).
|
||||||
|
Scan(context.Background(), &cloudIntegrationID)
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
zap.L().Error("failed to get cloud integration id", zap.Error(err))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
newServices = append(newServices, &newCloudIntegrationService{
|
||||||
|
Identifiable: types.Identifiable{
|
||||||
|
ID: valuer.GenerateUUID(),
|
||||||
|
},
|
||||||
|
TimeAuditable: types.TimeAuditable{
|
||||||
|
CreatedAt: service.CreatedAt,
|
||||||
|
UpdatedAt: service.CreatedAt,
|
||||||
|
},
|
||||||
|
Type: service.ServiceID,
|
||||||
|
Config: service.ConfigJSON,
|
||||||
|
CloudIntegrationID: cloudIntegrationID,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return newServices
|
||||||
|
}
|
||||||
|
|
||||||
|
func (migration *updateIntegrations) copyOldAwsIntegrationUser(tx bun.IDB, orgID string) error {
|
||||||
|
user := &types.User{}
|
||||||
|
err := tx.NewSelect().Model(user).Where("email = ?", "aws-integration@signoz.io").Scan(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if the id is already an uuid
|
||||||
|
if _, err := uuid.Parse(user.ID); err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// new user
|
||||||
|
newUser := &types.User{
|
||||||
|
ID: uuid.New().String(),
|
||||||
|
TimeAuditable: types.TimeAuditable{
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
},
|
||||||
|
OrgID: orgID,
|
||||||
|
Name: user.Name,
|
||||||
|
Email: user.Email,
|
||||||
|
GroupID: user.GroupID,
|
||||||
|
Password: user.Password,
|
||||||
|
}
|
||||||
|
|
||||||
|
// get the pat for old user
|
||||||
|
pat := &StorablePersonalAccessToken{}
|
||||||
|
err = tx.NewSelect().Model(pat).Where("user_id = ? and revoked = false", "aws-integration").Scan(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
// delete the old user
|
||||||
|
_, err = tx.ExecContext(context.Background(), `DELETE FROM users WHERE id = ?`, user.ID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// new pat
|
||||||
|
newPAT := &StorablePersonalAccessToken{
|
||||||
|
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
|
||||||
|
TimeAuditable: types.TimeAuditable{
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
},
|
||||||
|
OrgID: orgID,
|
||||||
|
UserID: newUser.ID,
|
||||||
|
Token: pat.Token,
|
||||||
|
Name: pat.Name,
|
||||||
|
ExpiresAt: pat.ExpiresAt,
|
||||||
|
LastUsed: pat.LastUsed,
|
||||||
|
Revoked: pat.Revoked,
|
||||||
|
Role: pat.Role,
|
||||||
|
}
|
||||||
|
|
||||||
|
// delete old user
|
||||||
|
_, err = tx.ExecContext(context.Background(), `DELETE FROM users WHERE id = ?`, user.ID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert the new user
|
||||||
|
_, err = tx.NewInsert().Model(newUser).Exec(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert the new pat
|
||||||
|
_, err = tx.NewInsert().Model(newPAT).Exec(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
@@ -28,6 +28,7 @@ var (
|
|||||||
var (
|
var (
|
||||||
OrgReference = "org"
|
OrgReference = "org"
|
||||||
UserReference = "user"
|
UserReference = "user"
|
||||||
|
CloudIntegrationReference = "cloud_integration"
|
||||||
)
|
)
|
||||||
|
|
||||||
func New(
|
func New(
|
||||||
|
|||||||
@@ -19,11 +19,13 @@ var (
|
|||||||
var (
|
var (
|
||||||
Org = "org"
|
Org = "org"
|
||||||
User = "user"
|
User = "user"
|
||||||
|
CloudIntegration = "cloud_integration"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||||
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||||
|
CloudIntegrationReference = `("cloud_integration_id") REFERENCES "cloud_integration" ("id") ON DELETE CASCADE`
|
||||||
)
|
)
|
||||||
|
|
||||||
type dialect struct {
|
type dialect struct {
|
||||||
@@ -202,6 +204,8 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
|
|||||||
fkReferences = append(fkReferences, OrgReference)
|
fkReferences = append(fkReferences, OrgReference)
|
||||||
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
} else if reference == User && !slices.Contains(fkReferences, UserReference) {
|
||||||
fkReferences = append(fkReferences, UserReference)
|
fkReferences = append(fkReferences, UserReference)
|
||||||
|
} else if reference == CloudIntegration && !slices.Contains(fkReferences, CloudIntegrationReference) {
|
||||||
|
fkReferences = append(fkReferences, CloudIntegrationReference)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
149
pkg/telemetrymetadata/condition_builder.go
Normal file
149
pkg/telemetrymetadata/condition_builder.go
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
package telemetrymetadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/huandu/go-sqlbuilder"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
attributeMetadataColumns = map[string]*schema.Column{
|
||||||
|
"resource_attributes": {Name: "resource_attributes", Type: schema.MapColumnType{
|
||||||
|
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
|
||||||
|
ValueType: schema.ColumnTypeString,
|
||||||
|
}},
|
||||||
|
"attributes": {Name: "attributes", Type: schema.MapColumnType{
|
||||||
|
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
|
||||||
|
ValueType: schema.ColumnTypeString,
|
||||||
|
}},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
type conditionBuilder struct {
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewConditionBuilder() qbtypes.ConditionBuilder {
|
||||||
|
return &conditionBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *conditionBuilder) GetColumn(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
|
||||||
|
switch key.FieldContext {
|
||||||
|
case telemetrytypes.FieldContextResource:
|
||||||
|
return attributeMetadataColumns["resource_attributes"], nil
|
||||||
|
case telemetrytypes.FieldContextAttribute:
|
||||||
|
return attributeMetadataColumns["attributes"], nil
|
||||||
|
}
|
||||||
|
return nil, qbtypes.ErrColumnNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *conditionBuilder) GetTableFieldName(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||||
|
column, err := c.GetColumn(ctx, key)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch column.Type {
|
||||||
|
case schema.MapColumnType{
|
||||||
|
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
|
||||||
|
ValueType: schema.ColumnTypeString,
|
||||||
|
}:
|
||||||
|
return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
|
||||||
|
}
|
||||||
|
return column.Name, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *conditionBuilder) GetCondition(
|
||||||
|
ctx context.Context,
|
||||||
|
key *telemetrytypes.TelemetryFieldKey,
|
||||||
|
operator qbtypes.FilterOperator,
|
||||||
|
value any,
|
||||||
|
sb *sqlbuilder.SelectBuilder,
|
||||||
|
) (string, error) {
|
||||||
|
column, err := c.GetColumn(ctx, key)
|
||||||
|
if err != nil {
|
||||||
|
// if we don't have a column, we can't build a condition for related values
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
tblFieldName, err := c.GetTableFieldName(ctx, key)
|
||||||
|
if err != nil {
|
||||||
|
// if we don't have a table field name, we can't build a condition for related values
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if key.FieldDataType != telemetrytypes.FieldDataTypeString {
|
||||||
|
// if the field data type is not string, we can't build a condition for related values
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// key must exists to apply main filter
|
||||||
|
containsExp := fmt.Sprintf("mapContains(%s, %s)", column.Name, sb.Var(key.Name))
|
||||||
|
|
||||||
|
// regular operators
|
||||||
|
switch operator {
|
||||||
|
// regular operators
|
||||||
|
case qbtypes.FilterOperatorEqual:
|
||||||
|
return sb.And(containsExp, sb.E(tblFieldName, value)), nil
|
||||||
|
case qbtypes.FilterOperatorNotEqual:
|
||||||
|
return sb.And(containsExp, sb.NE(tblFieldName, value)), nil
|
||||||
|
|
||||||
|
// like and not like
|
||||||
|
case qbtypes.FilterOperatorLike:
|
||||||
|
return sb.And(containsExp, sb.Like(tblFieldName, value)), nil
|
||||||
|
case qbtypes.FilterOperatorNotLike:
|
||||||
|
return sb.And(containsExp, sb.NotLike(tblFieldName, value)), nil
|
||||||
|
case qbtypes.FilterOperatorILike:
|
||||||
|
return sb.And(containsExp, sb.ILike(tblFieldName, value)), nil
|
||||||
|
case qbtypes.FilterOperatorNotILike:
|
||||||
|
return sb.And(containsExp, sb.NotILike(tblFieldName, value)), nil
|
||||||
|
|
||||||
|
case qbtypes.FilterOperatorContains:
|
||||||
|
return sb.And(containsExp, sb.ILike(tblFieldName, fmt.Sprintf("%%%s%%", value))), nil
|
||||||
|
case qbtypes.FilterOperatorNotContains:
|
||||||
|
return sb.And(containsExp, sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value))), nil
|
||||||
|
|
||||||
|
case qbtypes.FilterOperatorRegexp:
|
||||||
|
exp := fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value))
|
||||||
|
return sb.And(containsExp, exp), nil
|
||||||
|
case qbtypes.FilterOperatorNotRegexp:
|
||||||
|
exp := fmt.Sprintf(`not match(%s, %s)`, tblFieldName, sb.Var(value))
|
||||||
|
return sb.And(containsExp, exp), nil
|
||||||
|
|
||||||
|
// in and not in
|
||||||
|
case qbtypes.FilterOperatorIn:
|
||||||
|
values, ok := value.([]any)
|
||||||
|
if !ok {
|
||||||
|
return "", qbtypes.ErrInValues
|
||||||
|
}
|
||||||
|
return sb.And(containsExp, sb.In(tblFieldName, values...)), nil
|
||||||
|
case qbtypes.FilterOperatorNotIn:
|
||||||
|
values, ok := value.([]any)
|
||||||
|
if !ok {
|
||||||
|
return "", qbtypes.ErrInValues
|
||||||
|
}
|
||||||
|
return sb.And(containsExp, sb.NotIn(tblFieldName, values...)), nil
|
||||||
|
|
||||||
|
// exists and not exists
|
||||||
|
// in the query builder, `exists` and `not exists` are used for
|
||||||
|
// key membership checks, so depending on the column type, the condition changes
|
||||||
|
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
|
||||||
|
switch column.Type {
|
||||||
|
case schema.MapColumnType{
|
||||||
|
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
|
||||||
|
ValueType: schema.ColumnTypeString,
|
||||||
|
}:
|
||||||
|
leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
|
||||||
|
if operator == qbtypes.FilterOperatorExists {
|
||||||
|
return sb.E(leftOperand, true), nil
|
||||||
|
} else {
|
||||||
|
return sb.NE(leftOperand, true), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
272
pkg/telemetrymetadata/condition_builder_test.go
Normal file
272
pkg/telemetrymetadata/condition_builder_test.go
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
package telemetrymetadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/huandu/go-sqlbuilder"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestGetColumn(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
conditionBuilder := NewConditionBuilder()
|
||||||
|
|
||||||
|
testCases := []struct {
|
||||||
|
name string
|
||||||
|
key telemetrytypes.TelemetryFieldKey
|
||||||
|
expectedCol *schema.Column
|
||||||
|
expectedError error
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Resource field",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "service.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
},
|
||||||
|
expectedCol: attributeMetadataColumns["resource_attributes"],
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Scope field - scope name",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextScope,
|
||||||
|
},
|
||||||
|
expectedCol: nil,
|
||||||
|
expectedError: qbtypes.ErrColumnNotFound,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Scope field - scope.name",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "scope.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextScope,
|
||||||
|
},
|
||||||
|
expectedCol: nil,
|
||||||
|
expectedError: qbtypes.ErrColumnNotFound,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Scope field - scope_name",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "scope_name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextScope,
|
||||||
|
},
|
||||||
|
expectedCol: nil,
|
||||||
|
expectedError: qbtypes.ErrColumnNotFound,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Scope field - version",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "version",
|
||||||
|
FieldContext: telemetrytypes.FieldContextScope,
|
||||||
|
},
|
||||||
|
expectedCol: nil,
|
||||||
|
expectedError: qbtypes.ErrColumnNotFound,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Scope field - other scope field",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "custom.scope.field",
|
||||||
|
FieldContext: telemetrytypes.FieldContextScope,
|
||||||
|
},
|
||||||
|
expectedCol: nil,
|
||||||
|
expectedError: qbtypes.ErrColumnNotFound,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Attribute field - string type",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "user.id",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
expectedCol: attributeMetadataColumns["attributes"],
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Attribute field - number type",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "request.size",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||||
|
},
|
||||||
|
expectedCol: attributeMetadataColumns["attributes"],
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Attribute field - int64 type",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "request.duration",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeInt64,
|
||||||
|
},
|
||||||
|
expectedCol: attributeMetadataColumns["attributes"],
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Attribute field - float64 type",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "cpu.utilization",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
|
||||||
|
},
|
||||||
|
expectedCol: attributeMetadataColumns["attributes"],
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Log field - nonexistent",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "nonexistent_field",
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
},
|
||||||
|
expectedCol: nil,
|
||||||
|
expectedError: qbtypes.ErrColumnNotFound,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
col, err := conditionBuilder.GetColumn(ctx, &tc.key)
|
||||||
|
|
||||||
|
if tc.expectedError != nil {
|
||||||
|
assert.Equal(t, tc.expectedError, err)
|
||||||
|
} else {
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, tc.expectedCol, col)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetFieldKeyName(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
conditionBuilder := &conditionBuilder{}
|
||||||
|
|
||||||
|
testCases := []struct {
|
||||||
|
name string
|
||||||
|
key telemetrytypes.TelemetryFieldKey
|
||||||
|
expectedResult string
|
||||||
|
expectedError error
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Map column type - string attribute",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "user.id",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
expectedResult: "attributes['user.id']",
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Map column type - number attribute",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "request.size",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||||
|
},
|
||||||
|
expectedResult: "attributes['request.size']",
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Map column type - bool attribute",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "request.success",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeBool,
|
||||||
|
},
|
||||||
|
expectedResult: "attributes['request.success']",
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Map column type - resource attribute",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "service.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
},
|
||||||
|
expectedResult: "resource_attributes['service.name']",
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Non-existent column",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "nonexistent_field",
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
},
|
||||||
|
expectedResult: "",
|
||||||
|
expectedError: qbtypes.ErrColumnNotFound,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
result, err := conditionBuilder.GetTableFieldName(ctx, &tc.key)
|
||||||
|
|
||||||
|
if tc.expectedError != nil {
|
||||||
|
assert.Equal(t, tc.expectedError, err)
|
||||||
|
} else {
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, tc.expectedResult, result)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetCondition(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
conditionBuilder := NewConditionBuilder()
|
||||||
|
|
||||||
|
testCases := []struct {
|
||||||
|
name string
|
||||||
|
key telemetrytypes.TelemetryFieldKey
|
||||||
|
operator qbtypes.FilterOperator
|
||||||
|
value any
|
||||||
|
expectedSQL string
|
||||||
|
expectedError error
|
||||||
|
}{
|
||||||
|
|
||||||
|
{
|
||||||
|
name: "ILike operator - string attribute",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "user.id",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
operator: qbtypes.FilterOperatorILike,
|
||||||
|
value: "%admin%",
|
||||||
|
expectedSQL: "WHERE (mapContains(attributes, ?) AND LOWER(attributes['user.id']) LIKE LOWER(?))",
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Not ILike operator - string attribute",
|
||||||
|
key: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "user.id",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
operator: qbtypes.FilterOperatorNotILike,
|
||||||
|
value: "%admin%",
|
||||||
|
expectedSQL: "WHERE (mapContains(attributes, ?) AND LOWER(attributes['user.id']) NOT LIKE LOWER(?))",
|
||||||
|
expectedError: nil,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
sb := sqlbuilder.NewSelectBuilder()
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
cond, err := conditionBuilder.GetCondition(ctx, &tc.key, tc.operator, tc.value, sb)
|
||||||
|
sb.Where(cond)
|
||||||
|
|
||||||
|
if tc.expectedError != nil {
|
||||||
|
assert.Equal(t, tc.expectedError, err)
|
||||||
|
} else {
|
||||||
|
require.NoError(t, err)
|
||||||
|
sql, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
assert.Contains(t, sql, tc.expectedSQL)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
691
pkg/telemetrymetadata/metadata.go
Normal file
691
pkg/telemetrymetadata/metadata.go
Normal file
@@ -0,0 +1,691 @@
|
|||||||
|
package telemetrymetadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/huandu/go-sqlbuilder"
|
||||||
|
"go.uber.org/zap"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
ErrFailedToGetTracesKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get traces keys")
|
||||||
|
ErrFailedToGetLogsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get logs keys")
|
||||||
|
ErrFailedToGetTblStatement = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get tbl statement")
|
||||||
|
ErrFailedToGetMetricsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get metrics keys")
|
||||||
|
ErrFailedToGetRelatedValues = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get related values")
|
||||||
|
)
|
||||||
|
|
||||||
|
type telemetryMetaStore struct {
|
||||||
|
telemetrystore telemetrystore.TelemetryStore
|
||||||
|
tracesDBName string
|
||||||
|
tracesFieldsTblName string
|
||||||
|
indexV3TblName string
|
||||||
|
metricsDBName string
|
||||||
|
metricsFieldsTblName string
|
||||||
|
timeseries1WTblName string
|
||||||
|
logsDBName string
|
||||||
|
logsFieldsTblName string
|
||||||
|
logsV2TblName string
|
||||||
|
relatedMetadataDBName string
|
||||||
|
relatedMetadataTblName string
|
||||||
|
|
||||||
|
conditionBuilder qbtypes.ConditionBuilder
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewTelemetryMetaStore(
|
||||||
|
telemetrystore telemetrystore.TelemetryStore,
|
||||||
|
tracesDBName string,
|
||||||
|
tracesFieldsTblName string,
|
||||||
|
indexV3TblName string,
|
||||||
|
metricsDBName string,
|
||||||
|
metricsFieldsTblName string,
|
||||||
|
timeseries1WTblName string,
|
||||||
|
logsDBName string,
|
||||||
|
logsV2TblName string,
|
||||||
|
logsFieldsTblName string,
|
||||||
|
relatedMetadataDBName string,
|
||||||
|
relatedMetadataTblName string,
|
||||||
|
) (telemetrytypes.MetadataStore, error) {
|
||||||
|
return &telemetryMetaStore{
|
||||||
|
telemetrystore: telemetrystore,
|
||||||
|
tracesDBName: tracesDBName,
|
||||||
|
tracesFieldsTblName: tracesFieldsTblName,
|
||||||
|
indexV3TblName: indexV3TblName,
|
||||||
|
metricsDBName: metricsDBName,
|
||||||
|
metricsFieldsTblName: metricsFieldsTblName,
|
||||||
|
timeseries1WTblName: timeseries1WTblName,
|
||||||
|
logsDBName: logsDBName,
|
||||||
|
logsV2TblName: logsV2TblName,
|
||||||
|
logsFieldsTblName: logsFieldsTblName,
|
||||||
|
relatedMetadataDBName: relatedMetadataDBName,
|
||||||
|
relatedMetadataTblName: relatedMetadataTblName,
|
||||||
|
|
||||||
|
conditionBuilder: NewConditionBuilder(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// tracesTblStatementToFieldKeys returns materialised attribute/resource/scope keys from the traces table
|
||||||
|
func (t *telemetryMetaStore) tracesTblStatementToFieldKeys(ctx context.Context) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", t.tracesDBName, t.indexV3TblName)
|
||||||
|
statements := []telemetrytypes.ShowCreateTableStatement{}
|
||||||
|
err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
return ExtractFieldKeysFromTblStatement(statements[0].Statement)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getTracesKeys returns the keys from the spans that match the field selection criteria.
// Each selector contributes one OR-ed condition group; results are de-duplicated
// per (key, context, data type) and ordered by a tag_type-based priority.
func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, error) {

	if len(fieldKeySelectors) == 0 {
		return nil, nil
	}

	// pre-fetch the materialised keys from the traces table
	matKeys, err := t.tracesTblStatementToFieldKeys(ctx)
	if err != nil {
		return nil, err
	}
	// index materialised keys by "name;context;datatype" so rows from the
	// tag-attributes query below can be swapped for their materialised
	// counterparts (which carry column metadata)
	mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
	for _, key := range matKeys {
		mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
	}

	// rank rows by tag_type: span-intrinsic fields first, then resource,
	// scope, and plain attributes
	sb := sqlbuilder.Select("tag_key", "tag_type", "tag_data_type", `
	CASE
		WHEN tag_type = 'spanfield' THEN 1
		WHEN tag_type = 'resource' THEN 2
		WHEN tag_type = 'scope' THEN 3
		WHEN tag_type = 'tag' THEN 4
		ELSE 5
	END as priority`).From(t.tracesDBName + "." + t.tracesFieldsTblName)
	var limit int

	conds := []string{}
	for _, fieldKeySelector := range fieldKeySelectors {

		// optional time-range bounds on the fields table
		// NOTE(review): these are appended to the top-level OR list rather than
		// AND-ed with this selector's key conditions — confirm that is intended.
		if fieldKeySelector.StartUnixMilli != 0 {
			conds = append(conds, sb.GE("unix_milli", fieldKeySelector.StartUnixMilli))
		}
		if fieldKeySelector.EndUnixMilli != 0 {
			conds = append(conds, sb.LE("unix_milli", fieldKeySelector.EndUnixMilli))
		}

		// key part of the selector: exact match or substring search
		fieldKeyConds := []string{}
		if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
			fieldKeyConds = append(fieldKeyConds, sb.E("tag_key", fieldKeySelector.Name))
		} else {
			fieldKeyConds = append(fieldKeyConds, sb.Like("tag_key", "%"+fieldKeySelector.Name+"%"))
		}

		// now look at the field context
		if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified {
			fieldKeyConds = append(fieldKeyConds, sb.E("tag_type", fieldKeySelector.FieldContext.TagType()))
		}

		// now look at the field data type
		if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
			fieldKeyConds = append(fieldKeyConds, sb.E("tag_data_type", fieldKeySelector.FieldDataType.TagDataType()))
		}

		conds = append(conds, sb.And(fieldKeyConds...))
		// per-selector limits accumulate into one overall limit
		limit += fieldKeySelector.Limit
	}
	sb.Where(sb.Or(conds...))

	// fall back to a cap of 1000 when no selector specified a limit
	if limit == 0 {
		limit = 1000
	}

	// outer query collapses duplicates to their highest priority value
	mainSb := sqlbuilder.Select("tag_key", "tag_type", "tag_data_type", "max(priority) as priority")
	mainSb.From(mainSb.BuilderAs(sb, "sub_query"))
	mainSb.GroupBy("tag_key", "tag_type", "tag_data_type")
	mainSb.OrderBy("priority")
	mainSb.Limit(limit)

	query, args := mainSb.BuildWithFlavor(sqlbuilder.ClickHouse)

	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
	}
	defer rows.Close()
	keys := []*telemetrytypes.TelemetryFieldKey{}
	for rows.Next() {
		var name string
		var fieldContext telemetrytypes.FieldContext
		var fieldDataType telemetrytypes.FieldDataType
		var priority uint8 // scanned to satisfy the select list; not used afterwards
		err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
		if err != nil {
			return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
		}
		key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]

		// if there is no materialised column, create a key with the field context and data type
		if !ok {
			key = &telemetrytypes.TelemetryFieldKey{
				Name:          name,
				FieldContext:  fieldContext,
				FieldDataType: fieldDataType,
			}
		}

		keys = append(keys, key)
	}

	if rows.Err() != nil {
		return nil, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
	}

	return keys, nil
}
|
||||||
|
|
||||||
|
// logsTblStatementToFieldKeys returns materialised attribute/resource/scope keys from the logs table
|
||||||
|
func (t *telemetryMetaStore) logsTblStatementToFieldKeys(ctx context.Context) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", t.logsDBName, t.logsV2TblName)
|
||||||
|
statements := []telemetrytypes.ShowCreateTableStatement{}
|
||||||
|
err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
return ExtractFieldKeysFromTblStatement(statements[0].Statement)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getLogsKeys returns the keys from the spans that match the field selection criteria.
// Mirrors getTracesKeys but reads the logs fields table and ranks 'logfield'
// rows highest.
func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, error) {
	if len(fieldKeySelectors) == 0 {
		return nil, nil
	}

	// pre-fetch the materialised keys from the logs table
	matKeys, err := t.logsTblStatementToFieldKeys(ctx)
	if err != nil {
		return nil, err
	}
	// index materialised keys by "name;context;datatype" so rows from the
	// tag-attributes query below can be swapped for their materialised
	// counterparts
	mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
	for _, key := range matKeys {
		mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
	}

	// rank rows by tag_type: log-intrinsic fields first, then resource,
	// scope, and plain attributes
	sb := sqlbuilder.Select("tag_key", "tag_type", "tag_data_type", `
	CASE
		WHEN tag_type = 'logfield' THEN 1
		WHEN tag_type = 'resource' THEN 2
		WHEN tag_type = 'scope' THEN 3
		WHEN tag_type = 'tag' THEN 4
		ELSE 5
	END as priority`).From(t.logsDBName + "." + t.logsFieldsTblName)
	var limit int

	conds := []string{}
	for _, fieldKeySelector := range fieldKeySelectors {

		// optional time-range bounds on the fields table
		// NOTE(review): appended to the top-level OR list rather than AND-ed
		// with this selector's key conditions — confirm that is intended.
		if fieldKeySelector.StartUnixMilli != 0 {
			conds = append(conds, sb.GE("unix_milli", fieldKeySelector.StartUnixMilli))
		}
		if fieldKeySelector.EndUnixMilli != 0 {
			conds = append(conds, sb.LE("unix_milli", fieldKeySelector.EndUnixMilli))
		}

		// key part of the selector: exact match or substring search
		fieldKeyConds := []string{}
		if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
			fieldKeyConds = append(fieldKeyConds, sb.E("tag_key", fieldKeySelector.Name))
		} else {
			fieldKeyConds = append(fieldKeyConds, sb.Like("tag_key", "%"+fieldKeySelector.Name+"%"))
		}

		// now look at the field context
		if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified {
			fieldKeyConds = append(fieldKeyConds, sb.E("tag_type", fieldKeySelector.FieldContext.TagType()))
		}

		// now look at the field data type
		if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
			fieldKeyConds = append(fieldKeyConds, sb.E("tag_data_type", fieldKeySelector.FieldDataType.TagDataType()))
		}

		conds = append(conds, sb.And(fieldKeyConds...))
		// per-selector limits accumulate into one overall limit
		limit += fieldKeySelector.Limit
	}
	sb.Where(sb.Or(conds...))
	// fall back to a cap of 1000 when no selector specified a limit
	if limit == 0 {
		limit = 1000
	}

	// outer query collapses duplicates to their highest priority value
	mainSb := sqlbuilder.Select("tag_key", "tag_type", "tag_data_type", "max(priority) as priority")
	mainSb.From(mainSb.BuilderAs(sb, "sub_query"))
	mainSb.GroupBy("tag_key", "tag_type", "tag_data_type")
	mainSb.OrderBy("priority")
	mainSb.Limit(limit)

	query, args := mainSb.BuildWithFlavor(sqlbuilder.ClickHouse)

	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
	}
	defer rows.Close()
	keys := []*telemetrytypes.TelemetryFieldKey{}
	for rows.Next() {
		var name string
		var fieldContext telemetrytypes.FieldContext
		var fieldDataType telemetrytypes.FieldDataType
		var priority uint8 // scanned to satisfy the select list; not used afterwards
		err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
		if err != nil {
			return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
		}
		key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]

		// if there is no materialised column, create a key with the field context and data type
		if !ok {
			key = &telemetrytypes.TelemetryFieldKey{
				Name:          name,
				FieldContext:  fieldContext,
				FieldDataType: fieldDataType,
			}
		}

		keys = append(keys, key)
	}

	if rows.Err() != nil {
		return nil, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
	}

	return keys, nil
}
|
||||||
|
|
||||||
|
// getMetricsKeys returns the keys from the metrics that match the field selection criteria
|
||||||
|
// TODO(srikanthccv): update the implementation after the dot metrics migration is done
|
||||||
|
func (t *telemetryMetaStore) getMetricsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
if len(fieldKeySelectors) == 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var whereClause, innerWhereClause string
|
||||||
|
var limit int
|
||||||
|
args := []any{}
|
||||||
|
|
||||||
|
for _, fieldKeySelector := range fieldKeySelectors {
|
||||||
|
if fieldKeySelector.MetricContext != nil {
|
||||||
|
innerWhereClause += "metric_name IN ? AND"
|
||||||
|
args = append(args, fieldKeySelector.MetricContext.MetricName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
innerWhereClause += " __normalized = true"
|
||||||
|
|
||||||
|
for idx, fieldKeySelector := range fieldKeySelectors {
|
||||||
|
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
||||||
|
whereClause += "(distinctTagKey = ? AND distinctTagKey NOT LIKE '\\_\\_%%')"
|
||||||
|
args = append(args, fieldKeySelector.Name)
|
||||||
|
} else {
|
||||||
|
whereClause += "(distinctTagKey ILIKE ? AND distinctTagKey NOT LIKE '\\_\\_%%')"
|
||||||
|
args = append(args, fmt.Sprintf("%%%s%%", fieldKeySelector.Name))
|
||||||
|
}
|
||||||
|
if idx != len(fieldKeySelectors)-1 {
|
||||||
|
whereClause += " OR "
|
||||||
|
}
|
||||||
|
limit += fieldKeySelector.Limit
|
||||||
|
}
|
||||||
|
args = append(args, limit)
|
||||||
|
|
||||||
|
query := fmt.Sprintf(`
|
||||||
|
SELECT
|
||||||
|
arrayJoin(tagKeys) AS distinctTagKey
|
||||||
|
FROM (
|
||||||
|
SELECT JSONExtractKeys(labels) AS tagKeys
|
||||||
|
FROM %s.%s
|
||||||
|
WHERE `+innerWhereClause+`
|
||||||
|
GROUP BY tagKeys
|
||||||
|
)
|
||||||
|
WHERE `+whereClause+`
|
||||||
|
GROUP BY distinctTagKey
|
||||||
|
LIMIT ?
|
||||||
|
`, t.metricsDBName, t.timeseries1WTblName)
|
||||||
|
|
||||||
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
keys := []*telemetrytypes.TelemetryFieldKey{}
|
||||||
|
for rows.Next() {
|
||||||
|
var name string
|
||||||
|
err = rows.Scan(&name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
||||||
|
}
|
||||||
|
key := &telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: name,
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
}
|
||||||
|
keys = append(keys, key)
|
||||||
|
}
|
||||||
|
|
||||||
|
if rows.Err() != nil {
|
||||||
|
return nil, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
return keys, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
var keys []*telemetrytypes.TelemetryFieldKey
|
||||||
|
var err error
|
||||||
|
switch fieldKeySelector.Signal {
|
||||||
|
case telemetrytypes.SignalTraces:
|
||||||
|
keys, err = t.getTracesKeys(ctx, []*telemetrytypes.FieldKeySelector{fieldKeySelector})
|
||||||
|
case telemetrytypes.SignalLogs:
|
||||||
|
keys, err = t.getLogsKeys(ctx, []*telemetrytypes.FieldKeySelector{fieldKeySelector})
|
||||||
|
case telemetrytypes.SignalMetrics:
|
||||||
|
keys, err = t.getMetricsKeys(ctx, []*telemetrytypes.FieldKeySelector{fieldKeySelector})
|
||||||
|
case telemetrytypes.SignalUnspecified:
|
||||||
|
// get traces keys
|
||||||
|
tracesKeys, err := t.getTracesKeys(ctx, []*telemetrytypes.FieldKeySelector{fieldKeySelector})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
keys = append(keys, tracesKeys...)
|
||||||
|
|
||||||
|
// get logs keys
|
||||||
|
logsKeys, err := t.getLogsKeys(ctx, []*telemetrytypes.FieldKeySelector{fieldKeySelector})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
keys = append(keys, logsKeys...)
|
||||||
|
|
||||||
|
// get metrics keys
|
||||||
|
metricsKeys, err := t.getMetricsKeys(ctx, []*telemetrytypes.FieldKeySelector{fieldKeySelector})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
keys = append(keys, metricsKeys...)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
|
||||||
|
for _, key := range keys {
|
||||||
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
||||||
|
}
|
||||||
|
|
||||||
|
return mapOfKeys, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
|
||||||
|
logsSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
|
tracesSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
|
metricsSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
|
|
||||||
|
for _, fieldKeySelector := range fieldKeySelectors {
|
||||||
|
switch fieldKeySelector.Signal {
|
||||||
|
case telemetrytypes.SignalLogs:
|
||||||
|
logsSelectors = append(logsSelectors, fieldKeySelector)
|
||||||
|
case telemetrytypes.SignalTraces:
|
||||||
|
tracesSelectors = append(tracesSelectors, fieldKeySelector)
|
||||||
|
case telemetrytypes.SignalMetrics:
|
||||||
|
metricsSelectors = append(metricsSelectors, fieldKeySelector)
|
||||||
|
case telemetrytypes.SignalUnspecified:
|
||||||
|
logsSelectors = append(logsSelectors, fieldKeySelector)
|
||||||
|
tracesSelectors = append(tracesSelectors, fieldKeySelector)
|
||||||
|
metricsSelectors = append(metricsSelectors, fieldKeySelector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logsKeys, err := t.getLogsKeys(ctx, logsSelectors)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
tracesKeys, err := t.getTracesKeys(ctx, tracesSelectors)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
metricsKeys, err := t.getMetricsKeys(ctx, metricsSelectors)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
|
||||||
|
for _, key := range logsKeys {
|
||||||
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
||||||
|
}
|
||||||
|
for _, key := range tracesKeys {
|
||||||
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
||||||
|
}
|
||||||
|
for _, key := range metricsKeys {
|
||||||
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
||||||
|
}
|
||||||
|
|
||||||
|
return mapOfKeys, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) GetKey(ctx context.Context, fieldKeySelector *telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
keys, err := t.GetKeys(ctx, fieldKeySelector)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return keys[fieldKeySelector.Name], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) ([]string, error) {
|
||||||
|
|
||||||
|
args := []any{}
|
||||||
|
|
||||||
|
var andConditions []string
|
||||||
|
|
||||||
|
andConditions = append(andConditions, `unix_milli >= ?`)
|
||||||
|
args = append(args, fieldValueSelector.StartUnixMilli)
|
||||||
|
|
||||||
|
andConditions = append(andConditions, `unix_milli <= ?`)
|
||||||
|
args = append(args, fieldValueSelector.EndUnixMilli)
|
||||||
|
|
||||||
|
if len(fieldValueSelector.ExistingQuery) != 0 {
|
||||||
|
// TODO(srikanthccv): add the existing query to the where clause
|
||||||
|
}
|
||||||
|
whereClause := strings.Join(andConditions, " AND ")
|
||||||
|
|
||||||
|
key := telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: fieldValueSelector.Name,
|
||||||
|
Signal: fieldValueSelector.Signal,
|
||||||
|
FieldContext: fieldValueSelector.FieldContext,
|
||||||
|
FieldDataType: fieldValueSelector.FieldDataType,
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(srikanthccv): add the select column
|
||||||
|
selectColumn, _ := t.conditionBuilder.GetTableFieldName(ctx, &key)
|
||||||
|
|
||||||
|
args = append(args, fieldValueSelector.Limit)
|
||||||
|
filterSubQuery := fmt.Sprintf(
|
||||||
|
"SELECT DISTINCT %s FROM %s.%s WHERE %s LIMIT ?",
|
||||||
|
selectColumn,
|
||||||
|
t.relatedMetadataDBName,
|
||||||
|
t.relatedMetadataTblName,
|
||||||
|
whereClause,
|
||||||
|
)
|
||||||
|
zap.L().Debug("filterSubQuery for related values", zap.String("query", filterSubQuery), zap.Any("args", args))
|
||||||
|
|
||||||
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, filterSubQuery, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, ErrFailedToGetRelatedValues
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var attributeValues []string
|
||||||
|
for rows.Next() {
|
||||||
|
var value string
|
||||||
|
if err := rows.Scan(&value); err != nil {
|
||||||
|
return nil, ErrFailedToGetRelatedValues
|
||||||
|
}
|
||||||
|
if value != "" {
|
||||||
|
attributeValues = append(attributeValues, value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return attributeValues, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRelatedValues returns distinct values for the selected field from the
// related-metadata table; it simply delegates to getRelatedValues.
func (t *telemetryMetaStore) GetRelatedValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) ([]string, error) {
	return t.getRelatedValues(ctx, fieldValueSelector)
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) getSpanFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, error) {
|
||||||
|
// build the query to get the keys from the spans that match the field selection criteria
|
||||||
|
var limit int
|
||||||
|
|
||||||
|
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.tracesDBName + "." + t.tracesFieldsTblName)
|
||||||
|
|
||||||
|
if fieldValueSelector.Name != "" {
|
||||||
|
sb.Where(sb.E("tag_key", fieldValueSelector.Name))
|
||||||
|
}
|
||||||
|
|
||||||
|
// now look at the field context
|
||||||
|
if fieldValueSelector.FieldContext != telemetrytypes.FieldContextUnspecified {
|
||||||
|
sb.Where(sb.E("tag_type", fieldValueSelector.FieldContext.TagType()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// now look at the field data type
|
||||||
|
if fieldValueSelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
||||||
|
sb.Where(sb.E("tag_data_type", fieldValueSelector.FieldDataType.TagDataType()))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldValueSelector.Value != "" {
|
||||||
|
if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeString {
|
||||||
|
sb.Where(sb.Like("string_value", "%"+fieldValueSelector.Value+"%"))
|
||||||
|
} else if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeNumber {
|
||||||
|
sb.Where(sb.IsNotNull("number_value"))
|
||||||
|
sb.Where(sb.Like("toString(number_value)", "%"+fieldValueSelector.Value+"%"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if limit == 0 {
|
||||||
|
limit = 50
|
||||||
|
}
|
||||||
|
sb.Limit(limit)
|
||||||
|
|
||||||
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
|
||||||
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
values := &telemetrytypes.TelemetryFieldValues{}
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
for rows.Next() {
|
||||||
|
var stringValue string
|
||||||
|
var numberValue float64
|
||||||
|
if err := rows.Scan(&stringValue, &numberValue); err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
||||||
|
}
|
||||||
|
if _, ok := seen[stringValue]; !ok {
|
||||||
|
values.StringValues = append(values.StringValues, stringValue)
|
||||||
|
seen[stringValue] = true
|
||||||
|
}
|
||||||
|
if _, ok := seen[fmt.Sprintf("%f", numberValue)]; !ok && numberValue != 0 {
|
||||||
|
values.NumberValues = append(values.NumberValues, numberValue)
|
||||||
|
seen[fmt.Sprintf("%f", numberValue)] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return values, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, error) {
|
||||||
|
// build the query to get the keys from the spans that match the field selection criteria
|
||||||
|
var limit int
|
||||||
|
|
||||||
|
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)
|
||||||
|
|
||||||
|
if fieldValueSelector.Name != "" {
|
||||||
|
sb.Where(sb.E("tag_key", fieldValueSelector.Name))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldValueSelector.FieldContext != telemetrytypes.FieldContextUnspecified {
|
||||||
|
sb.Where(sb.E("tag_type", fieldValueSelector.FieldContext.TagType()))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldValueSelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
||||||
|
sb.Where(sb.E("tag_data_type", fieldValueSelector.FieldDataType.TagDataType()))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldValueSelector.Value != "" {
|
||||||
|
if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeString {
|
||||||
|
sb.Where(sb.Like("string_value", "%"+fieldValueSelector.Value+"%"))
|
||||||
|
} else if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeNumber {
|
||||||
|
sb.Where(sb.IsNotNull("number_value"))
|
||||||
|
sb.Where(sb.Like("toString(number_value)", "%"+fieldValueSelector.Value+"%"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if limit == 0 {
|
||||||
|
limit = 50
|
||||||
|
}
|
||||||
|
sb.Limit(limit)
|
||||||
|
|
||||||
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
|
||||||
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
values := &telemetrytypes.TelemetryFieldValues{}
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
for rows.Next() {
|
||||||
|
var stringValue string
|
||||||
|
var numberValue float64
|
||||||
|
if err := rows.Scan(&stringValue, &numberValue); err != nil {
|
||||||
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
||||||
|
}
|
||||||
|
if _, ok := seen[stringValue]; !ok {
|
||||||
|
values.StringValues = append(values.StringValues, stringValue)
|
||||||
|
seen[stringValue] = true
|
||||||
|
}
|
||||||
|
if _, ok := seen[fmt.Sprintf("%f", numberValue)]; !ok && numberValue != 0 {
|
||||||
|
values.NumberValues = append(values.NumberValues, numberValue)
|
||||||
|
seen[fmt.Sprintf("%f", numberValue)] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return values, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getMetricFieldValues is a stub: metric label-value lookup is not implemented
// yet, so it returns (nil, nil) for every selector. Callers must tolerate a
// nil *TelemetryFieldValues result.
func (t *telemetryMetaStore) getMetricFieldValues(_ context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, error) {
	// TODO(srikanthccv): implement this. use new tables?
	return nil, nil
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) GetAllValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, error) {
|
||||||
|
var values *telemetrytypes.TelemetryFieldValues
|
||||||
|
var err error
|
||||||
|
switch fieldValueSelector.Signal {
|
||||||
|
case telemetrytypes.SignalTraces:
|
||||||
|
values, err = t.getSpanFieldValues(ctx, fieldValueSelector)
|
||||||
|
case telemetrytypes.SignalLogs:
|
||||||
|
values, err = t.getLogFieldValues(ctx, fieldValueSelector)
|
||||||
|
case telemetrytypes.SignalMetrics:
|
||||||
|
values, err = t.getMetricFieldValues(ctx, fieldValueSelector)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return values, nil
|
||||||
|
}
|
||||||
86
pkg/telemetrymetadata/metadata_test.go
Normal file
86
pkg/telemetrymetadata/metadata_test.go
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
package telemetrymetadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrytraces"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
cmock "github.com/srikanthccv/ClickHouse-go-mock"
|
||||||
|
)
|
||||||
|
|
||||||
|
// regexMatcher lets the mock telemetry store match expected SQL by treating
// the expectation as a regular expression.
type regexMatcher struct {
}

// Match compiles expectedSQL as a regular expression and returns an error when
// it does not compile or when actualSQL does not match it.
func (m *regexMatcher) Match(expectedSQL, actualSQL string) error {
	pattern, compileErr := regexp.Compile(expectedSQL)
	if compileErr != nil {
		return compileErr
	}
	if pattern.MatchString(actualSQL) {
		return nil
	}
	return fmt.Errorf("expected query to contain %s, got %s", expectedSQL, actualSQL)
}
|
||||||
|
|
||||||
|
// TestGetKeys wires a mocked ClickHouse store into the metadata store and
// exercises GetKeys for a traces-signal selector end to end.
func TestGetKeys(t *testing.T) {
	// mock store whose query expectations are matched via regexMatcher
	mockTelemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
	mock := mockTelemetryStore.Mock()

	// NOTE(review): TimeseriesV41weekTableName is passed for both the metrics
	// fields table and the 1-week timeseries table — confirm that is intended.
	metadata, err := NewTelemetryMetaStore(
		mockTelemetryStore,
		telemetrytraces.DBName,
		telemetrytraces.TagAttributesV2TableName,
		telemetrytraces.SpanIndexV3TableName,
		telemetrymetrics.DBName,
		telemetrymetrics.TimeseriesV41weekTableName,
		telemetrymetrics.TimeseriesV41weekTableName,
		telemetrylogs.DBName,
		telemetrylogs.LogsV2TableName,
		telemetrylogs.TagAttributesV2TableName,
		DBName,
		AttributesMetadataLocalTableName,
	)

	if err != nil {
		t.Fatalf("Failed to create telemetry metadata store: %v", err)
	}

	// first expectation: the SHOW CREATE TABLE used to discover materialised columns
	rows := cmock.NewRows([]cmock.ColumnType{
		{Name: "statement", Type: "String"},
	}, [][]any{{"CREATE TABLE signoz_traces.signoz_index_v3"}})

	mock.
		ExpectSelect("SHOW CREATE TABLE signoz_traces.distributed_signoz_index_v3").
		WillReturnRows(rows)

	// second expectation: the tag-attributes lookup; the pattern is a regex, so
	// any SELECT matches, but the bound args are checked exactly
	query := `SELECT.*`

	// duplicate result rows verify that GetKeys tolerates repeated keys
	mock.ExpectQuery(query).
		WithArgs("%http.method%", telemetrytypes.FieldContextSpan.TagType(), telemetrytypes.FieldDataTypeString.TagDataType(), 10).
		WillReturnRows(cmock.NewRows([]cmock.ColumnType{
			{Name: "tag_key", Type: "String"},
			{Name: "tag_type", Type: "String"},
			{Name: "tag_data_type", Type: "String"},
			{Name: "priority", Type: "UInt8"},
		}, [][]any{{"http.method", "tag", "String", 1}, {"http.method", "tag", "String", 1}}))
	keys, err := metadata.GetKeys(context.Background(), &telemetrytypes.FieldKeySelector{
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
		Name:          "http.method",
		Limit:         10,
	})

	if err != nil {
		t.Fatalf("Failed to get keys: %v", err)
	}

	// NOTE(review): the result is only logged, not asserted — consider checking
	// the returned map's contents.
	t.Logf("Keys: %v", keys)
}
|
||||||
132
pkg/telemetrymetadata/stmt_parse.go
Normal file
132
pkg/telemetrymetadata/stmt_parse.go
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
package telemetrymetadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/AfterShip/clickhouse-sql-parser/parser"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TelemetryFieldVisitor is an AST visitor for extracting telemetry fields
// from a parsed CREATE TABLE statement. It embeds parser.DefaultASTVisitor so
// only the hooks it overrides need to be implemented.
type TelemetryFieldVisitor struct {
	parser.DefaultASTVisitor
	// Fields accumulates the telemetry field keys discovered while visiting
	// column definitions.
	Fields []*telemetrytypes.TelemetryFieldKey
}
|
||||||
|
|
||||||
|
func NewTelemetryFieldVisitor() *TelemetryFieldVisitor {
|
||||||
|
return &TelemetryFieldVisitor{
|
||||||
|
Fields: make([]*telemetrytypes.TelemetryFieldKey, 0),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// VisitColumnDef is called when visiting a column definition
|
||||||
|
func (v *TelemetryFieldVisitor) VisitColumnDef(expr *parser.ColumnDef) error {
|
||||||
|
// Check if this is a materialized column with DEFAULT expression
|
||||||
|
if expr.DefaultExpr == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse column name to extract context and data type
|
||||||
|
columnName := expr.Name.String()
|
||||||
|
|
||||||
|
// Remove backticks if present
|
||||||
|
columnName = strings.TrimPrefix(columnName, "`")
|
||||||
|
columnName = strings.TrimSuffix(columnName, "`")
|
||||||
|
|
||||||
|
// Parse the column name to extract components
|
||||||
|
parts := strings.Split(columnName, "_")
|
||||||
|
if len(parts) < 2 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
context := parts[0]
|
||||||
|
dataType := parts[1]
|
||||||
|
|
||||||
|
// Check if this is a valid telemetry column
|
||||||
|
var fieldContext telemetrytypes.FieldContext
|
||||||
|
switch context {
|
||||||
|
case "resource":
|
||||||
|
fieldContext = telemetrytypes.FieldContextResource
|
||||||
|
case "scope":
|
||||||
|
fieldContext = telemetrytypes.FieldContextScope
|
||||||
|
case "attribute":
|
||||||
|
fieldContext = telemetrytypes.FieldContextAttribute
|
||||||
|
default:
|
||||||
|
return nil // Not a telemetry column
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check and convert data type
|
||||||
|
var fieldDataType telemetrytypes.FieldDataType
|
||||||
|
switch dataType {
|
||||||
|
case "string":
|
||||||
|
fieldDataType = telemetrytypes.FieldDataTypeString
|
||||||
|
case "bool":
|
||||||
|
fieldDataType = telemetrytypes.FieldDataTypeBool
|
||||||
|
case "int", "int64":
|
||||||
|
fieldDataType = telemetrytypes.FieldDataTypeFloat64
|
||||||
|
case "float", "float64":
|
||||||
|
fieldDataType = telemetrytypes.FieldDataTypeFloat64
|
||||||
|
case "number":
|
||||||
|
fieldDataType = telemetrytypes.FieldDataTypeFloat64
|
||||||
|
default:
|
||||||
|
return nil // Unknown data type
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract field name from the DEFAULT expression
|
||||||
|
// The DEFAULT expression should be something like: resources_string['k8s.cluster.name']
|
||||||
|
// We need to extract the key inside the square brackets
|
||||||
|
defaultExprStr := expr.DefaultExpr.String()
|
||||||
|
|
||||||
|
// Look for the pattern: map['key']
|
||||||
|
startIdx := strings.Index(defaultExprStr, "['")
|
||||||
|
endIdx := strings.Index(defaultExprStr, "']")
|
||||||
|
|
||||||
|
if startIdx == -1 || endIdx == -1 || startIdx+2 >= endIdx {
|
||||||
|
return nil // Invalid DEFAULT expression format
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldName := defaultExprStr[startIdx+2 : endIdx]
|
||||||
|
|
||||||
|
// Create and store the TelemetryFieldKey
|
||||||
|
field := telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: fieldName,
|
||||||
|
FieldContext: fieldContext,
|
||||||
|
FieldDataType: fieldDataType,
|
||||||
|
Materialized: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
v.Fields = append(v.Fields, &field)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExtractFieldKeysFromTblStatement(statement string) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
// Parse the CREATE TABLE statement using the ClickHouse parser
|
||||||
|
p := parser.NewParser(statement)
|
||||||
|
stmts, err := p.ParseStmts()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a visitor to collect telemetry fields
|
||||||
|
visitor := NewTelemetryFieldVisitor()
|
||||||
|
|
||||||
|
// Visit each statement
|
||||||
|
for _, stmt := range stmts {
|
||||||
|
// We're looking for CreateTable statements
|
||||||
|
createTable, ok := stmt.(*parser.CreateTable)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Visit the table schema to extract column definitions
|
||||||
|
if createTable.TableSchema != nil {
|
||||||
|
for _, column := range createTable.TableSchema.Columns {
|
||||||
|
if err := column.Accept(visitor); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return visitor.Fields, nil
|
||||||
|
}
|
||||||
148
pkg/telemetrymetadata/stmt_parse_test.go
Normal file
148
pkg/telemetrymetadata/stmt_parse_test.go
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
package telemetrymetadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"slices"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestExtractFieldKeysFromTblStatement(t *testing.T) {
|
||||||
|
|
||||||
|
var statement = `CREATE TABLE signoz_logs.logs_v2
|
||||||
|
(
|
||||||
|
` + "`ts_bucket_start`" + ` UInt64 CODEC(DoubleDelta, LZ4),
|
||||||
|
` + "`resource_fingerprint`" + ` String CODEC(ZSTD(1)),
|
||||||
|
` + "`timestamp`" + ` UInt64 CODEC(DoubleDelta, LZ4),
|
||||||
|
` + "`observed_timestamp`" + ` UInt64 CODEC(DoubleDelta, LZ4),
|
||||||
|
` + "`id`" + ` String CODEC(ZSTD(1)),
|
||||||
|
` + "`trace_id`" + ` String CODEC(ZSTD(1)),
|
||||||
|
` + "`span_id`" + ` String CODEC(ZSTD(1)),
|
||||||
|
` + "`trace_flags`" + ` UInt32,
|
||||||
|
` + "`severity_text`" + ` LowCardinality(String) CODEC(ZSTD(1)),
|
||||||
|
` + "`severity_number`" + ` UInt8,
|
||||||
|
` + "`body`" + ` String CODEC(ZSTD(2)),
|
||||||
|
` + "`attributes_string`" + ` Map(LowCardinality(String), String) CODEC(ZSTD(1)),
|
||||||
|
` + "`attributes_number`" + ` Map(LowCardinality(String), Float64) CODEC(ZSTD(1)),
|
||||||
|
` + "`attributes_bool`" + ` Map(LowCardinality(String), Bool) CODEC(ZSTD(1)),
|
||||||
|
` + "`resources_string`" + ` Map(LowCardinality(String), String) CODEC(ZSTD(1)),
|
||||||
|
` + "`scope_name`" + ` String CODEC(ZSTD(1)),
|
||||||
|
` + "`scope_version`" + ` String CODEC(ZSTD(1)),
|
||||||
|
` + "`scope_string`" + ` Map(LowCardinality(String), String) CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_number_input_size`" + ` Int64 DEFAULT attributes_number['input_size'] CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_number_input_size_exists`" + ` Bool DEFAULT if(mapContains(attributes_number, 'input_size') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_string_log$$iostream`" + ` String DEFAULT attributes_string['log.iostream'] CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_string_log$$iostream_exists`" + ` Bool DEFAULT if(mapContains(attributes_string, 'log.iostream') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_string_log$$file$$path`" + ` String DEFAULT attributes_string['log.file.path'] CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_string_log$$file$$path_exists`" + ` Bool DEFAULT if(mapContains(attributes_string, 'log.file.path') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$cluster$$name`" + ` String DEFAULT resources_string['k8s.cluster.name'] CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$cluster$$name_exists`" + ` Bool DEFAULT if(mapContains(resources_string, 'k8s.cluster.name') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$namespace$$name`" + ` String DEFAULT resources_string['k8s.namespace.name'] CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$namespace$$name_exists`" + ` Bool DEFAULT if(mapContains(resources_string, 'k8s.namespace.name') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$pod$$name`" + ` String DEFAULT resources_string['k8s.pod.name'] CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$pod$$name_exists`" + ` Bool DEFAULT if(mapContains(resources_string, 'k8s.pod.name') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$node$$name`" + ` String DEFAULT resources_string['k8s.node.name'] CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$node$$name_exists`" + ` Bool DEFAULT if(mapContains(resources_string, 'k8s.node.name') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$container$$name`" + ` String DEFAULT resources_string['k8s.container.name'] CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$container$$name_exists`" + ` Bool DEFAULT if(mapContains(resources_string, 'k8s.container.name') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$deployment$$name`" + ` String DEFAULT resources_string['k8s.deployment.name'] CODEC(ZSTD(1)),
|
||||||
|
` + "`resource_string_k8s$$deployment$$name_exists`" + ` Bool DEFAULT if(mapContains(resources_string, 'k8s.deployment.name') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_string_processor`" + ` String DEFAULT attributes_string['processor'] CODEC(ZSTD(1)),
|
||||||
|
` + "`attribute_string_processor_exists`" + ` Bool DEFAULT if(mapContains(attributes_string, 'processor') != 0, true, false) CODEC(ZSTD(1)),
|
||||||
|
INDEX body_idx lower(body) TYPE ngrambf_v1(4, 60000, 5, 0) GRANULARITY 1,
|
||||||
|
INDEX id_minmax id TYPE minmax GRANULARITY 1,
|
||||||
|
INDEX severity_number_idx severity_number TYPE set(25) GRANULARITY 4,
|
||||||
|
INDEX severity_text_idx severity_text TYPE set(25) GRANULARITY 4,
|
||||||
|
INDEX trace_flags_idx trace_flags TYPE bloom_filter GRANULARITY 4,
|
||||||
|
INDEX scope_name_idx scope_name TYPE tokenbf_v1(10240, 3, 0) GRANULARITY 4,
|
||||||
|
INDEX ` + "`resource_string_k8s$$cluster$$name_idx`" + ` ` + "`resource_string_k8s$$cluster$$name`" + ` TYPE bloom_filter(0.01) GRANULARITY 64,
|
||||||
|
INDEX ` + "`resource_string_k8s$$namespace$$name_idx`" + ` ` + "`resource_string_k8s$$namespace$$name`" + ` TYPE bloom_filter(0.01) GRANULARITY 64,
|
||||||
|
INDEX ` + "`resource_string_k8s$$pod$$name_idx`" + ` ` + "`resource_string_k8s$$pod$$name`" + ` TYPE bloom_filter(0.01) GRANULARITY 64,
|
||||||
|
INDEX ` + "`resource_string_k8s$$node$$name_idx`" + ` ` + "`resource_string_k8s$$node$$name`" + ` TYPE bloom_filter(0.01) GRANULARITY 64,
|
||||||
|
INDEX ` + "`resource_string_k8s$$container$$name_idx`" + ` ` + "`resource_string_k8s$$container$$name`" + ` TYPE bloom_filter(0.01) GRANULARITY 64,
|
||||||
|
INDEX ` + "`resource_string_k8s$$deployment$$name_idx`" + ` ` + "`resource_string_k8s$$deployment$$name`" + ` TYPE bloom_filter(0.01) GRANULARITY 64,
|
||||||
|
INDEX attribute_string_processor_idx attribute_string_processor TYPE bloom_filter(0.01) GRANULARITY 64
|
||||||
|
)
|
||||||
|
ENGINE = ReplicatedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')
|
||||||
|
PARTITION BY toDate(timestamp / 1000000000)
|
||||||
|
ORDER BY (ts_bucket_start, resource_fingerprint, severity_text, timestamp, id)
|
||||||
|
TTL toDateTime(timestamp / 1000000000) + toIntervalSecond(2592000)
|
||||||
|
SETTINGS ttl_only_drop_parts = 1, index_granularity = 8192`
|
||||||
|
|
||||||
|
keys, err := ExtractFieldKeysFromTblStatement(statement)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed to extract field keys from tbl statement: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// some expected keys
|
||||||
|
expectedKeys := []*telemetrytypes.TelemetryFieldKey{
|
||||||
|
{
|
||||||
|
Name: "k8s.pod.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "k8s.cluster.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "k8s.namespace.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "k8s.deployment.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "k8s.node.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "k8s.container.name",
|
||||||
|
FieldContext: telemetrytypes.FieldContextResource,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "processor",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "input_size",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "log.iostream",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "log.file.path",
|
||||||
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
Materialized: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, key := range expectedKeys {
|
||||||
|
if !slices.ContainsFunc(keys, func(k *telemetrytypes.TelemetryFieldKey) bool {
|
||||||
|
return k.Name == key.Name && k.FieldContext == key.FieldContext && k.FieldDataType == key.FieldDataType && k.Materialized == key.Materialized
|
||||||
|
}) {
|
||||||
|
t.Errorf("expected key %v not found", key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
7
pkg/telemetrymetadata/tables.go
Normal file
7
pkg/telemetrymetadata/tables.go
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
package telemetrymetadata
|
||||||
|
|
||||||
|
// Database and table names for the telemetry metadata store.
const (
	// DBName is the ClickHouse database holding attribute metadata.
	DBName = "signoz_metadata"
	// AttributesMetadataTableName is the cluster-wide distributed table.
	AttributesMetadataTableName = "distributed_attributes_metadata"
	// AttributesMetadataLocalTableName is the per-shard local table.
	AttributesMetadataLocalTableName = "attributes_metadata"
)
|
||||||
21
pkg/telemetrymetrics/tables.go
Normal file
21
pkg/telemetrymetrics/tables.go
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
package telemetrymetrics
|
||||||
|
|
||||||
|
// Database and table names for the metrics store. Each logical table comes in
// a distributed (cluster-wide) and a local (per-shard) variant.
const (
	// DBName is the ClickHouse database holding metric samples and series.
	DBName = "signoz_metrics"

	// Raw samples plus 5m/30m pre-aggregations.
	SamplesV4TableName            = "distributed_samples_v4"
	SamplesV4LocalTableName       = "samples_v4"
	SamplesV4Agg5mTableName       = "distributed_samples_v4_agg_5m"
	SamplesV4Agg5mLocalTableName  = "samples_v4_agg_5m"
	SamplesV4Agg30mTableName      = "distributed_samples_v4_agg_30m"
	SamplesV4Agg30mLocalTableName = "samples_v4_agg_30m"

	// Exponential histogram samples.
	ExpHistogramTableName      = "distributed_exp_hist"
	ExpHistogramLocalTableName = "exp_hist"

	// Time-series (fingerprint/labels) tables at increasing retention
	// granularities.
	TimeseriesV4TableName           = "distributed_time_series_v4"
	TimeseriesV4LocalTableName      = "time_series_v4"
	TimeseriesV46hrsTableName       = "distributed_time_series_v4_6hrs"
	TimeseriesV46hrsLocalTableName  = "time_series_v4_6hrs"
	TimeseriesV41dayTableName       = "distributed_time_series_v4_1day"
	TimeseriesV41dayLocalTableName  = "time_series_v4_1day"
	TimeseriesV41weekTableName      = "distributed_time_series_v4_1week"
	TimeseriesV41weekLocalTableName = "time_series_v4_1week"
)
|
||||||
@@ -252,9 +252,9 @@ func (c *conditionBuilder) GetCondition(
|
|||||||
return sb.NotILike(tblFieldName, value), nil
|
return sb.NotILike(tblFieldName, value), nil
|
||||||
|
|
||||||
case qbtypes.FilterOperatorContains:
|
case qbtypes.FilterOperatorContains:
|
||||||
return sb.ILike(tblFieldName, value), nil
|
return sb.ILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
|
||||||
case qbtypes.FilterOperatorNotContains:
|
case qbtypes.FilterOperatorNotContains:
|
||||||
return sb.NotILike(tblFieldName, value), nil
|
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
|
||||||
|
|
||||||
case qbtypes.FilterOperatorRegexp:
|
case qbtypes.FilterOperatorRegexp:
|
||||||
exp := fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value))
|
exp := fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value))
|
||||||
|
|||||||
@@ -1,37 +1,246 @@
|
|||||||
package types
|
package types
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"database/sql/driver"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pkg/errors"
|
||||||
"github.com/uptrace/bun"
|
"github.com/uptrace/bun"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Integration struct {
|
// IntegrationUserEmail identifies the synthetic user account that backs an
// integration (each supported cloud provider gets one well-known address).
type IntegrationUserEmail string

const (
	AWSIntegrationUserEmail IntegrationUserEmail = "aws-integration@signoz.io"
)

// AllIntegrationUserEmails enumerates every known integration user email.
var AllIntegrationUserEmails = []IntegrationUserEmail{
	AWSIntegrationUserEmail,
}
|
||||||
|
|
||||||
type CloudIntegrationAccount struct {
|
// --------------------------------------------------------------------------
|
||||||
bun.BaseModel `bun:"table:cloud_integrations_accounts"`
|
// Normal integration uses just the installed_integration table
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
CloudProvider string `bun:"cloud_provider,type:text,unique:cloud_provider_id"`
|
type InstalledIntegration struct {
|
||||||
ID string `bun:"id,type:text,notnull,unique:cloud_provider_id"`
|
bun.BaseModel `bun:"table:installed_integration"`
|
||||||
ConfigJSON string `bun:"config_json,type:text"`
|
|
||||||
CloudAccountID string `bun:"cloud_account_id,type:text"`
|
Identifiable
|
||||||
LastAgentReportJSON string `bun:"last_agent_report_json,type:text"`
|
Type string `json:"type" bun:"type,type:text,unique:org_id_type"`
|
||||||
CreatedAt time.Time `bun:"created_at,notnull,default:current_timestamp"`
|
Config InstalledIntegrationConfig `json:"config" bun:"config,type:text"`
|
||||||
RemovedAt time.Time `bun:"removed_at,type:timestamp"`
|
InstalledAt time.Time `json:"installed_at" bun:"installed_at,default:current_timestamp"`
|
||||||
|
OrgID string `json:"org_id" bun:"org_id,type:text,unique:org_id_type,references:organizations(id),on_delete:cascade"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type CloudIntegrationServiceConfig struct {
|
// InstalledIntegrationConfig is the free-form JSON configuration blob stored
// alongside an installed integration.
type InstalledIntegrationConfig map[string]interface{}

// Scan implements sql.Scanner: deserialize the JSON config read from the db.
// Accepts both []byte and string column representations.
func (c *InstalledIntegrationConfig) Scan(src interface{}) error {
	var data []byte
	switch v := src.(type) {
	case []byte:
		data = v
	case string:
		data = []byte(v)
	default:
		return fmt.Errorf("tried to scan from %T instead of string or bytes", src)
	}
	return json.Unmarshal(data, c)
}

// Value implements driver.Valuer: serialize the config to JSON for the db.
//
// Previously this wrapped the error with the archived github.com/pkg/errors;
// it now uses fmt.Errorf with %w, consistent with the other config
// serializers in this file (AccountConfig, AgentReport, CloudServiceConfig).
func (c *InstalledIntegrationConfig) Value() (driver.Value, error) {
	serialized, err := json.Marshal(c)
	if err != nil {
		return nil, fmt.Errorf("could not serialize integration config to JSON: %w", err)
	}
	return serialized, nil
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Cloud integration uses the cloud_integration table
|
||||||
|
// and cloud_integrations_service table
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
|
||||||
|
type CloudIntegration struct {
|
||||||
|
bun.BaseModel `bun:"table:cloud_integration"`
|
||||||
|
|
||||||
|
Identifiable
|
||||||
|
TimeAuditable
|
||||||
|
Provider string `json:"provider" bun:"provider,type:text,unique:provider_id"`
|
||||||
|
Config *AccountConfig `json:"config" bun:"config,type:text"`
|
||||||
|
AccountID *string `json:"account_id" bun:"account_id,type:text"`
|
||||||
|
LastAgentReport *AgentReport `json:"last_agent_report" bun:"last_agent_report,type:text"`
|
||||||
|
RemovedAt *time.Time `json:"removed_at" bun:"removed_at,type:timestamp,nullzero"`
|
||||||
|
OrgID string `bun:"org_id,type:text,unique:provider_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *CloudIntegration) Status() AccountStatus {
|
||||||
|
status := AccountStatus{}
|
||||||
|
if a.LastAgentReport != nil {
|
||||||
|
lastHeartbeat := a.LastAgentReport.TimestampMillis
|
||||||
|
status.Integration.LastHeartbeatTsMillis = &lastHeartbeat
|
||||||
|
}
|
||||||
|
return status
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *CloudIntegration) Account() Account {
|
||||||
|
ca := Account{Id: a.ID.StringValue(), Status: a.Status()}
|
||||||
|
|
||||||
|
if a.AccountID != nil {
|
||||||
|
ca.CloudAccountId = *a.AccountID
|
||||||
|
}
|
||||||
|
|
||||||
|
if a.Config != nil {
|
||||||
|
ca.Config = *a.Config
|
||||||
|
} else {
|
||||||
|
ca.Config = DefaultAccountConfig()
|
||||||
|
}
|
||||||
|
return ca
|
||||||
|
}
|
||||||
|
|
||||||
|
type Account struct {
|
||||||
|
Id string `json:"id"`
|
||||||
|
CloudAccountId string `json:"cloud_account_id"`
|
||||||
|
Config AccountConfig `json:"config"`
|
||||||
|
Status AccountStatus `json:"status"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AccountStatus struct {
|
||||||
|
Integration AccountIntegrationStatus `json:"integration"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AccountIntegrationStatus struct {
|
||||||
|
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// DefaultAccountConfig is the configuration applied to an account that has
// none stored (empty, non-nil region list so it serializes as []).
func DefaultAccountConfig() AccountConfig {
	return AccountConfig{EnabledRegions: []string{}}
}

// AccountConfig is the user-editable configuration for a cloud account.
type AccountConfig struct {
	EnabledRegions []string `json:"regions"`
}

// Scan implements sql.Scanner: deserialize the JSON config read from the db.
// Accepts both []byte and string column representations.
func (c *AccountConfig) Scan(src any) error {
	var data []byte
	switch v := src.(type) {
	case []byte:
		data = v
	case string:
		data = []byte(v)
	default:
		return fmt.Errorf("tried to scan from %T instead of string or bytes", src)
	}
	return json.Unmarshal(data, c)
}

// Value implements driver.Valuer: serialize the config to JSON for the db.
// A nil receiver stores SQL NULL.
func (c *AccountConfig) Value() (driver.Value, error) {
	if c == nil {
		return nil, nil
	}

	serialized, err := json.Marshal(c)
	if err != nil {
		return nil, fmt.Errorf(
			"couldn't serialize cloud account config to JSON: %w", err,
		)
	}
	return serialized, nil
}
|
||||||
|
|
||||||
|
// AgentReport is the most recent payload received from a cloud integration
// agent, together with when it arrived.
type AgentReport struct {
	TimestampMillis int64          `json:"timestamp_millis"`
	Data            map[string]any `json:"data"`
}

// Scan implements sql.Scanner: deserialize the JSON report read from the db.
// Accepts both []byte and string column representations.
func (r *AgentReport) Scan(src any) error {
	var data []byte
	switch v := src.(type) {
	case []byte:
		data = v
	case string:
		data = []byte(v)
	default:
		return fmt.Errorf("tried to scan from %T instead of string or bytes", src)
	}
	return json.Unmarshal(data, r)
}

// Value implements driver.Valuer: serialize the report to JSON for the db.
// A nil receiver stores SQL NULL.
func (r *AgentReport) Value() (driver.Value, error) {
	if r == nil {
		return nil, nil
	}

	serialized, err := json.Marshal(r)
	if err != nil {
		return nil, fmt.Errorf(
			"couldn't serialize agent report to JSON: %w", err,
		)
	}
	return serialized, nil
}
|
||||||
|
|
||||||
|
type CloudIntegrationService struct {
|
||||||
|
bun.BaseModel `bun:"table:cloud_integration_service,alias:cis"`
|
||||||
|
|
||||||
|
Identifiable
|
||||||
|
TimeAuditable
|
||||||
|
Type string `bun:"type,type:text,notnull,unique:cloud_integration_id_type"`
|
||||||
|
Config CloudServiceConfig `bun:"config,type:text"`
|
||||||
|
CloudIntegrationID string `bun:"cloud_integration_id,type:text,notnull,unique:cloud_integration_id_type,references:cloud_integrations(id),on_delete:cascade"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloudServiceLogsConfig toggles log collection for a cloud service.
type CloudServiceLogsConfig struct {
	Enabled bool `json:"enabled"`
}

// CloudServiceMetricsConfig toggles metric collection for a cloud service.
type CloudServiceMetricsConfig struct {
	Enabled bool `json:"enabled"`
}

// CloudServiceConfig holds optional per-signal settings for a cloud service;
// a nil section means "not configured" and is omitted from JSON.
type CloudServiceConfig struct {
	Logs    *CloudServiceLogsConfig    `json:"logs,omitempty"`
	Metrics *CloudServiceMetricsConfig `json:"metrics,omitempty"`
}

// Scan implements sql.Scanner: deserialize the JSON config read from the db.
// Accepts both []byte and string column representations.
func (c *CloudServiceConfig) Scan(src any) error {
	var data []byte
	switch v := src.(type) {
	case []byte:
		data = v
	case string:
		data = []byte(v)
	default:
		return fmt.Errorf("tried to scan from %T instead of string or bytes", src)
	}
	return json.Unmarshal(data, c)
}

// Value implements driver.Valuer: serialize the config to JSON for the db.
// A nil receiver stores SQL NULL.
func (c *CloudServiceConfig) Value() (driver.Value, error) {
	if c == nil {
		return nil, nil
	}

	serialized, err := json.Marshal(c)
	if err != nil {
		return nil, fmt.Errorf(
			"couldn't serialize cloud service config to JSON: %w", err,
		)
	}
	return serialized, nil
}
|
||||||
|
|||||||
Reference in New Issue
Block a user